aboutsummaryrefslogtreecommitdiff
path: root/tools/Vulkan-Tools/scripts
diff options
context:
space:
mode:
Diffstat (limited to 'tools/Vulkan-Tools/scripts')
-rw-r--r--tools/Vulkan-Tools/scripts/CMakeLists.txt135
-rwxr-xr-xtools/Vulkan-Tools/scripts/android.py199
-rw-r--r--tools/Vulkan-Tools/scripts/common_codegen.py114
-rwxr-xr-xtools/Vulkan-Tools/scripts/generate_source.py279
-rw-r--r--tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py1452
-rw-r--r--tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py163
-rw-r--r--tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py1165
-rw-r--r--tools/Vulkan-Tools/scripts/gn/DEPS68
-rwxr-xr-xtools/Vulkan-Tools/scripts/gn/generate_vulkan_icd_json.py120
-rwxr-xr-xtools/Vulkan-Tools/scripts/gn/gn.py56
-rw-r--r--tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/build.gni18
-rw-r--r--tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_headers.gni15
-rw-r--r--tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_tools.gni21
-rwxr-xr-xtools/Vulkan-Tools/scripts/gn/update_deps.sh49
-rw-r--r--tools/Vulkan-Tools/scripts/known_good.json79
-rw-r--r--tools/Vulkan-Tools/scripts/kvt_genvk.py416
-rwxr-xr-xtools/Vulkan-Tools/scripts/update_deps.py804
17 files changed, 5153 insertions, 0 deletions
diff --git a/tools/Vulkan-Tools/scripts/CMakeLists.txt b/tools/Vulkan-Tools/scripts/CMakeLists.txt
new file mode 100644
index 00000000..65c8bade
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/CMakeLists.txt
@@ -0,0 +1,135 @@
+# ~~~
+# Copyright (c) 2023 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ~~~
+
# UPDATE_DEPS drives scripts/update_deps.py at configure time so dependent
# repositories (Vulkan-Headers, googletest, Vulkan-Loader, ...) are fetched,
# built, and installed automatically.
option(UPDATE_DEPS "Run update_deps.py for user")
if (UPDATE_DEPS)
    find_package(Python3 REQUIRED QUIET)

    set(update_dep_py "${CMAKE_CURRENT_LIST_DIR}/update_deps.py")
    set(known_good_json "${CMAKE_CURRENT_LIST_DIR}/known_good.json")

    # Re-run configure whenever the script or the pinned-commit manifest changes.
    set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${update_dep_py} ${known_good_json})

    list(APPEND update_dep_command "${update_dep_py}")
    list(APPEND update_dep_command "--generator")
    list(APPEND update_dep_command "${CMAKE_GENERATOR}")

    if (CMAKE_GENERATOR_PLATFORM)
        list(APPEND update_dep_command "--arch")
        list(APPEND update_dep_command "${CMAKE_GENERATOR_PLATFORM}")
    endif()

    if (NOT CMAKE_BUILD_TYPE)
        message(WARNING "CMAKE_BUILD_TYPE not set. Using Debug for dependency build type")
        set(_build_type Debug)
    else()
        set(_build_type ${CMAKE_BUILD_TYPE})
    endif()
    list(APPEND update_dep_command "--config")
    list(APPEND update_dep_command "${_build_type}")
    list(APPEND update_dep_command "--api")
    list(APPEND update_dep_command "${API_TYPE}")

    # Keep dependency install trees separate per config / target platform / bitness
    # so switching configurations never mixes incompatible binaries.
    set(UPDATE_DEPS_DIR_SUFFIX "${_build_type}")
    if (CMAKE_CROSSCOMPILING)
        set(UPDATE_DEPS_DIR_SUFFIX "${CMAKE_SYSTEM_NAME}/${UPDATE_DEPS_DIR_SUFFIX}/${CMAKE_SYSTEM_PROCESSOR}")
    else()
        math(EXPR bitness "8 * ${CMAKE_SIZEOF_VOID_P}")
        set(UPDATE_DEPS_DIR_SUFFIX "${UPDATE_DEPS_DIR_SUFFIX}/${bitness}")
    endif()
    set(UPDATE_DEPS_DIR "${PROJECT_SOURCE_DIR}/external/${UPDATE_DEPS_DIR_SUFFIX}" CACHE PATH "Location where update_deps.py installs packages")
    list(APPEND update_dep_command "--dir")
    list(APPEND update_dep_command "${UPDATE_DEPS_DIR}")

    if (NOT BUILD_TESTS)
        list(APPEND update_dep_command "--optional=tests")
    endif()

    if (UPDATE_DEPS_SKIP_EXISTING_INSTALL)
        list(APPEND update_dep_command "--skip-existing-install")
    endif()

    # Configure-time variables forwarded to the dependency builds via --cmake_var.
    list(APPEND cmake_vars "CMAKE_TOOLCHAIN_FILE")

    # Avoids manually setting CMAKE_SYSTEM_NAME unless it's needed:
    # https://cmake.org/cmake/help/latest/variable/CMAKE_CROSSCOMPILING.html
    if (NOT "${CMAKE_SYSTEM_NAME}" STREQUAL "${CMAKE_HOST_SYSTEM_NAME}")
        list(APPEND cmake_vars "CMAKE_SYSTEM_NAME")
    endif()
    if (APPLE)
        list(APPEND cmake_vars "CMAKE_OSX_ARCHITECTURES" "CMAKE_OSX_DEPLOYMENT_TARGET")
    endif()
    if (NOT MSVC_IDE)
        list(APPEND cmake_vars "CMAKE_CXX_COMPILER" "CMAKE_C_COMPILER" "CMAKE_ASM_COMPILER")
    endif()
    if (ANDROID)
        list(APPEND cmake_vars "ANDROID_PLATFORM" "CMAKE_ANDROID_ARCH_ABI" "CMAKE_ANDROID_STL_TYPE" "CMAKE_ANDROID_RTTI" "CMAKE_ANDROID_EXCEPTIONS" "ANDROID_USE_LEGACY_TOOLCHAIN_FILE")
    endif()

    # NOTE: the old dead `cmake_var` accumulator (always empty, so its
    # `if (cmake_var)` append could never fire) has been removed; each
    # defined variable is appended to the command directly.
    foreach(var IN LISTS cmake_vars)
        if (DEFINED ${var})
            list(APPEND update_dep_command "--cmake_var")
            list(APPEND update_dep_command "${var}=${${var}}")
        endif()
    endforeach()

    file(TIMESTAMP ${update_dep_py} timestamp_1)
    file(TIMESTAMP ${known_good_json} timestamp_2)

    # Hash everything that influences the dependency build: script mtime,
    # manifest mtime, and the full command line we are about to run.
    string(MD5 md5_hash "${timestamp_1}-${timestamp_2}-${update_dep_command}")

    set(UPDATE_DEPS_HASH "0" CACHE STRING "Default value until we run update_deps.py")
    mark_as_advanced(UPDATE_DEPS_HASH)

    # First ever run: force clean build/install directories.
    if ("${UPDATE_DEPS_HASH}" STREQUAL "0")
        list(APPEND update_dep_command "--clean-build")
        list(APPEND update_dep_command "--clean-install")
    endif()

    # Quote the cache expansion so an empty/odd value cannot break the if().
    if ("${md5_hash}" STREQUAL "$CACHE{UPDATE_DEPS_HASH}")
        message(DEBUG "update_deps.py: no work to do.")
    else()
        execute_process(
            COMMAND ${Python3_EXECUTABLE} ${update_dep_command}
            RESULT_VARIABLE _update_deps_result
        )
        if (NOT _update_deps_result EQUAL 0)
            message(FATAL_ERROR "Could not run update_deps.py which is necessary to download dependencies.")
        endif()
        # FORCE is deliberate: this cache entry is build-system state, not user
        # configuration — it records the hash of the last successful run.
        set(UPDATE_DEPS_HASH ${md5_hash} CACHE STRING "Ensure we only run update_deps.py when we need to." FORCE)
        include("${UPDATE_DEPS_DIR}/helper.cmake")
    endif()
endif()
if (VULKAN_HEADERS_INSTALL_DIR)
    list(APPEND CMAKE_PREFIX_PATH ${VULKAN_HEADERS_INSTALL_DIR})
endif()
if (GOOGLETEST_INSTALL_DIR)
    list(APPEND CMAKE_PREFIX_PATH ${GOOGLETEST_INSTALL_DIR})
endif()
if (VULKAN_LOADER_INSTALL_DIR)
    list(APPEND CMAKE_PREFIX_PATH ${VULKAN_LOADER_INSTALL_DIR})
endif()

# Propagate the dependency locations to the parent scope. When cross compiling,
# find_package searches CMAKE_FIND_ROOT_PATH rather than raw prefix paths.
if (CMAKE_CROSSCOMPILING)
    set(CMAKE_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH} ${CMAKE_PREFIX_PATH} PARENT_SCOPE)
else()
    set(CMAKE_PREFIX_PATH ${CMAKE_PREFIX_PATH} PARENT_SCOPE)
endif()
diff --git a/tools/Vulkan-Tools/scripts/android.py b/tools/Vulkan-Tools/scripts/android.py
new file mode 100755
index 00000000..20cc69d4
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/android.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python3
+# Copyright (c) 2023 Valve Corporation
+# Copyright (c) 2023 LunarG, Inc.
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# NOTE: This Android documentation is crucial for understanding the layout of the NDK.
+# https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
+
+# NOTE: Environment variables we can rely on users/environments setting.
+# https://github.com/actions/runner-images/blob/main/images/linux/Ubuntu2204-Readme.md#environment-variables-2
+
+import argparse
+import os
+import sys
+import shutil
+import subprocess
+
# Build an absolute, normalized path from a path given relative to the
# repository root (one level above this scripts/ directory).
def RepoRelative(path):
    scripts_dir = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(scripts_dir, '..', path))
+
# Points to the directory containing the top level CMakeLists.txt
# (the repository root, one level above this scripts/ directory).
PROJECT_SRC_DIR = os.path.abspath(os.path.join(os.path.split(os.path.abspath(__file__))[0], '..'))
# Fail fast at import time if this script was moved or the repo layout changed.
if not os.path.isfile(f'{PROJECT_SRC_DIR}/CMakeLists.txt'):
    print(f'PROJECT_SRC_DIR invalid! {PROJECT_SRC_DIR}')
    sys.exit(1)
+
# Runs a command in a directory (project root by default, or a path given
# relative to the project root). Raises subprocess.CalledProcessError if the
# command exits with a non-zero status.
# NOTE(review): the command string is tokenized with a naive split(" "), so
# arguments containing spaces are not supported — confirm callers never pass any.
def RunShellCmd(command, start_dir = PROJECT_SRC_DIR, env=None, verbose=False):
    # Flush stdout here. Helps when debugging on CI.
    sys.stdout.flush()

    # Resolve relative start directories against the repo root.
    if start_dir != PROJECT_SRC_DIR:
        start_dir = RepoRelative(start_dir)
    cmd_list = command.split(" ")

    if verbose:
        print(f'CICMD({cmd_list}, env={env})')
    subprocess.check_call(cmd_list, cwd=start_dir, env=env)
+
# Locate the AndroidManifest.xml describing our test application;
# exits the process if it cannot be found.
def get_android_manifest() -> str:
    manifest_path = RepoRelative('cube/android/AndroidManifest.xml')
    if os.path.isfile(manifest_path):
        return manifest_path
    print(f"Unable to find manifest for APK! {manifest_path}")
    sys.exit(-1)
+
# Generate, align, and sign the VkCube APK from the CMake install tree.
# SDK_ROOT: Android SDK root (must contain platforms/android-26/android.jar).
# CMAKE_INSTALL_DIR: directory populated by 'cmake --install'.
# Exits the process if required SDK artifacts are missing.
# FIX: return annotation corrected from '-> str' to '-> None' — the function
# never returns a value (its sole caller ignores the result).
def generate_apk(SDK_ROOT : str, CMAKE_INSTALL_DIR : str) -> None:
    apk_dir = RepoRelative('build-android/bin')

    # Delete APK directory since it could contain files from old runs
    if os.path.isdir(apk_dir):
        shutil.rmtree(apk_dir)

    shutil.copytree(CMAKE_INSTALL_DIR, apk_dir)

    android_manifest = get_android_manifest()

    android_jar = f"{SDK_ROOT}/platforms/android-26/android.jar"
    if not os.path.isfile(android_jar):
        print(f"Unable to find {android_jar}!")
        sys.exit(-1)

    apk_name = 'VkCube'

    unaligned_apk = f'{apk_dir}/{apk_name}-unaligned.apk'
    test_apk = f'{apk_dir}/{apk_name}.apk'

    # Create APK
    RunShellCmd(f'aapt package -f -M {android_manifest} -I {android_jar} -F {unaligned_apk} {CMAKE_INSTALL_DIR}')

    # Align APK
    RunShellCmd(f'zipalign -f 4 {unaligned_apk} {test_apk}')

    # Create Key (If it doesn't already exist)
    # NOTE: apk_dir is already absolute, so RepoRelative is a pass-through here.
    debug_key = RepoRelative(f'{apk_dir}/debug.keystore')
    ks_pass = 'android'
    if not os.path.isfile(debug_key):
        dname = 'CN=Android-Debug,O=Android,C=US'
        RunShellCmd(f'keytool -genkey -v -keystore {debug_key} -alias androiddebugkey -storepass {ks_pass} -keypass {ks_pass} -keyalg RSA -keysize 2048 -validity 10000 -dname {dname}')

    # Sign APK
    RunShellCmd(f'apksigner sign --verbose --ks {debug_key} --ks-pass pass:{ks_pass} {test_apk}')
+
# Android APKs can contain binaries for multiple ABIs (armeabi-v7a, arm64-v8a, x86, x86_64).
# https://en.wikipedia.org/wiki/Apk_(file_format)#Package_contents
#
# As a result CMake will need to be run multiple times to create a complete test APK that can be run on any Android device.
def main():
    """Configure, build, and install VkCube for one or more Android ABIs.

    Requires ANDROID_NDK_HOME in the environment; additionally requires
    ANDROID_SDK_ROOT plus aapt/zipalign/keytool/apksigner on PATH when
    --apk is passed.
    """
    configs = ['Release', 'Debug']

    parser = argparse.ArgumentParser()
    parser.add_argument('--config', type=str, choices=configs, default=configs[0])
    # Space-separated list of ABIs, e.g. "arm64-v8a x86_64".
    parser.add_argument('--app-abi', dest='android_abi', type=str, default="arm64-v8a")
    parser.add_argument('--app-stl', dest='android_stl', type=str, choices=["c++_static", "c++_shared"], default="c++_static")
    parser.add_argument('--apk', action='store_true', help='Generate an APK as a post build step.')
    parser.add_argument('--clean', action='store_true', help='Cleans CMake build artifacts')
    args = parser.parse_args()

    cmake_config = args.config
    android_abis = args.android_abi.split(" ")
    android_stl = args.android_stl
    create_apk = args.apk
    clean = args.clean

    if "ANDROID_NDK_HOME" not in os.environ:
        print("Cannot find ANDROID_NDK_HOME!")
        sys.exit(1)

    android_ndk_home = os.environ.get('ANDROID_NDK_HOME')
    android_toolchain = f'{android_ndk_home}/build/cmake/android.toolchain.cmake'

    # The only tool we require for building is CMake/Ninja
    required_cli_tools = ['cmake', 'ninja']

    # If we are building an APK we need a few more tools.
    if create_apk:
        if "ANDROID_SDK_ROOT" not in os.environ:
            print("Cannot find ANDROID_SDK_ROOT!")
            sys.exit(1)

        android_sdk_root = os.environ.get('ANDROID_SDK_ROOT')
        print(f"ANDROID_SDK_ROOT = {android_sdk_root}")
        required_cli_tools += ['aapt', 'zipalign', 'keytool', 'apksigner']

    print(f"ANDROID_NDK_HOME = {android_ndk_home}")
    print(f"Build configured for {cmake_config} | {android_stl} | {android_abis} | APK {create_apk}")

    if not os.path.isfile(android_toolchain):
        print(f'Unable to find android.toolchain.cmake at {android_toolchain}')
        # FIX: use sys.exit consistently (the rest of this script already does;
        # the builtin exit() is intended for interactive use).
        sys.exit(-1)

    for tool in required_cli_tools:
        path = shutil.which(tool)
        if path is None:
            print(f"Unable to find {tool}!")
            sys.exit(-1)

        print(f"Using {tool} : {path}")

    cmake_install_dir = RepoRelative('build-android/libs')

    # Delete install directory since it could contain files from old runs
    if os.path.isdir(cmake_install_dir):
        print("Cleaning CMake install")
        shutil.rmtree(cmake_install_dir)

    # Configure, build, and install once per ABI; each install lands in lib/<abi>.
    for abi in android_abis:
        build_dir = RepoRelative(f'build-android/cmake/{abi}')
        lib_dir = f'lib/{abi}'

        if clean:
            print("Deleting CMakeCache.txt")

            # Delete CMakeCache.txt to ensure clean builds
            # NOTE: CMake 3.24 has --fresh which would be better to use in the future.
            cmake_cache = f'{build_dir}/CMakeCache.txt'
            if os.path.isfile(cmake_cache):
                os.remove(cmake_cache)

        cmake_cmd = f'cmake -S . -B {build_dir} -G Ninja'

        cmake_cmd += f' -D CMAKE_BUILD_TYPE={cmake_config}'
        cmake_cmd += f' -D UPDATE_DEPS=ON -D UPDATE_DEPS_DIR={build_dir}'
        cmake_cmd += f' -D CMAKE_TOOLCHAIN_FILE={android_toolchain}'
        cmake_cmd += f' -D CMAKE_ANDROID_ARCH_ABI={abi}'
        cmake_cmd += f' -D CMAKE_INSTALL_LIBDIR={lib_dir}'
        cmake_cmd += f' -D CMAKE_ANDROID_STL_TYPE={android_stl}'

        cmake_cmd += ' -D ANDROID_PLATFORM=26'
        cmake_cmd += ' -D ANDROID_USE_LEGACY_TOOLCHAIN_FILE=NO'

        RunShellCmd(cmake_cmd)

        build_cmd = f'cmake --build {build_dir}'
        RunShellCmd(build_cmd)

        install_cmd = f'cmake --install {build_dir} --prefix {cmake_install_dir}'
        RunShellCmd(install_cmd)

    if create_apk:
        generate_apk(SDK_ROOT = android_sdk_root, CMAKE_INSTALL_DIR = cmake_install_dir)
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/tools/Vulkan-Tools/scripts/common_codegen.py b/tools/Vulkan-Tools/scripts/common_codegen.py
new file mode 100644
index 00000000..d44cd643
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/common_codegen.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2017, 2019-2021 The Khronos Group Inc.
+# Copyright (c) 2015-2017, 2019-2021 Valve Corporation
+# Copyright (c) 2015-2017, 2019-2021 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+
+import os
+import sys
+import subprocess
+
# Copyright text prefixing all headers (list of strings).
# Generators join these lines to form the license banner emitted at the top
# of every generated source file.
prefixStrings = [
    '/*',
    '** Copyright (c) 2015-2017, 2019-2021 The Khronos Group Inc.',
    '** Copyright (c) 2015-2017, 2019-2021 Valve Corporation',
    '** Copyright (c) 2015-2017, 2019-2021 LunarG, Inc.',
    '** Copyright (c) 2015-2017, 2019-2021 Google Inc.',
    '**',
    '** Licensed under the Apache License, Version 2.0 (the "License");',
    '** you may not use this file except in compliance with the License.',
    '** You may obtain a copy of the License at',
    '**',
    '** http://www.apache.org/licenses/LICENSE-2.0',
    '**',
    '** Unless required by applicable law or agreed to in writing, software',
    '** distributed under the License is distributed on an "AS IS" BASIS,',
    '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
    '** See the License for the specific language governing permissions and',
    '** limitations under the License.',
    '*/',
    ''
]
+
+
# Maps the 'platform' attribute used in the Vulkan XML registry to the
# preprocessor guard macro protecting code for that platform.
platform_dict = {
    'android' : 'VK_USE_PLATFORM_ANDROID_KHR',
    'fuchsia' : 'VK_USE_PLATFORM_FUCHSIA',
    'ggp': 'VK_USE_PLATFORM_GGP',
    'ios' : 'VK_USE_PLATFORM_IOS_MVK',
    'macos' : 'VK_USE_PLATFORM_MACOS_MVK',
    'metal' : 'VK_USE_PLATFORM_METAL_EXT',
    'vi' : 'VK_USE_PLATFORM_VI_NN',
    'wayland' : 'VK_USE_PLATFORM_WAYLAND_KHR',
    'win32' : 'VK_USE_PLATFORM_WIN32_KHR',
    'xcb' : 'VK_USE_PLATFORM_XCB_KHR',
    'xlib' : 'VK_USE_PLATFORM_XLIB_KHR',
    'xlib_xrandr' : 'VK_USE_PLATFORM_XLIB_XRANDR_EXT',
    'provisional' : 'VK_ENABLE_BETA_EXTENSIONS',
    'directfb' : 'VK_USE_PLATFORM_DIRECTFB_EXT',
    'screen' : 'VK_USE_PLATFORM_SCREEN_QNX',
    'sci' : 'VK_USE_PLATFORM_SCI',
    'ohos' : 'VK_USE_PLATFORM_OHOS',
}

#
# Return appropriate feature protect string from 'platform' tag on feature.
# Returns None when the feature needs no preprocessor guard.
def GetFeatureProtect(interface):
    """Get platform protection string"""
    tagged_platform = interface.get('platform')
    if tagged_platform is None:
        # Provisional features without a platform still need the beta guard.
        if interface.get('provisional') == 'true':
            return platform_dict['provisional']
        return None
    return platform_dict[tagged_platform]
+
# Returns true if we are running in GitHub actions
# https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables
# NOTE(review): this keys off GITHUB_ACTION (the current step/action id);
# the canonical "running under Actions" flag is GITHUB_ACTIONS. Both are set
# in Actions runs, so behavior is equivalent there — confirm the intent.
def IsGHA():
    return 'GITHUB_ACTION' in os.environ
+
# Points to the directory containing the top level CMakeLists.txt
# (the repository root, one level above this scripts/ directory).
PROJECT_SRC_DIR = os.path.abspath(os.path.join(os.path.split(os.path.abspath(__file__))[0], '..'))
# Fail fast at import time if this module was moved or the repo layout changed.
if not os.path.isfile(f'{PROJECT_SRC_DIR}/CMakeLists.txt'):
    print(f'PROJECT_SRC_DIR invalid! {PROJECT_SRC_DIR}')
    sys.exit(1)
+
# Turn a repo-root-relative path into an absolute, normalized path.
def RepoRelative(path):
    repo_root = os.path.join(os.path.dirname(__file__), '..')
    return os.path.abspath(os.path.join(repo_root, path))
+
# Runs a command in a directory (project root by default, or a path given
# relative to the project root). Raises subprocess.CalledProcessError if the
# command exits with a non-zero status.
# NOTE(review): the command string is tokenized with a naive split(" "), so
# arguments containing spaces are not supported — confirm callers never pass any.
def RunShellCmd(command, start_dir = PROJECT_SRC_DIR, env=None, verbose=False):
    # Flush stdout here. Helps when debugging on CI.
    sys.stdout.flush()

    # Resolve relative start directories against the repo root.
    if start_dir != PROJECT_SRC_DIR:
        start_dir = RepoRelative(start_dir)
    cmd_list = command.split(" ")

    # Helps a lot when debugging CI issues
    if IsGHA():
        verbose = True

    if verbose:
        print(f'CICMD({cmd_list}, env={env})')
    subprocess.check_call(cmd_list, cwd=start_dir, env=env)
diff --git a/tools/Vulkan-Tools/scripts/generate_source.py b/tools/Vulkan-Tools/scripts/generate_source.py
new file mode 100755
index 00000000..59314533
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/generate_source.py
@@ -0,0 +1,279 @@
+#!/usr/bin/env python3
+# Copyright (c) 2019 The Khronos Group Inc.
+# Copyright (c) 2019 Valve Corporation
+# Copyright (c) 2019 LunarG, Inc.
+# Copyright (c) 2019 Google Inc.
+# Copyright (c) 2021-2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# Copyright (c) 2023-2023 RasterGrid Kft.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mike Schuchardt <mikes@lunarg.com>
+
+import argparse
+import filecmp
+import os
+import shutil
+import sys
+import re
+import tempfile
+import json
+import common_codegen
+
+from xml.etree import ElementTree
+
# Because we have special logic to import the Registry from input arguments and the BaseGenerator comes from the registry, we have to delay defining it until *after*
# the Registry has been imported. Yes this is awkward, but it was the least awkward way to make --verify work.
# Populated inside RunGenerators(); also read by main() for --verify/--incremental.
generators = {}
+
def RunGenerators(api: str, registry: str, video_registry: str, directory: str, styleFile: str, targetFilter: str, flatOutput: bool):
    """Run every configured code generator, optionally filtered to specific targets.

    api: target API variant ('vulkan' or 'vulkansc').
    registry: absolute path to vk.xml.
    video_registry: absolute path to video.xml.
    directory: root directory to generate into.
    styleFile: clang-format style file used to format generated code.
    targetFilter: list of generated file names to restrict generation to (or None).
    flatOutput: when True, emit all files directly into 'directory'.
    Returns 1 on an unknown target name, None otherwise.
    """
    # clang-format is optional — generation still works, just unformatted.
    # FIX: narrowed bare 'except:' (which would swallow KeyboardInterrupt)
    # and dropped a pointless f-string prefix on a literal.
    try:
        common_codegen.RunShellCmd('clang-format --version')
        has_clang_format = True
    except Exception:
        has_clang_format = False
    if not has_clang_format:
        print("WARNING: Unable to find clang-format!")

    # These live in the Vulkan-Docs repo, but are pulled in via the
    # Vulkan-Headers/registry folder
    # At runtime we inject python path to find these helper scripts
    scripts = os.path.dirname(registry)
    scripts_directory_path = os.path.dirname(os.path.abspath(__file__))
    registry_headers_path = os.path.join(scripts_directory_path, scripts)
    sys.path.insert(0, registry_headers_path)
    try:
        from reg import Registry
    except ImportError:
        print("ModuleNotFoundError: No module named 'reg'") # normal python error message
        print(f'{registry_headers_path} is not pointing to the Vulkan-Headers registry directory.')
        print("Inside Vulkan-Headers there is a registry/reg.py file that is used.")
        sys.exit(1) # Return without call stack so easy to spot error

    from base_generator import BaseGeneratorOptions
    from generators.mock_icd_generator import MockICDOutputGenerator
    from generators.vulkan_tools_helper_file_generator import HelperFileOutputGenerator
    from generators.vulkaninfo_generator import VulkanInfoGenerator

    # These set fields that are needed by both OutputGenerator and BaseGenerator,
    # but are uniform and don't need to be set at a per-generated file level
    from base_generator import (SetTargetApiName, SetMergedApiNames)
    SetTargetApiName(api)

    # Generated directory and dispatch table helper file name may be API specific (e.g. Vulkan SC)
    mock_icd_generated_directory = 'icd/generated'
    vulkaninfo_generated_directory = 'vulkaninfo/generated'

    generators.update({
        'vk_typemap_helper.h': {
            'generator' : HelperFileOutputGenerator,
            'genCombined': False,
            'directory' : mock_icd_generated_directory,
        },
        'function_declarations.h': {
            'generator' : MockICDOutputGenerator,
            'genCombined': False,
            'directory' : mock_icd_generated_directory,
        },
        'function_definitions.h': {
            'generator' : MockICDOutputGenerator,
            'genCombined': False,
            'directory' : mock_icd_generated_directory,
        },
        'vulkaninfo.hpp': {
            'generator' : VulkanInfoGenerator,
            'genCombined': False,
            'directory' : vulkaninfo_generated_directory,
        },
    })

    unknownTargets = [x for x in (targetFilter if targetFilter else []) if x not in generators.keys()]
    if unknownTargets:
        print(f'ERROR: No generator options for unknown target(s): {", ".join(unknownTargets)}', file=sys.stderr)
        return 1

    # Filter if --target is passed in
    targets = [x for x in generators.keys() if not targetFilter or x in targetFilter]

    for index, target in enumerate(targets, start=1):
        print(f'[{index}|{len(targets)}] Generating {target}')

        # First grab a class constructor object and create an instance
        generator = generators[target]['generator']
        gen = generator()

        # This code and the 'genCombined' generator metadata is used by downstream
        # users to generate code with all Vulkan APIs merged into the target API variant
        # (e.g. Vulkan SC) when needed.
        # NOTE(review): apiList is built here but never read below — confirm
        # whether extension filtering based on it was dropped intentionally.
        apiList = [api]
        if api != 'vulkan' and generators[target]['genCombined']:
            SetMergedApiNames('vulkan')
            apiList.append('vulkan')
        else:
            SetMergedApiNames(None)

        # For people who want to generate all the files in a single directory
        if flatOutput:
            outDirectory = os.path.abspath(os.path.join(directory))
        else:
            outDirectory = os.path.abspath(os.path.join(directory, generators[target]['directory']))

        options = BaseGeneratorOptions(
            customFileName = target,
            customDirectory = outDirectory)

        if not os.path.exists(outDirectory):
            os.makedirs(outDirectory)

        # Create the registry object with the specified generator and generator
        # options. The options are set before XML loading as they may affect it.
        reg = Registry(gen, options)

        # Parse the specified registry XML into an ElementTree object
        tree = ElementTree.parse(registry)

        # Load the XML tree into the registry object
        reg.loadElementTree(tree)

        # Set the path to the video registry so that videoStd is available
        reg.genOpts.videoXmlPath = video_registry

        # Finally, use the output generator to create the requested target
        reg.apiGen()

        # Run clang-format on the file
        if has_clang_format and styleFile:
            common_codegen.RunShellCmd(f'clang-format -i --style=file:{styleFile} {os.path.join(outDirectory, target)}')
+
+
def main(argv):
    """Entry point: generate, verify (--verify), or incrementally update (-i)
    the generated sources, and optionally stamp the generated version."""

    # files to exclude from --verify check
    verify_exclude = ['.clang-format']

    parser = argparse.ArgumentParser(description='Generate source code for this repository')
    parser.add_argument('registry', metavar='REGISTRY_PATH', help='path to the Vulkan-Headers registry directory')
    parser.add_argument('--api',
                        default='vulkan',
                        choices=['vulkan', 'vulkansc'],
                        help='Specify API name to generate')
    parser.add_argument('--generated-version', help='sets the header version used to generate the repo')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--target', nargs='+', help='only generate file names passed in')
    group.add_argument('-i', '--incremental', action='store_true', help='only update repo files that change')
    group.add_argument('-v', '--verify', action='store_true', help='verify repo files match generator output')
    group.add_argument('-o', action='store', dest='directory', help='Create target and related files in specified directory')
    args = parser.parse_args(argv)

    repo_dir = common_codegen.RepoRelative('.')

    # Accept either the registry directory itself or a dependency root that
    # contains Vulkan-Headers/registry (e.g. an update_deps.py install dir).
    registry = os.path.abspath(os.path.join(args.registry, 'vk.xml'))
    video_registry = os.path.abspath(os.path.join(args.registry, 'video.xml'))
    # BUGFIX: the second condition previously re-tested vk.xml ("... and not
    # os.path.isfile(registry)"); both registry files must exist at the top
    # level, otherwise fall back to the nested Vulkan-Headers layout.
    if not os.path.isfile(registry) or not os.path.isfile(video_registry):
        registry = os.path.abspath(os.path.join(args.registry, 'Vulkan-Headers/registry/vk.xml'))
        if not os.path.isfile(registry):
            print(f'cannot find vk.xml in {args.registry}')
            return -1
        video_registry = os.path.abspath(os.path.join(args.registry, 'Vulkan-Headers/registry/video.xml'))
        if not os.path.isfile(video_registry):
            print(f'{video_registry} does not exist')
            return -1

    # Need pass style file incase running with --verify and it can't find the file automatically in the temp directory
    style_file = os.path.join(repo_dir, '.clang-format')

    # get directory where generators will run
    if args.verify or args.incremental:
        # generate in temp directory so we can compare or copy later
        temp_obj = tempfile.TemporaryDirectory(prefix='vulkan_tools_codegen_')
        temp_dir = temp_obj.name
        gen_dir = temp_dir
    elif args.directory:
        gen_dir = args.directory
    else:
        # generate directly in the repo
        gen_dir = repo_dir

    RunGenerators(api=args.api, registry=registry, video_registry=video_registry, directory=gen_dir, styleFile=style_file, targetFilter=args.target, flatOutput=False)

    # optional post-generation steps
    if args.verify:
        # collect the sets of generated file names per output directory
        temp_files = {}
        repo_files = {}
        for details in generators.values():
            if details['directory'] not in temp_files:
                temp_files[details['directory']] = set()
            temp_files[details['directory']].update(set(os.listdir(os.path.join(temp_dir, details['directory']))))
            if details['directory'] not in repo_files:
                repo_files[details['directory']] = set()
            repo_files[details['directory']].update(set(os.listdir(os.path.join(repo_dir, details['directory']))) - set(verify_exclude))

        # compare contents of temp dir and repo
        files_match = True
        for filename, details in generators.items():
            if filename not in repo_files[details['directory']]:
                print('ERROR: Missing repo file', filename)
                files_match = False
            elif filename not in temp_files[details['directory']]:
                print('ERROR: Missing generator for', filename)
                files_match = False
            elif not filecmp.cmp(os.path.join(temp_dir, details['directory'], filename),
                                 os.path.join(repo_dir, details['directory'], filename),
                                 shallow=False):
                print('ERROR: Repo files do not match generator output for', filename)
                files_match = False

        # return code for test scripts
        if files_match:
            print('SUCCESS: Repo files match generator output')
            return 0
        return 1

    elif args.incremental:
        # copy missing or differing files from temp directory to repo
        for filename, details in generators.items():
            temp_filename = os.path.join(temp_dir, details['directory'], filename)
            repo_filename = os.path.join(repo_dir, details['directory'], filename)
            if not os.path.exists(repo_filename) or \
               not filecmp.cmp(temp_filename, repo_filename, shallow=False):
                print('update', repo_filename)
                shutil.copyfile(temp_filename, repo_filename)

    # write out the header version used to generate the code to a checked in CMake file
    if args.generated_version:
        json_files = []
        json_files.append(common_codegen.RepoRelative('icd/VkICD_mock_icd.json.in'))
        for json_file in json_files:
            with open(json_file) as f:
                data = json.load(f)

            data["ICD"]["api_version"] = args.generated_version

            with open(json_file, mode='w', encoding='utf-8', newline='\n') as f:
                f.write(json.dumps(data, indent=4))

        # Update the CMake project version
        with open(common_codegen.RepoRelative('CMakeLists.txt'), "r+") as f:
            data = f.read()
            f.seek(0)
            f.write(re.sub("project.*VERSION.*", f"project(Vulkan-Tools VERSION {args.generated_version})", data))
            f.truncate()

    return 0
+
# CLI entry point: forward argv (minus program name) and propagate exit code.
if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
+
diff --git a/tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py b/tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py
new file mode 100644
index 00000000..b1d81c42
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py
@@ -0,0 +1,1452 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2025 The Khronos Group Inc.
+# Copyright (c) 2015-2025 Valve Corporation
+# Copyright (c) 2015-2025 LunarG, Inc.
+# Copyright (c) 2015-2025 Google Inc.
+# Copyright (c) 2023-2025 RasterGrid Kft.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Tobin Ehlis <tobine@google.com>
+#
+# This script generates a Mock ICD that intercepts almost all Vulkan
+# functions. The mock ICD performs no real GPU work; it returns
+# hard-coded or procedurally generated data so that loaders, layers,
+# and tests can exercise the Vulkan API without physical hardware.
+
+from base_generator import BaseGenerator
+
+CUSTOM_C_INTERCEPTS = {
+'vkCreateInstance': '''
+ // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
+ // apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
+ // ICD should behave as normal.
+ if (loader_interface_version <= 4) {
+ return VK_ERROR_INCOMPATIBLE_DRIVER;
+ }
+ *pInstance = (VkInstance)CreateDispObjHandle();
+ for (auto& physical_device : physical_device_map[*pInstance])
+ physical_device = (VkPhysicalDevice)CreateDispObjHandle();
+ // TODO: If emulating specific device caps, will need to add intelligence here
+ return VK_SUCCESS;
+''',
+'vkDestroyInstance': '''
+ if (instance) {
+ for (const auto physical_device : physical_device_map.at(instance)) {
+ display_map.erase(physical_device);
+ DestroyDispObjHandle((void*)physical_device);
+ }
+ physical_device_map.erase(instance);
+ DestroyDispObjHandle((void*)instance);
+ }
+''',
+'vkAllocateCommandBuffers': '''
+ unique_lock_t lock(global_lock);
+ for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; ++i) {
+ pCommandBuffers[i] = (VkCommandBuffer)CreateDispObjHandle();
+ command_pool_buffer_map[pAllocateInfo->commandPool].push_back(pCommandBuffers[i]);
+ }
+ return VK_SUCCESS;
+''',
+'vkFreeCommandBuffers': '''
+ unique_lock_t lock(global_lock);
+ for (auto i = 0u; i < commandBufferCount; ++i) {
+ if (!pCommandBuffers[i]) {
+ continue;
+ }
+
+ for (auto& pair : command_pool_buffer_map) {
+ auto& cbs = pair.second;
+ auto it = std::find(cbs.begin(), cbs.end(), pCommandBuffers[i]);
+ if (it != cbs.end()) {
+ cbs.erase(it);
+ }
+ }
+
+ DestroyDispObjHandle((void*) pCommandBuffers[i]);
+ }
+''',
+'vkCreateCommandPool': '''
+ unique_lock_t lock(global_lock);
+ *pCommandPool = (VkCommandPool)global_unique_handle++;
+ command_pool_map[device].insert(*pCommandPool);
+ return VK_SUCCESS;
+''',
+'vkDestroyCommandPool': '''
+ // destroy command buffers for this pool
+ unique_lock_t lock(global_lock);
+ auto it = command_pool_buffer_map.find(commandPool);
+ if (it != command_pool_buffer_map.end()) {
+ for (auto& cb : it->second) {
+ DestroyDispObjHandle((void*) cb);
+ }
+ command_pool_buffer_map.erase(it);
+ }
+ command_pool_map[device].erase(commandPool);
+''',
+'vkEnumeratePhysicalDevices': '''
+ VkResult result_code = VK_SUCCESS;
+ if (pPhysicalDevices) {
+ const auto return_count = (std::min)(*pPhysicalDeviceCount, icd_physical_device_count);
+ for (uint32_t i = 0; i < return_count; ++i) pPhysicalDevices[i] = physical_device_map.at(instance)[i];
+ if (return_count < icd_physical_device_count) result_code = VK_INCOMPLETE;
+ *pPhysicalDeviceCount = return_count;
+ } else {
+ *pPhysicalDeviceCount = icd_physical_device_count;
+ }
+ return result_code;
+''',
+'vkCreateDevice': '''
+ *pDevice = (VkDevice)CreateDispObjHandle();
+ // TODO: If emulating specific device caps, will need to add intelligence here
+ return VK_SUCCESS;
+''',
+'vkDestroyDevice': '''
+ unique_lock_t lock(global_lock);
+ // First destroy sub-device objects
+ // Destroy Queues
+ for (auto queue_family_map_pair : queue_map[device]) {
+ for (auto index_queue_pair : queue_map[device][queue_family_map_pair.first]) {
+ DestroyDispObjHandle((void*)index_queue_pair.second);
+ }
+ }
+
+ for (auto& cp : command_pool_map[device]) {
+ for (auto& cb : command_pool_buffer_map[cp]) {
+ DestroyDispObjHandle((void*) cb);
+ }
+ command_pool_buffer_map.erase(cp);
+ }
+ command_pool_map[device].clear();
+
+ queue_map.erase(device);
+ buffer_map.erase(device);
+ image_memory_size_map.erase(device);
+ // Now destroy device
+ DestroyDispObjHandle((void*)device);
+ // TODO: If emulating specific device caps, will need to add intelligence here
+''',
+'vkGetDeviceQueue': '''
+ unique_lock_t lock(global_lock);
+ auto queue = queue_map[device][queueFamilyIndex][queueIndex];
+ if (queue) {
+ *pQueue = queue;
+ } else {
+ *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
+ }
+ // TODO: If emulating specific device caps, will need to add intelligence here
+ return;
+''',
+'vkGetDeviceQueue2': '''
+ GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
+ // TODO: Add further support for GetDeviceQueue2 features
+''',
+'vkEnumerateInstanceLayerProperties': '''
+ return VK_SUCCESS;
+''',
+'vkEnumerateInstanceVersion': '''
+ *pApiVersion = VK_HEADER_VERSION_COMPLETE;
+ return VK_SUCCESS;
+''',
+'vkEnumerateDeviceLayerProperties': '''
+ return VK_SUCCESS;
+''',
+'vkEnumerateInstanceExtensionProperties': '''
+ // If requesting number of extensions, return that
+ if (!pLayerName) {
+ if (!pProperties) {
+ *pPropertyCount = (uint32_t)instance_extension_map.size();
+ } else {
+ uint32_t i = 0;
+ for (const auto &name_ver_pair : instance_extension_map) {
+ if (i == *pPropertyCount) {
+ break;
+ }
+ std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+ pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+ pProperties[i].specVersion = name_ver_pair.second;
+ ++i;
+            }
+            *pPropertyCount = i;
+            if (i != instance_extension_map.size()) {
+ return VK_INCOMPLETE;
+ }
+ }
+ }
+ // If requesting extension properties, fill in data struct for number of extensions
+ return VK_SUCCESS;
+''',
+'vkEnumerateDeviceExtensionProperties': '''
+ // If requesting number of extensions, return that
+ if (!pLayerName) {
+ if (!pProperties) {
+ *pPropertyCount = (uint32_t)device_extension_map.size();
+ } else {
+ uint32_t i = 0;
+ for (const auto &name_ver_pair : device_extension_map) {
+ if (i == *pPropertyCount) {
+ break;
+ }
+ std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+ pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+ pProperties[i].specVersion = name_ver_pair.second;
+ ++i;
+ }
+ *pPropertyCount = i;
+ if (i != device_extension_map.size()) {
+ return VK_INCOMPLETE;
+ }
+ }
+ }
+ // If requesting extension properties, fill in data struct for number of extensions
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
+ // Currently always say that all present modes are supported
+ if (!pPresentModes) {
+ *pPresentModeCount = 6;
+ } else {
+ if (*pPresentModeCount >= 6) pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
+ if (*pPresentModeCount >= 5) pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
+ if (*pPresentModeCount >= 4) pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
+ if (*pPresentModeCount >= 3) pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
+ if (*pPresentModeCount >= 2) pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
+ if (*pPresentModeCount >= 1) pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
+ *pPresentModeCount = *pPresentModeCount < 6 ? *pPresentModeCount : 6;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
+ // Currently always say that RGBA8 & BGRA8 are supported
+ if (!pSurfaceFormats) {
+ *pSurfaceFormatCount = 2;
+ } else {
+ if (*pSurfaceFormatCount >= 2) {
+ pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
+ pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+ }
+ if (*pSurfaceFormatCount >= 1) {
+ pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+ pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+ }
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
+ // Currently always say that RGBA8 & BGRA8 are supported
+ if (!pSurfaceFormats) {
+ *pSurfaceFormatCount = 2;
+ } else {
+ if (*pSurfaceFormatCount >= 2) {
+ pSurfaceFormats[1].pNext = nullptr;
+ pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
+ pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+ }
+ if (*pSurfaceFormatCount >= 1) {
+            pSurfaceFormats[0].pNext = nullptr;
+ pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
+ pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+ }
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceSupportKHR': '''
+ // Currently say that all surface/queue combos are supported
+ *pSupported = VK_TRUE;
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
+ // In general just say max supported is available for requested surface
+ pSurfaceCapabilities->minImageCount = 1;
+ pSurfaceCapabilities->maxImageCount = 0;
+ pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
+ pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
+ pSurfaceCapabilities->minImageExtent.width = 1;
+ pSurfaceCapabilities->minImageExtent.height = 1;
+ pSurfaceCapabilities->maxImageExtent.width = 0xFFFF;
+ pSurfaceCapabilities->maxImageExtent.height = 0xFFFF;
+ pSurfaceCapabilities->maxImageArrayLayers = 128;
+ pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
+ VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
+ VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
+ VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
+ VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
+ pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+ pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
+ VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
+ VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
+ VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
+ pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+ VK_IMAGE_USAGE_TRANSFER_DST_BIT |
+ VK_IMAGE_USAGE_SAMPLED_BIT |
+ VK_IMAGE_USAGE_STORAGE_BIT |
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
+ GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
+
+ auto *present_mode_compatibility = lvl_find_mod_in_chain<VkSurfacePresentModeCompatibilityEXT>(pSurfaceCapabilities->pNext);
+ if (present_mode_compatibility) {
+ if (!present_mode_compatibility->pPresentModes) {
+ present_mode_compatibility->presentModeCount = 3;
+ } else {
+ // arbitrary
+ present_mode_compatibility->pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
+ present_mode_compatibility->pPresentModes[1] = VK_PRESENT_MODE_FIFO_KHR;
+ present_mode_compatibility->pPresentModes[2] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
+ }
+ }
+ return VK_SUCCESS;
+''',
+'vkGetInstanceProcAddr': '''
+ if (!negotiate_loader_icd_interface_called) {
+ loader_interface_version = 0;
+ }
+ const auto &item = name_to_funcptr_map.find(pName);
+ if (item != name_to_funcptr_map.end()) {
+ return reinterpret_cast<PFN_vkVoidFunction>(item->second);
+ }
+ // Mock should intercept all functions so if we get here just return null
+ return nullptr;
+''',
+'vkGetDeviceProcAddr': '''
+ return GetInstanceProcAddr(nullptr, pName);
+''',
+'vkGetPhysicalDeviceMemoryProperties': '''
+ pMemoryProperties->memoryTypeCount = 6;
+ // Host visible Coherent
+ pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ pMemoryProperties->memoryTypes[0].heapIndex = 0;
+ // Host visible Cached
+ pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+ pMemoryProperties->memoryTypes[1].heapIndex = 0;
+ // Device local and Host visible
+ pMemoryProperties->memoryTypes[2].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ pMemoryProperties->memoryTypes[2].heapIndex = 1;
+ // Device local lazily
+ pMemoryProperties->memoryTypes[3].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
+ pMemoryProperties->memoryTypes[3].heapIndex = 1;
+ // Device local protected
+ pMemoryProperties->memoryTypes[4].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_PROTECTED_BIT;
+ pMemoryProperties->memoryTypes[4].heapIndex = 1;
+ // Device local only
+ pMemoryProperties->memoryTypes[5].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ pMemoryProperties->memoryTypes[5].heapIndex = 1;
+ pMemoryProperties->memoryHeapCount = 2;
+ pMemoryProperties->memoryHeaps[0].flags = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT;
+ pMemoryProperties->memoryHeaps[0].size = 8000000000;
+ pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
+ pMemoryProperties->memoryHeaps[1].size = 8000000000;
+''',
+'vkGetPhysicalDeviceMemoryProperties2KHR': '''
+ GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
+''',
+'vkGetPhysicalDeviceQueueFamilyProperties': '''
+ if (pQueueFamilyProperties) {
+ std::vector<VkQueueFamilyProperties2KHR> props2(*pQueueFamilyPropertyCount, {
+ VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR});
+ GetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, props2.data());
+ for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
+ pQueueFamilyProperties[i] = props2[i].queueFamilyProperties;
+ }
+ } else {
+ GetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, nullptr);
+ }
+''',
+'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
+ if (pQueueFamilyProperties) {
+ if (*pQueueFamilyPropertyCount >= 1) {
+ auto props = &pQueueFamilyProperties[0].queueFamilyProperties;
+ props->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT
+ | VK_QUEUE_SPARSE_BINDING_BIT | VK_QUEUE_PROTECTED_BIT;
+ props->queueCount = 1;
+ props->timestampValidBits = 16;
+ props->minImageTransferGranularity = {1,1,1};
+ }
+ if (*pQueueFamilyPropertyCount >= 2) {
+ auto props = &pQueueFamilyProperties[1].queueFamilyProperties;
+ props->queueFlags = VK_QUEUE_TRANSFER_BIT | VK_QUEUE_PROTECTED_BIT | VK_QUEUE_VIDEO_DECODE_BIT_KHR;
+ props->queueCount = 1;
+ props->timestampValidBits = 16;
+ props->minImageTransferGranularity = {1,1,1};
+
+ auto status_query_props = lvl_find_mod_in_chain<VkQueueFamilyQueryResultStatusPropertiesKHR>(pQueueFamilyProperties[1].pNext);
+ if (status_query_props) {
+ status_query_props->queryResultStatusSupport = VK_TRUE;
+ }
+ auto video_props = lvl_find_mod_in_chain<VkQueueFamilyVideoPropertiesKHR>(pQueueFamilyProperties[1].pNext);
+ if (video_props) {
+ video_props->videoCodecOperations = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR;
+ }
+ }
+ if (*pQueueFamilyPropertyCount >= 3) {
+ auto props = &pQueueFamilyProperties[2].queueFamilyProperties;
+ props->queueFlags = VK_QUEUE_TRANSFER_BIT | VK_QUEUE_PROTECTED_BIT | VK_QUEUE_VIDEO_ENCODE_BIT_KHR;
+ props->queueCount = 1;
+ props->timestampValidBits = 16;
+ props->minImageTransferGranularity = {1,1,1};
+
+ auto status_query_props = lvl_find_mod_in_chain<VkQueueFamilyQueryResultStatusPropertiesKHR>(pQueueFamilyProperties[2].pNext);
+ if (status_query_props) {
+ status_query_props->queryResultStatusSupport = VK_TRUE;
+ }
+ auto video_props = lvl_find_mod_in_chain<VkQueueFamilyVideoPropertiesKHR>(pQueueFamilyProperties[2].pNext);
+ if (video_props) {
+ video_props->videoCodecOperations = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR;
+ }
+ }
+ if (*pQueueFamilyPropertyCount > 3) {
+ *pQueueFamilyPropertyCount = 3;
+ }
+ } else {
+ *pQueueFamilyPropertyCount = 3;
+ }
+''',
+'vkGetPhysicalDeviceFeatures': '''
+ uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
+ VkBool32 *bool_array = &pFeatures->robustBufferAccess;
+ SetBoolArrayTrue(bool_array, num_bools);
+''',
+'vkGetPhysicalDeviceFeatures2KHR': '''
+ GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
+ uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
+ VkBool32* feat_bools = nullptr;
+ auto vk_1_1_features = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan11Features>(pFeatures->pNext);
+ if (vk_1_1_features) {
+ vk_1_1_features->protectedMemory = VK_TRUE;
+ }
+ auto vk_1_3_features = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan13Features>(pFeatures->pNext);
+ if (vk_1_3_features) {
+ vk_1_3_features->synchronization2 = VK_TRUE;
+ }
+ auto prot_features = lvl_find_mod_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pFeatures->pNext);
+ if (prot_features) {
+ prot_features->protectedMemory = VK_TRUE;
+ }
+ auto sync2_features = lvl_find_mod_in_chain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pFeatures->pNext);
+ if (sync2_features) {
+ sync2_features->synchronization2 = VK_TRUE;
+ }
+ auto video_maintenance1_features = lvl_find_mod_in_chain<VkPhysicalDeviceVideoMaintenance1FeaturesKHR>(pFeatures->pNext);
+ if (video_maintenance1_features) {
+ video_maintenance1_features->videoMaintenance1 = VK_TRUE;
+ }
+ const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
+ if (desc_idx_features) {
+ const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
+ num_bools = bool_size/sizeof(VkBool32);
+ feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
+ SetBoolArrayTrue(feat_bools, num_bools);
+ }
+ const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
+ if (blendop_features) {
+ const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
+ num_bools = bool_size/sizeof(VkBool32);
+ feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
+ SetBoolArrayTrue(feat_bools, num_bools);
+ }
+ const auto *host_image_copy_features = lvl_find_in_chain<VkPhysicalDeviceHostImageCopyFeaturesEXT>(pFeatures->pNext);
+ if (host_image_copy_features) {
+ feat_bools = (VkBool32*)&host_image_copy_features->hostImageCopy;
+ SetBoolArrayTrue(feat_bools, 1);
+ }
+''',
+'vkGetPhysicalDeviceFormatProperties': '''
+ if (VK_FORMAT_UNDEFINED == format) {
+ *pFormatProperties = { 0x0, 0x0, 0x0 };
+ } else {
+ // Default to a color format, skip DS bit
+ *pFormatProperties = { 0x00FFFDFF, 0x00FFFDFF, 0x00FFFDFF };
+ switch (format) {
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D32_SFLOAT:
+ case VK_FORMAT_S8_UINT:
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ // Don't set color bits for DS formats
+ *pFormatProperties = { 0x00FFFE7F, 0x00FFFE7F, 0x00FFFE7F };
+ break;
+ case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+ case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
+ case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
+ case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
+ case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM:
+ // Set decode/encode bits for these formats
+ *pFormatProperties = { 0x1EFFFDFF, 0x1EFFFDFF, 0x00FFFDFF };
+ break;
+ default:
+ break;
+ }
+ }
+''',
+'vkGetPhysicalDeviceFormatProperties2KHR': '''
+ GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
+ VkFormatProperties3KHR *props_3 = lvl_find_mod_in_chain<VkFormatProperties3KHR>(pFormatProperties->pNext);
+ if (props_3) {
+ props_3->linearTilingFeatures = pFormatProperties->formatProperties.linearTilingFeatures;
+ props_3->optimalTilingFeatures = pFormatProperties->formatProperties.optimalTilingFeatures;
+ props_3->bufferFeatures = pFormatProperties->formatProperties.bufferFeatures;
+ props_3->optimalTilingFeatures |= VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT;
+ }
+''',
+'vkGetPhysicalDeviceImageFormatProperties': '''
+ // A hardcoded unsupported format
+ if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
+ return VK_ERROR_FORMAT_NOT_SUPPORTED;
+ }
+
+ // TODO: Just hard-coding some values for now
+ // TODO: If tiling is linear, limit the mips, levels, & sample count
+ if (VK_IMAGE_TILING_LINEAR == tiling) {
+ *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
+ } else {
+ // We hard-code support for all sample counts except 64 bits.
+ *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
+ auto *external_image_prop = lvl_find_mod_in_chain<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
+ auto *external_image_format = lvl_find_in_chain<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
+ if (external_image_prop && external_image_format) {
+ external_image_prop->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT;
+ external_image_prop->externalMemoryProperties.compatibleHandleTypes = external_image_format->handleType;
+ }
+
+ GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSparseImageFormatProperties': '''
+ if (!pProperties) {
+ *pPropertyCount = 1;
+ } else {
+ // arbitrary
+ pProperties->imageGranularity = {4, 4, 4};
+ pProperties->flags = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT;
+ switch (format) {
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_D32_SFLOAT:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ break;
+ case VK_FORMAT_S8_UINT:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+ break;
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ break;
+ default:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ break;
+ }
+ }
+''',
+'vkGetPhysicalDeviceSparseImageFormatProperties2KHR': '''
+ if (pPropertyCount && pProperties) {
+ GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, pFormatInfo->format, pFormatInfo->type, pFormatInfo->samples, pFormatInfo->usage, pFormatInfo->tiling, pPropertyCount, &pProperties->properties);
+ } else {
+ GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, pFormatInfo->format, pFormatInfo->type, pFormatInfo->samples, pFormatInfo->usage, pFormatInfo->tiling, pPropertyCount, nullptr);
+ }
+''',
+'vkGetPhysicalDeviceProperties': '''
+ pProperties->apiVersion = VK_HEADER_VERSION_COMPLETE;
+ pProperties->driverVersion = 1;
+ pProperties->vendorID = 0xba5eba11;
+ pProperties->deviceID = 0xf005ba11;
+ pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
+ //std::string devName = "Vulkan Mock Device";
+ strcpy(pProperties->deviceName, "Vulkan Mock Device");
+ pProperties->pipelineCacheUUID[0] = 18;
+ pProperties->limits = SetLimits(&pProperties->limits);
+ pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
+''',
+'vkGetPhysicalDeviceProperties2KHR': '''
+    // The only values that need to be set are those the Profile layer can't set
+ // see https://github.com/KhronosGroup/Vulkan-Profiles/issues/352
+ // All values set are arbitrary
+ GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
+
+ auto *props_11 = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan11Properties>(pProperties->pNext);
+ if (props_11) {
+ props_11->protectedNoFault = VK_FALSE;
+ }
+
+ auto *props_12 = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan12Properties>(pProperties->pNext);
+ if (props_12) {
+ props_12->denormBehaviorIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ props_12->roundingModeIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ }
+
+ auto *props_13 = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan13Properties>(pProperties->pNext);
+ if (props_13) {
+ props_13->storageTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ props_13->uniformTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ props_13->storageTexelBufferOffsetAlignmentBytes = 16;
+ props_13->uniformTexelBufferOffsetAlignmentBytes = 16;
+ }
+
+ auto *protected_memory_props = lvl_find_mod_in_chain<VkPhysicalDeviceProtectedMemoryProperties>(pProperties->pNext);
+ if (protected_memory_props) {
+ protected_memory_props->protectedNoFault = VK_FALSE;
+ }
+
+ auto *float_controls_props = lvl_find_mod_in_chain<VkPhysicalDeviceFloatControlsProperties>(pProperties->pNext);
+ if (float_controls_props) {
+ float_controls_props->denormBehaviorIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ float_controls_props->roundingModeIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ }
+
+ auto *conservative_raster_props = lvl_find_mod_in_chain<VkPhysicalDeviceConservativeRasterizationPropertiesEXT>(pProperties->pNext);
+ if (conservative_raster_props) {
+ conservative_raster_props->primitiveOverestimationSize = 0.00195313f;
+ conservative_raster_props->conservativePointAndLineRasterization = VK_TRUE;
+ conservative_raster_props->degenerateTrianglesRasterized = VK_TRUE;
+ conservative_raster_props->degenerateLinesRasterized = VK_TRUE;
+ }
+
+ auto *rt_pipeline_props = lvl_find_mod_in_chain<VkPhysicalDeviceRayTracingPipelinePropertiesKHR>(pProperties->pNext);
+ if (rt_pipeline_props) {
+ rt_pipeline_props->shaderGroupHandleSize = 32;
+ rt_pipeline_props->shaderGroupBaseAlignment = 64;
+ rt_pipeline_props->shaderGroupHandleCaptureReplaySize = 32;
+ }
+
+ auto *rt_pipeline_nv_props = lvl_find_mod_in_chain<VkPhysicalDeviceRayTracingPropertiesNV>(pProperties->pNext);
+ if (rt_pipeline_nv_props) {
+ rt_pipeline_nv_props->shaderGroupHandleSize = 32;
+ rt_pipeline_nv_props->shaderGroupBaseAlignment = 64;
+ }
+
+ auto *texel_buffer_props = lvl_find_mod_in_chain<VkPhysicalDeviceTexelBufferAlignmentProperties>(pProperties->pNext);
+ if (texel_buffer_props) {
+ texel_buffer_props->storageTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ texel_buffer_props->uniformTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ texel_buffer_props->storageTexelBufferOffsetAlignmentBytes = 16;
+ texel_buffer_props->uniformTexelBufferOffsetAlignmentBytes = 16;
+ }
+
+ auto *descriptor_buffer_props = lvl_find_mod_in_chain<VkPhysicalDeviceDescriptorBufferPropertiesEXT>(pProperties->pNext);
+ if (descriptor_buffer_props) {
+ descriptor_buffer_props->combinedImageSamplerDescriptorSingleArray = VK_TRUE;
+ descriptor_buffer_props->bufferlessPushDescriptors = VK_TRUE;
+ descriptor_buffer_props->allowSamplerImageViewPostSubmitCreation = VK_TRUE;
+ descriptor_buffer_props->descriptorBufferOffsetAlignment = 4;
+ }
+
+ auto *mesh_shader_props = lvl_find_mod_in_chain<VkPhysicalDeviceMeshShaderPropertiesEXT>(pProperties->pNext);
+ if (mesh_shader_props) {
+ mesh_shader_props->meshOutputPerVertexGranularity = 32;
+ mesh_shader_props->meshOutputPerPrimitiveGranularity = 32;
+ mesh_shader_props->prefersLocalInvocationVertexOutput = VK_TRUE;
+ mesh_shader_props->prefersLocalInvocationPrimitiveOutput = VK_TRUE;
+ mesh_shader_props->prefersCompactVertexOutput = VK_TRUE;
+ mesh_shader_props->prefersCompactPrimitiveOutput = VK_TRUE;
+ }
+
+ auto *fragment_density_map2_props = lvl_find_mod_in_chain<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT>(pProperties->pNext);
+ if (fragment_density_map2_props) {
+ fragment_density_map2_props->subsampledLoads = VK_FALSE;
+ fragment_density_map2_props->subsampledCoarseReconstructionEarlyAccess = VK_FALSE;
+ fragment_density_map2_props->maxSubsampledArrayLayers = 2;
+ fragment_density_map2_props->maxDescriptorSetSubsampledSamplers = 1;
+ }
+
+ auto *maintenance3_props = lvl_find_mod_in_chain<VkPhysicalDeviceMaintenance3Properties>(pProperties->pNext);
+ if (maintenance3_props) {
+ maintenance3_props->maxMemoryAllocationSize = 1073741824;
+ maintenance3_props->maxPerSetDescriptors = 1024;
+ }
+
+ const uint32_t num_copy_layouts = 5;
+ const VkImageLayout HostCopyLayouts[]{
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_GENERAL,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+ VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+ };
+
+ auto *host_image_copy_props = lvl_find_mod_in_chain<VkPhysicalDeviceHostImageCopyPropertiesEXT>(pProperties->pNext);
+ if (host_image_copy_props){
+ if (host_image_copy_props->pCopyDstLayouts == nullptr) host_image_copy_props->copyDstLayoutCount = num_copy_layouts;
+ else {
+ uint32_t num_layouts = (std::min)(host_image_copy_props->copyDstLayoutCount, num_copy_layouts);
+ for (uint32_t i = 0; i < num_layouts; i++) {
+ host_image_copy_props->pCopyDstLayouts[i] = HostCopyLayouts[i];
+ }
+ }
+ if (host_image_copy_props->pCopySrcLayouts == nullptr) host_image_copy_props->copySrcLayoutCount = num_copy_layouts;
+ else {
+ uint32_t num_layouts = (std::min)(host_image_copy_props->copySrcLayoutCount, num_copy_layouts);
+ for (uint32_t i = 0; i < num_layouts; i++) {
+ host_image_copy_props->pCopySrcLayouts[i] = HostCopyLayouts[i];
+ }
+ }
+ }
+
+ auto *driver_properties = lvl_find_mod_in_chain<VkPhysicalDeviceDriverProperties>(pProperties->pNext);
+ if (driver_properties) {
+ std::strncpy(driver_properties->driverName, "Vulkan Mock Device", VK_MAX_DRIVER_NAME_SIZE);
+#if defined(GIT_BRANCH_NAME) && defined(GIT_TAG_INFO)
+ std::strncpy(driver_properties->driverInfo, "Branch: " GIT_BRANCH_NAME " Tag Info: " GIT_TAG_INFO, VK_MAX_DRIVER_INFO_SIZE);
+#else
+ std::strncpy(driver_properties->driverInfo, "Branch: --unknown-- Tag Info: --unknown--", VK_MAX_DRIVER_INFO_SIZE);
+#endif
+ }
+
+ auto *layered_properties = lvl_find_mod_in_chain<VkPhysicalDeviceLayeredApiPropertiesListKHR>(pProperties->pNext);
+ if (layered_properties) {
+ layered_properties->layeredApiCount = 1;
+ if (layered_properties->pLayeredApis) {
+ layered_properties->pLayeredApis[0] = VkPhysicalDeviceLayeredApiPropertiesKHR{
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR,
+ nullptr,
+ 0xba5eba11,
+ 0xf005ba11,
+ VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR
+ };
+ std::strncpy(layered_properties->pLayeredApis[0].deviceName, "Fake Driver", VK_MAX_PHYSICAL_DEVICE_NAME_SIZE);
+ }
+ }
+''',
+'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
+ // Hard code support for all handle types and features
+ pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
+ pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
+ pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
+''',
+'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
+ GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+''',
+'vkGetPhysicalDeviceExternalFenceProperties':'''
+ // Hard-code support for all handle types and features
+ pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
+ pExternalFenceProperties->compatibleHandleTypes = 0xF;
+ pExternalFenceProperties->externalFenceFeatures = 0x3;
+''',
+'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
+ GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+''',
+'vkGetPhysicalDeviceExternalBufferProperties':'''
+ constexpr VkExternalMemoryHandleTypeFlags supported_flags = 0x1FF;
+ if (pExternalBufferInfo->handleType & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
+ // Can't have dedicated memory with AHB
+ pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT;
+ pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = pExternalBufferInfo->handleType;
+ pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = pExternalBufferInfo->handleType;
+ } else if (pExternalBufferInfo->handleType & supported_flags) {
+ pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
+ pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = supported_flags;
+ pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = supported_flags;
+ } else {
+ pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0;
+ pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
+ // According to spec, handle type is always compatible with itself. Even if export/import
+ // not supported, it's important to properly implement self-compatibility property since
+ // application's control flow can rely on this.
+ pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = pExternalBufferInfo->handleType;
+ }
+''',
+'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
+ GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+''',
+'vkGetBufferMemoryRequirements': '''
+ // TODO: Just hard-coding reqs for now
+ pMemoryRequirements->size = 4096;
+ pMemoryRequirements->alignment = 1;
+ pMemoryRequirements->memoryTypeBits = 0xFFFF;
+ // Return a better size based on the buffer size from the create info.
+ unique_lock_t lock(global_lock);
+ auto d_iter = buffer_map.find(device);
+ if (d_iter != buffer_map.end()) {
+ auto iter = d_iter->second.find(buffer);
+ if (iter != d_iter->second.end()) {
+ pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
+ }
+ }
+''',
+'vkGetBufferMemoryRequirements2KHR': '''
+ GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+''',
+'vkGetDeviceBufferMemoryRequirements': '''
+ // TODO: Just hard-coding reqs for now
+ pMemoryRequirements->memoryRequirements.alignment = 1;
+ pMemoryRequirements->memoryRequirements.memoryTypeBits = 0xFFFF;
+
+ // Return a size based on the buffer size from the create info.
+ pMemoryRequirements->memoryRequirements.size = ((pInfo->pCreateInfo->size + 4095) / 4096) * 4096;
+''',
+'vkGetDeviceBufferMemoryRequirementsKHR': '''
+ GetDeviceBufferMemoryRequirements(device, pInfo, pMemoryRequirements);
+''',
+'vkGetImageMemoryRequirements': '''
+ pMemoryRequirements->size = 0;
+ pMemoryRequirements->alignment = 1;
+
+ unique_lock_t lock(global_lock);
+ auto d_iter = image_memory_size_map.find(device);
+ if(d_iter != image_memory_size_map.end()){
+ auto iter = d_iter->second.find(image);
+ if (iter != d_iter->second.end()) {
+ pMemoryRequirements->size = iter->second;
+ }
+ }
+ // Here we hard-code that the memory type at index 3 doesn't support this image.
+ pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
+''',
+'vkGetImageMemoryRequirements2KHR': '''
+ GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
+''',
+'vkGetDeviceImageMemoryRequirements': '''
+ pMemoryRequirements->memoryRequirements.size = GetImageSizeFromCreateInfo(pInfo->pCreateInfo);
+ pMemoryRequirements->memoryRequirements.alignment = 1;
+ // Here we hard-code that the memory type at index 3 doesn't support this image.
+ pMemoryRequirements->memoryRequirements.memoryTypeBits = 0xFFFF & ~(0x1 << 3);
+''',
+'vkGetDeviceImageMemoryRequirementsKHR': '''
+ GetDeviceImageMemoryRequirements(device, pInfo, pMemoryRequirements);
+''',
+'vkMapMemory': '''
+ unique_lock_t lock(global_lock);
+ if (VK_WHOLE_SIZE == size) {
+ if (allocated_memory_size_map.count(memory) != 0)
+ size = allocated_memory_size_map[memory] - offset;
+ else
+ size = 0x10000;
+ }
+ void* map_addr = malloc((size_t)size);
+ mapped_memory_map[memory].push_back(map_addr);
+ *ppData = map_addr;
+ return VK_SUCCESS;
+''',
+'vkMapMemory2KHR': '''
+ return MapMemory(device, pMemoryMapInfo->memory, pMemoryMapInfo->offset, pMemoryMapInfo->size, pMemoryMapInfo->flags, ppData);
+''',
+'vkUnmapMemory': '''
+ unique_lock_t lock(global_lock);
+ for (auto map_addr : mapped_memory_map[memory]) {
+ free(map_addr);
+ }
+ mapped_memory_map.erase(memory);
+''',
+'vkUnmapMemory2KHR': '''
+ UnmapMemory(device, pMemoryUnmapInfo->memory);
+ return VK_SUCCESS;
+''',
+'vkGetImageSubresourceLayout': '''
+ // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
+ *pLayout = VkSubresourceLayout(); // Default constructor zero values.
+''',
+'vkCreateSwapchainKHR': '''
+ unique_lock_t lock(global_lock);
+ *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
+ for(uint32_t i = 0; i < icd_swapchain_image_count; ++i){
+ swapchain_image_map[*pSwapchain][i] = (VkImage)global_unique_handle++;
+ }
+ return VK_SUCCESS;
+''',
+'vkDestroySwapchainKHR': '''
+ unique_lock_t lock(global_lock);
+ swapchain_image_map.clear();
+''',
+'vkGetSwapchainImagesKHR': '''
+ if (!pSwapchainImages) {
+ *pSwapchainImageCount = icd_swapchain_image_count;
+ } else {
+ unique_lock_t lock(global_lock);
+ for (uint32_t img_i = 0; img_i < (std::min)(*pSwapchainImageCount, icd_swapchain_image_count); ++img_i){
+ pSwapchainImages[img_i] = swapchain_image_map.at(swapchain)[img_i];
+ }
+
+ if (*pSwapchainImageCount < icd_swapchain_image_count) return VK_INCOMPLETE;
+ else if (*pSwapchainImageCount > icd_swapchain_image_count) *pSwapchainImageCount = icd_swapchain_image_count;
+ }
+ return VK_SUCCESS;
+''',
+'vkAcquireNextImageKHR': '''
+ *pImageIndex = 0;
+ return VK_SUCCESS;
+''',
+'vkAcquireNextImage2KHR': '''
+ *pImageIndex = 0;
+ return VK_SUCCESS;
+''',
+'vkCreateBuffer': '''
+ unique_lock_t lock(global_lock);
+ *pBuffer = (VkBuffer)global_unique_handle++;
+ buffer_map[device][*pBuffer] = {
+ pCreateInfo->size,
+ current_available_address
+ };
+ current_available_address += pCreateInfo->size;
+ // Always align to next 64-bit pointer
+ const uint64_t alignment = current_available_address % 64;
+ if (alignment != 0) {
+ current_available_address += (64 - alignment);
+ }
+ return VK_SUCCESS;
+''',
+'vkDestroyBuffer': '''
+ unique_lock_t lock(global_lock);
+ buffer_map[device].erase(buffer);
+''',
+'vkCreateImage': '''
+ unique_lock_t lock(global_lock);
+ *pImage = (VkImage)global_unique_handle++;
+ image_memory_size_map[device][*pImage] = GetImageSizeFromCreateInfo(pCreateInfo);
+ return VK_SUCCESS;
+''',
+'vkDestroyImage': '''
+ unique_lock_t lock(global_lock);
+ image_memory_size_map[device].erase(image);
+''',
+'vkEnumeratePhysicalDeviceGroupsKHR': '''
+ if (!pPhysicalDeviceGroupProperties) {
+ *pPhysicalDeviceGroupCount = 1;
+ } else {
+ // arbitrary
+ pPhysicalDeviceGroupProperties->physicalDeviceCount = 1;
+ pPhysicalDeviceGroupProperties->physicalDevices[0] = physical_device_map.at(instance)[0];
+ pPhysicalDeviceGroupProperties->subsetAllocation = VK_FALSE;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceMultisamplePropertiesEXT': '''
+ if (pMultisampleProperties) {
+ // arbitrary
+ pMultisampleProperties->maxSampleLocationGridSize = {32, 32};
+ }
+''',
+'vkGetPhysicalDeviceFragmentShadingRatesKHR': '''
+ if (!pFragmentShadingRates) {
+ *pFragmentShadingRateCount = 1;
+ } else {
+ // arbitrary
+ pFragmentShadingRates->sampleCounts = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
+ pFragmentShadingRates->fragmentSize = {8, 8};
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceCalibrateableTimeDomainsEXT': '''
+ if (!pTimeDomains) {
+ *pTimeDomainCount = 1;
+ } else {
+ // arbitrary
+ *pTimeDomains = VK_TIME_DOMAIN_DEVICE_EXT;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceCalibrateableTimeDomainsKHR': '''
+ if (!pTimeDomains) {
+ *pTimeDomainCount = 1;
+ } else {
+ // arbitrary
+ *pTimeDomains = VK_TIME_DOMAIN_DEVICE_KHR;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetFenceWin32HandleKHR': '''
+ *pHandle = (HANDLE)0x12345678;
+ return VK_SUCCESS;
+''',
+'vkGetFenceFdKHR': '''
+ *pFd = 0x42;
+ return VK_SUCCESS;
+''',
+'vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR': '''
+ if (!pCounters) {
+ *pCounterCount = 3;
+ } else {
+ if (*pCounterCount == 0){
+ return VK_INCOMPLETE;
+ }
+ // arbitrary
+ pCounters[0].unit = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR;
+ pCounters[0].scope = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR;
+ pCounters[0].storage = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR;
+ pCounters[0].uuid[0] = 0x01;
+ if (*pCounterCount == 1){
+ return VK_INCOMPLETE;
+ }
+ pCounters[1].unit = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR;
+ pCounters[1].scope = VK_QUERY_SCOPE_RENDER_PASS_KHR;
+ pCounters[1].storage = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR;
+ pCounters[1].uuid[0] = 0x02;
+ if (*pCounterCount == 2){
+ return VK_INCOMPLETE;
+ }
+ pCounters[2].unit = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR;
+ pCounters[2].scope = VK_QUERY_SCOPE_COMMAND_KHR;
+ pCounters[2].storage = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR;
+ pCounters[2].uuid[0] = 0x03;
+ *pCounterCount = 3;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR': '''
+ if (pNumPasses) {
+ // arbitrary
+ *pNumPasses = 1;
+ }
+''',
+'vkGetShaderModuleIdentifierEXT': '''
+ if (pIdentifier) {
+ // arbitrary
+ pIdentifier->identifierSize = 1;
+ pIdentifier->identifier[0] = 0x01;
+ }
+''',
+'vkGetImageSparseMemoryRequirements': '''
+ if (!pSparseMemoryRequirements) {
+ *pSparseMemoryRequirementCount = 1;
+ } else {
+ // arbitrary
+ pSparseMemoryRequirements->imageMipTailFirstLod = 0;
+ pSparseMemoryRequirements->imageMipTailSize = 8;
+ pSparseMemoryRequirements->imageMipTailOffset = 0;
+ pSparseMemoryRequirements->imageMipTailStride = 4;
+ pSparseMemoryRequirements->formatProperties.imageGranularity = {4, 4, 4};
+ pSparseMemoryRequirements->formatProperties.flags = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT;
+ // Would need to track the VkImage to know format for better value here
+ pSparseMemoryRequirements->formatProperties.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT;
+ }
+
+''',
+'vkGetImageSparseMemoryRequirements2KHR': '''
+ if (pSparseMemoryRequirementCount && pSparseMemoryRequirements) {
+ GetImageSparseMemoryRequirements(device, pInfo->image, pSparseMemoryRequirementCount, &pSparseMemoryRequirements->memoryRequirements);
+ } else {
+ GetImageSparseMemoryRequirements(device, pInfo->image, pSparseMemoryRequirementCount, nullptr);
+ }
+''',
+'vkGetBufferDeviceAddress': '''
+ VkDeviceAddress address = 0;
+ auto d_iter = buffer_map.find(device);
+ if (d_iter != buffer_map.end()) {
+ auto iter = d_iter->second.find(pInfo->buffer);
+ if (iter != d_iter->second.end()) {
+ address = iter->second.address;
+ }
+ }
+ return address;
+''',
+'vkGetBufferDeviceAddressKHR': '''
+ return GetBufferDeviceAddress(device, pInfo);
+''',
+'vkGetBufferDeviceAddressEXT': '''
+ return GetBufferDeviceAddress(device, pInfo);
+''',
+'vkGetDescriptorSetLayoutSizeEXT': '''
+ // Need to give something non-zero
+ *pLayoutSizeInBytes = 4;
+''',
+'vkGetAccelerationStructureBuildSizesKHR': '''
+ // arbitrary
+ pSizeInfo->accelerationStructureSize = 4;
+ pSizeInfo->updateScratchSize = 4;
+ pSizeInfo->buildScratchSize = 4;
+''',
+'vkGetAccelerationStructureMemoryRequirementsNV': '''
+ // arbitrary
+ pMemoryRequirements->memoryRequirements.size = 4096;
+ pMemoryRequirements->memoryRequirements.alignment = 1;
+ pMemoryRequirements->memoryRequirements.memoryTypeBits = 0xFFFF;
+''',
+'vkGetAccelerationStructureDeviceAddressKHR': '''
+ // arbitrary - need to be aligned to 256 bytes
+ return 0x262144;
+''',
+'vkGetVideoSessionMemoryRequirementsKHR': '''
+ if (!pMemoryRequirements) {
+ *pMemoryRequirementsCount = 1;
+ } else {
+ // arbitrary
+ pMemoryRequirements[0].memoryBindIndex = 0;
+ pMemoryRequirements[0].memoryRequirements.size = 4096;
+ pMemoryRequirements[0].memoryRequirements.alignment = 1;
+ pMemoryRequirements[0].memoryRequirements.memoryTypeBits = 0xFFFF;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR': '''
+ if (!pProperties) {
+ *pPropertyCount = 2;
+ } else {
+ // arbitrary
+ pProperties[0].MSize = 16;
+ pProperties[0].NSize = 16;
+ pProperties[0].KSize = 16;
+ pProperties[0].AType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].BType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].CType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].ResultType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].saturatingAccumulation = VK_FALSE;
+ pProperties[0].scope = VK_SCOPE_SUBGROUP_KHR;
+
+ pProperties[1] = pProperties[0];
+ pProperties[1].scope = VK_SCOPE_DEVICE_KHR;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceVideoCapabilitiesKHR': '''
+ return VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR;
+''',
+'vkGetPhysicalDeviceVideoFormatPropertiesKHR': '''
+ return VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR;
+''',
+'vkGetDescriptorSetLayoutSupport':'''
+ if (pSupport) {
+ pSupport->supported = VK_TRUE;
+ }
+''',
+'vkGetDescriptorSetLayoutSupportKHR':'''
+ GetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+''',
+'vkGetRenderAreaGranularity': '''
+ pGranularity->width = 1;
+ pGranularity->height = 1;
+''',
+'vkGetMemoryFdKHR': '''
+ *pFd = 1;
+ return VK_SUCCESS;
+''',
+'vkGetMemoryHostPointerPropertiesEXT': '''
+ pMemoryHostPointerProperties->memoryTypeBits = 1 << 5; // DEVICE_LOCAL only type
+ return VK_SUCCESS;
+''',
+'vkGetAndroidHardwareBufferPropertiesANDROID': '''
+ pProperties->allocationSize = 65536;
+ pProperties->memoryTypeBits = 1 << 5; // DEVICE_LOCAL only type
+
+ auto *format_prop = lvl_find_mod_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
+ if (format_prop) {
+ // Likely using this format
+ format_prop->format = VK_FORMAT_R8G8B8A8_UNORM;
+ format_prop->externalFormat = 37;
+ }
+
+ auto *format_resolve_prop = lvl_find_mod_in_chain<VkAndroidHardwareBufferFormatResolvePropertiesANDROID>(pProperties->pNext);
+ if (format_resolve_prop) {
+ format_resolve_prop->colorAttachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceDisplayPropertiesKHR': '''
+ if (!pProperties) {
+ *pPropertyCount = 1;
+ } else {
+ unique_lock_t lock(global_lock);
+ pProperties[0].display = (VkDisplayKHR)global_unique_handle++;
+ display_map[physicalDevice].insert(pProperties[0].display);
+ }
+ return VK_SUCCESS;
+''',
+'vkRegisterDisplayEventEXT': '''
+ unique_lock_t lock(global_lock);
+ *pFence = (VkFence)global_unique_handle++;
+ return VK_SUCCESS;
+''',
+'vkQueueSubmit': '''
+ // Special way to cause DEVICE_LOST
+ // Picked VkExportFenceCreateInfo because needed some struct that wouldn't get cleared by validation Safe Struct
+ // ... TODO - It would be MUCH nicer to have a layer or other setting control when this occurred
+ // For now this is used to allow Validation Layers test reacting to device losts
+ if (submitCount > 0 && pSubmits) {
+ auto pNext = reinterpret_cast<const VkBaseInStructure *>(pSubmits[0].pNext);
+ if (pNext && pNext->sType == VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO && pNext->pNext == nullptr) {
+ return VK_ERROR_DEVICE_LOST;
+ }
+ }
+ return VK_SUCCESS;
+''',
+'vkGetMemoryWin32HandlePropertiesKHR': '''
+ pMemoryWin32HandleProperties->memoryTypeBits = 0xFFFF;
+ return VK_SUCCESS;
+''',
+'vkCreatePipelineBinariesKHR': '''
+ unique_lock_t lock(global_lock);
+ if (pBinaries->pPipelineBinaries != nullptr)
+ {
+ for (uint32_t i = 0; i < pBinaries->pipelineBinaryCount; ++i) {
+ pBinaries->pPipelineBinaries[i] = (VkPipelineBinaryKHR)global_unique_handle++;
+ }
+ }
+ else
+ {
+ // In this case, we need to return a return count, let's set it to 3
+ pBinaries->pipelineBinaryCount = 3;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPipelineKeyKHR': '''
+ if (pPipelineKey != nullptr)
+ {
+ pPipelineKey->keySize = 16;
+ std::memset(pPipelineKey->key, 0x12, pPipelineKey->keySize);
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPipelineBinaryDataKHR': '''
+ static uint32_t fake_size = 64;
+ if (pPipelineBinaryDataSize != nullptr)
+ {
+ if (pPipelineBinaryData == nullptr)
+ {
+ *pPipelineBinaryDataSize = fake_size;
+ }
+ else
+ {
+ std::memset(pPipelineBinaryData, 0xABCD, fake_size);
+ }
+ }
+ return VK_SUCCESS;
+'''
+}
+
+# MockICDOutputGenerator
+# Generates a mock vulkan ICD.
+# This is intended to be a minimal replacement for a vulkan device in order
+# to enable testing of Vulkan applications and layers
+#
class MockICDOutputGenerator(BaseGenerator):
    """Generates the mock Vulkan ICD sources from the Khronos XML API registry.

    The mock ICD is a minimal replacement for a real Vulkan device so that
    applications and layers can be tested without hardware.  A single
    instance of this generator emits either ``function_declarations.h`` or
    the matching definitions file, selected by ``self.filename`` in
    ``generate()``.  Registry data (``self.vk``) is populated by
    BaseGenerator before ``generate()`` is called.
    """
    def __init__(self):
        BaseGenerator.__init__(self)

        # Ignore extensions that ICDs should not implement or are not safe to report
        self.ignore_exts = ['VK_EXT_validation_cache', 'VK_KHR_portability_subset']

        # Dispatchable handles
        self.dispatchable_handles = ['VkInstance','VkPhysicalDevice', 'VkDevice', 'VkCommandBuffer', 'VkQueue']

    def generate_function_declarations(self, out):
        """Append the content of function_declarations.h to ``out``:
        instance/device extension-name->version maps, a static declaration
        for every Vulkan command (with the 'vk' prefix stripped from the
        name), and a name->funcptr map used for *ProcAddr lookup."""

        out.append('#include <stdint.h>\n')
        out.append('#include <cstring>\n')
        out.append('#include <string>\n')
        out.append('#include <unordered_map>\n')
        out.append('#include <vulkan/vulkan.h>\n')
        out.append('\n')
        out.append('namespace vkmock {\n')
        out.append('// Map of instance extension name to version\n')
        out.append('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {\n')
        for ext in [x for x in self.vk.extensions.values() if x.instance and x.name not in self.ignore_exts]:
            # Platform-specific extensions are wrapped in their protect #ifdef
            if ext.protect:
                out.append(f'#ifdef {ext.protect}\n')
            out.append(f'    {{"{ext.name}", {ext.specVersion}}},\n')
            if ext.protect:
                out.append('#endif\n')
        out.append('};\n')
        out.append('// Map of device extension name to version\n')
        out.append('static const std::unordered_map<std::string, uint32_t> device_extension_map = {\n')
        for ext in [x for x in self.vk.extensions.values() if x.device and x.name not in self.ignore_exts]:
            if ext.protect:
                out.append(f'#ifdef {ext.protect}\n')
            out.append(f'    {{"{ext.name}", {ext.specVersion}}},\n')
            if ext.protect:
                out.append('#endif\n')
        out.append('};\n')

        # current_protect tracks the currently-open #ifdef guard so that
        # consecutive commands sharing the same platform protect share a
        # single #ifdef/#endif block.
        current_protect = None
        for name, cmd in self.vk.commands.items():
            prepend_newline = '\n'
            if cmd.protect != current_protect:
                if current_protect is not None:
                    out.append(f'#endif /* {current_protect} */\n')
                    # Suppress the usual separating blank line right after an #endif
                    prepend_newline = ''
                if current_protect is not None and cmd.protect is not None:
                    out.append('\n')
                if cmd.protect is not None:
                    out.append(f'#ifdef {cmd.protect}\n')
                current_protect = cmd.protect
            # Drop the 'vk' prefix so declarations don't collide with the loader's symbols
            out.append(f'{prepend_newline}static {cmd.cPrototype.replace(name, name[2:])}\n')
        if current_protect is not None:
            out.append('#endif\n')

        # record intercepted procedures
        out.append('// Map of all APIs to be intercepted by this layer\n')
        out.append('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {\n')
        for name, cmd in self.vk.commands.items():
            if cmd.protect:
                out.append(f'#ifdef {cmd.protect}\n')
            out.append(f'    {{"{name}", (void*){name[2:]}}},\n')
            if cmd.protect:
                out.append('#endif\n')
        out.append('};\n')

    def generate_function_definitions(self, out):
        """Append a static definition for every Vulkan command to ``out``.

        Body selection, in priority order:
          1. manual_functions: use CUSTOM_C_INTERCEPTS when present, else
             emit a declaration plus a TODO comment.
          2. A CUSTOM_C_INTERCEPTS entry for the command itself.
          3. A CUSTOM_C_INTERCEPTS entry for name+'KHR': forward to the
             KHR variant's implementation.
          4. A generated stub: handle creation for vkCreate*/vkAllocate*,
             a destroy comment for Destroy/Free, and a success return.
        """
        out.append('#include "mock_icd.h"\n')
        out.append('#include "function_declarations.h"\n')
        out.append('namespace vkmock {\n')

        manual_functions = [
            # Include functions here to be intercepted w/ manually implemented function bodies
            'vkGetDeviceProcAddr',
            'vkGetInstanceProcAddr',
            'vkCreateDevice',
            'vkDestroyDevice',
            'vkCreateInstance',
            'vkDestroyInstance',
            'vkFreeCommandBuffers',
            'vkAllocateCommandBuffers',
            'vkDestroyCommandPool',
            #'vkCreateDebugReportCallbackEXT',
            #'vkDestroyDebugReportCallbackEXT',
            'vkEnumerateInstanceLayerProperties',
            'vkEnumerateInstanceVersion',
            'vkEnumerateInstanceExtensionProperties',
            'vkEnumerateDeviceLayerProperties',
            'vkEnumerateDeviceExtensionProperties',
            'vkGetPipelineKeyKHR',
            'vkGetPipelineBinaryDataKHR',
        ]

        # Same #ifdef coalescing scheme as generate_function_declarations
        current_protect = None
        for name, cmd in self.vk.commands.items():
            if cmd.protect != current_protect:
                if current_protect is not None:
                    out.append(f'#endif /* {current_protect} */\n')
                if current_protect is not None and cmd.protect is not None:
                    out.append('\n')
                if cmd.protect is not None:
                    out.append(f'#ifdef {cmd.protect}\n')
                current_protect = cmd.protect

            if name in manual_functions:
                if name not in CUSTOM_C_INTERCEPTS:
                    # Implemented by hand elsewhere: declaration only
                    out.append(f'static {cmd.cPrototype.replace(name, name[2:])}\n')
                    out.append('// TODO: Implement custom intercept body\n')
                else:
                    # cPrototype ends in ';' -- [:-1] drops it so a body can follow
                    out.append(f'static {cmd.cPrototype[:-1].replace(name, name[2:])}\n')
                    out.append(f'{{{CUSTOM_C_INTERCEPTS[name]}}}\n')
                continue

            out.append(f'static {cmd.cPrototype[:-1].replace(name, name[2:])}\n')
            if name in CUSTOM_C_INTERCEPTS:
                out.append(f'{{{CUSTOM_C_INTERCEPTS[name]}}}\n')
                continue

            # if the name w/ KHR postfix is in the CUSTOM_C_INTERCEPTS
            # Call the KHR custom version instead of generating separate code
            khr_name = name + "KHR"
            if khr_name in CUSTOM_C_INTERCEPTS:
                return_string = ''
                if cmd.returnType != 'void':
                    return_string = 'return '

                param_names = []
                for param in cmd.params:
                    param_names.append(param.name)
                out.append(f'{{\n    {return_string}{khr_name[2:]}({", ".join(param_names)});\n}}\n')
                continue
            out.append('{\n')

            # GET THE TYPE OF FUNCTION
            if any(name.startswith(ftxt) for ftxt in ('vkCreate', 'vkAllocate')):
                # Get last param -- by Vulkan convention this is the output handle(s)
                last_param = cmd.params[-1]
                lp_txt = last_param.name
                lp_len = None
                if last_param.length is not None:
                    lp_len = last_param.length
                    # Registry spells member access as '::'; emitted C++ needs '->'
                    lp_len = lp_len.replace('::', '->')
                lp_type = last_param.type
                handle_type = 'dispatchable'
                allocator_txt = 'CreateDispObjHandle()'
                if lp_type not in self.dispatchable_handles:
                    handle_type = 'non-' + handle_type
                    allocator_txt = 'global_unique_handle++'
                # Need to lock in both cases
                out.append('    unique_lock_t lock(global_lock);\n')
                if lp_len is not None:
                    #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
                    out.append(f'    for (uint32_t i = 0; i < {lp_len}; ++i) {{\n')
                    out.append(f'        {lp_txt}[i] = ({lp_type}){allocator_txt};\n')
                    out.append('    }\n')
                else:
                    #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
                    if 'AllocateMemory' in name:
                        # Store allocation size in case it's mapped
                        out.append('    allocated_memory_size_map[(VkDeviceMemory)global_unique_handle] = pAllocateInfo->allocationSize;\n')
                    out.append(f'    *{lp_txt} = ({lp_type}){allocator_txt};\n')
            elif True in [ftxt in name for ftxt in ['Destroy', 'Free']]:
                out.append('//Destroy object\n')
                if 'FreeMemory' in name:
                    # If the memory is mapped, unmap it
                    out.append('    UnmapMemory(device, memory);\n')
                    # Remove from allocation map
                    out.append('    unique_lock_t lock(global_lock);\n')
                    out.append('    allocated_memory_size_map.erase(memory);\n')
            else:
                out.append('//Not a CREATE or DESTROY function\n')

            # Return result variable, if any.
            if cmd.returnType != 'void':
                if name == 'vkGetEventStatus':
                    # Events report as signaled so callers never spin forever
                    out.append('    return VK_EVENT_SET;\n')
                else:
                    out.append('    return VK_SUCCESS;\n')
            out.append('}\n')
        if current_protect is not None:
            out.append('#endif\n')

    def generate(self):
        """Emit the selected output file: common license/header boilerplate,
        then either the declarations or the definitions, then the closing
        namespace."""
        out = []
        out.append('''/*
** Copyright (c) 2015-2025 The Khronos Group Inc.
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*
** This header is generated from the Khronos Vulkan XML API Registry.
**
*/

#pragma once
''')

        # self.filename is set by the generator driver; anything other than
        # the declarations header gets the definitions.
        if self.filename == "function_declarations.h":
            self.generate_function_declarations(out)
        else:
            self.generate_function_definitions(out)

        out.append('\n')
        out.append('} // namespace vkmock\n')
        out.append('\n')
        self.write(''.join(out))
diff --git a/tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py b/tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py
new file mode 100644
index 00000000..baf3e816
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py
@@ -0,0 +1,163 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2021 The Khronos Group Inc.
+# Copyright (c) 2015-2021 Valve Corporation
+# Copyright (c) 2015-2021 LunarG, Inc.
+# Copyright (c) 2015-2021 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: John Zulauf <jzulauf@lunarg.com>
+
+from base_generator import BaseGenerator
+
+# HelperFileOutputGenerator - subclass of OutputGenerator. Outputs Vulkan helper files
class HelperFileOutputGenerator(BaseGenerator):
    """Emits the Vulkan typemap helper header: a compile-time two-way map
    between structure types and their VkStructureType sType values, plus
    small utilities for walking and initializing pNext chains."""

    def __init__(self):
        BaseGenerator.__init__(self)

    def generate(self):
        parts = []

        # Generated-file banner
        parts.append('// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n')
        parts.append('// See vulkan_tools_helper_file_generator.py for modifications\n')

        # Copyright notice
        parts.append('''

/***************************************************************************
 *
 * Copyright (c) 2015-2017 The Khronos Group Inc.
 * Copyright (c) 2015-2017 Valve Corporation
 * Copyright (c) 2015-2017 LunarG, Inc.
 * Copyright (c) 2015-2017 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Chris Forbes <chrisforbes@google.com>
 * Author: John Zulauf<jzulauf@lunarg.com>
 *
 ****************************************************************************/
''')

        # Header prologue: the empty generic templates the loop below specializes
        parts.append('''
#pragma once
#include <vulkan/vulkan.h>

// These empty generic templates are specialized for each type with sType
// members and for each sType -- providing a two way map between structure
// types and sTypes

template <VkStructureType id> struct LvlSTypeMap {};
template <typename T> struct LvlTypeMap {};

''')

        # One LvlTypeMap/LvlSTypeMap specialization pair per sType-bearing struct
        for struct in self.vk.structs.values():
            if struct.sType is None:
                continue  # structs without an sType have no place in the map

            guarded = struct.protect is not None
            if guarded:
                parts.append(f'#ifdef {struct.protect}\n')

            parts.extend([
                f'// Map type {struct.name} to id {struct.sType}\n',
                f'template <> struct LvlTypeMap<{struct.name}> {{\n',
                f'    static const VkStructureType kSType = {struct.sType};\n',
                '};\n\n',
                f'template <> struct LvlSTypeMap<{struct.sType}> {{\n',
                f'    typedef {struct.name} Type;\n',
                '};\n\n',
            ])

            if guarded:
                parts.append(f'#endif // {struct.protect}\n')

        # Define the utilities (here so any renaming stays consistent), if this grows large, refactor to a fixed .h file
        parts.append('''// Header "base class" for pNext chain traversal
struct LvlGenericHeader {
    VkStructureType sType;
    const LvlGenericHeader *pNext;
};
struct LvlGenericModHeader {
    VkStructureType sType;
    LvlGenericModHeader *pNext;
};

// Find an entry of the given type in the pNext chain
template <typename T> const T *lvl_find_in_chain(const void *next) {
    const LvlGenericHeader *current = reinterpret_cast<const LvlGenericHeader *>(next);
    const T *found = nullptr;
    while (current) {
        if (LvlTypeMap<T>::kSType == current->sType) {
            found = reinterpret_cast<const T*>(current);
            current = nullptr;
        } else {
            current = current->pNext;
        }
    }
    return found;
}
// Find an entry of the given type in the pNext chain
template <typename T> T *lvl_find_mod_in_chain(void *next) {
    LvlGenericModHeader *current = reinterpret_cast<LvlGenericModHeader *>(next);
    T *found = nullptr;
    while (current) {
        if (LvlTypeMap<T>::kSType == current->sType) {
            found = reinterpret_cast<T*>(current);
            current = nullptr;
        } else {
            current = current->pNext;
        }
    }
    return found;
}

// Init the header of an sType struct with pNext
template <typename T> T lvl_init_struct(void *p_next) {
    T out = {};
    out.sType = LvlTypeMap<T>::kSType;
    out.pNext = p_next;
    return out;
}

// Init the header of an sType struct
template <typename T> T lvl_init_struct() {
    T out = {};
    out.sType = LvlTypeMap<T>::kSType;
    return out;
}
''')

        self.write(''.join(parts))
+
diff --git a/tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py b/tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py
new file mode 100644
index 00000000..472459ac
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py
@@ -0,0 +1,1165 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2019-2026 Valve Corporation
+# Copyright (c) 2019-2026 LunarG, Inc.
+# Copyright (c) 2019-2022 Google Inc.
+# Copyright (c) 2023-2024 RasterGrid Kft.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Charles Giessen <charles@lunarg.com>
+
+from base_generator import BaseGenerator
+
+from collections import OrderedDict
+
+LICENSE_HEADER = '''
+/*
+ * Copyright (c) 2019-2026 The Khronos Group Inc.
+ * Copyright (c) 2019-2026 Valve Corporation
+ * Copyright (c) 2019-2026 LunarG, Inc.
+ * Copyright (c) 2023-2024 RasterGrid Kft.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Charles Giessen <charles@lunarg.com>
+ *
+ */
+
+/*
+ * This file is generated from the Khronos Vulkan XML API Registry.
+ */
+'''
+
+# C++ helper templates injected verbatim near the top of the generated file:
+# hex formatting that adapts to the active output mode (json / vkconfig / text).
+CUSTOM_FORMATTERS = r'''
+template <typename T>
+std::string to_hex_str(const T i) {
+    std::stringstream stream;
+    stream << "0x" << std::setfill('0') << std::setw(sizeof(T)) << std::hex << i;
+    return stream.str();
+}
+
+template <typename T>
+std::string to_hex_str(Printer &p, const T i) {
+    if (p.Type() == OutputType::json)
+        return std::to_string(i);
+    else if (p.Type() == OutputType::vkconfig_output)
+        return std::string("\"") + to_hex_str(i) + std::string("\"");
+    else
+        return to_hex_str(i);
+}
+
+'''
+
+
+# used in the .cpp code: structs that always get a Dump<Struct>() printer
+STRUCTURES_TO_GEN = ['VkExtent3D', 'VkExtent2D', 'VkPhysicalDeviceLimits', 'VkPhysicalDeviceFeatures', 'VkPhysicalDeviceSparseProperties',
+                     'VkSurfaceCapabilitiesKHR', 'VkSurfaceFormatKHR', 'VkLayerProperties', 'VkPhysicalDeviceToolProperties', 'VkFormatProperties',
+                     'VkSurfacePresentScalingCapabilitiesKHR', 'VkSurfacePresentModeCompatibilityKHR', 'VkPhysicalDeviceHostImageCopyProperties',
+                     'VkVideoProfileInfoKHR', 'VkVideoCapabilitiesKHR', 'VkVideoFormatPropertiesKHR', 'VkCooperativeMatrixPropertiesKHR',
+                     'VkPhysicalDeviceFragmentShadingRateKHR', 'VkMultisamplePropertiesEXT',
+                     'VkDisplayPropertiesKHR', 'VkDisplayPlanePropertiesKHR', 'VkDisplayPlaneCapabilitiesKHR', 'VkDisplayModePropertiesKHR',
+                     'VkDisplayModeParametersKHR']
+
+# enums that get both a <Enum>String() and a Dump<Enum>() helper
+ENUMS_TO_GEN = ['VkResult', 'VkFormat', 'VkPresentModeKHR',
+                'VkPhysicalDeviceType', 'VkImageTiling', 'VkTimeDomainKHR']
+# flag types that get a Dump<Flags>() array printer
+FLAGS_TO_GEN = ['VkSurfaceTransformFlagsKHR', 'VkCompositeAlphaFlagsKHR', 'VkSurfaceCounterFlagsEXT', 'VkQueueFlags',
+                'VkDeviceGroupPresentModeFlagsKHR', 'VkFormatFeatureFlags', 'VkFormatFeatureFlags2', 'VkMemoryPropertyFlags', 'VkMemoryHeapFlags']
+# flag types that additionally get a " | "-joined <Flags>String() helper
+FLAG_STRINGS_TO_GEN = ['VkQueueFlags']
+
+STRUCT_SHORT_VERSIONS_TO_GEN = ['VkExtent3D', 'VkExtent2D']
+
+STRUCT_COMPARISONS_TO_GEN = ['VkSurfaceFormatKHR', 'VkSurfaceFormat2KHR', 'VkSurfaceCapabilitiesKHR',
+                             'VkSurfaceCapabilities2KHR', 'VkSurfaceCapabilities2EXT']
+# don't generate these structures
+STRUCT_BLACKLIST = ['VkVideoProfileListInfoKHR', 'VkDrmFormatModifierPropertiesListEXT', 'VkDrmFormatModifierPropertiesEXT', 'VkDrmFormatModifierPropertiesList2EXT']
+# These structures are only used in version 1.1, otherwise they are included in the promoted structs
+STRUCT_1_1_LIST = ['VkPhysicalDeviceProtectedMemoryFeatures', 'VkPhysicalDeviceShaderDrawParametersFeatures', 'VkPhysicalDeviceSubgroupProperties', 'VkPhysicalDeviceProtectedMemoryProperties']
+
+# generate these structures such that they only print when not in json mode (as json wants them separate)
+PORTABILITY_STRUCTS = ['VkPhysicalDevicePortabilitySubsetFeaturesKHR', 'VkPhysicalDevicePortabilitySubsetPropertiesKHR']
+
+# iostream or custom outputter handles these types
+PREDEFINED_TYPES = ['char', 'VkBool32', 'uint32_t', 'uint8_t', 'int32_t',
+                    'float', 'uint64_t', 'size_t', 'VkDeviceSize', 'int64_t']
+
+# struct members that are never printed (chain plumbing / handles)
+NAMES_TO_IGNORE = ['sType', 'pNext', 'displayMode', 'display', 'currentDisplay']
+
+EXTENSION_TYPE_INSTANCE = 'instance'
+EXTENSION_TYPE_DEVICE = 'device'
+EXTENSION_TYPE_BOTH = 'both'
+
+# Types that need pNext Chains built. 'extends' is the xml tag used in the structextends member. 'type' can be device, instance, or both
+# Ordered so the generated chain structs appear in a stable order.
+EXTENSION_CATEGORIES = OrderedDict((
+    ('phys_device_props2',
+        {'extends': 'VkPhysicalDeviceProperties2',
+         'type': EXTENSION_TYPE_BOTH,
+         'print_iterator': True,
+         'can_show_promoted_structs': True,
+         'ignore_vendor_exclusion': False}),
+    ('phys_device_mem_props2',
+        {'extends': 'VkPhysicalDeviceMemoryProperties2',
+         'type': EXTENSION_TYPE_DEVICE,
+         'print_iterator': False,
+         'can_show_promoted_structs': False,
+         'ignore_vendor_exclusion': False}),
+    ('phys_device_features2',
+        {'extends': 'VkPhysicalDeviceFeatures2',
+         'type': EXTENSION_TYPE_DEVICE,
+         'print_iterator': True,
+         'can_show_promoted_structs': True,
+         'ignore_vendor_exclusion': False}),
+    ('surface_capabilities2',
+        {'extends': 'VkSurfaceCapabilities2KHR',
+         'type': EXTENSION_TYPE_BOTH,
+         'print_iterator': True,
+         'can_show_promoted_structs': False,
+         'ignore_vendor_exclusion': False,
+         'exclude': ['VkSurfacePresentScalingCapabilitiesKHR', 'VkSurfacePresentModeCompatibilityKHR']}),
+    ('format_properties2',
+        {'extends': 'VkFormatProperties2',
+         'type': EXTENSION_TYPE_DEVICE,
+         'print_iterator': True,
+         'can_show_promoted_structs': False,
+         'ignore_vendor_exclusion': False}),
+    ('queue_properties2',
+        {'extends': 'VkQueueFamilyProperties2',
+         'type': EXTENSION_TYPE_DEVICE,
+         'print_iterator': True,
+         'can_show_promoted_structs': False,
+         'ignore_vendor_exclusion': False}),
+    ('video_profile_info',
+        {'extends': 'VkVideoProfileInfoKHR',
+         'type': EXTENSION_TYPE_DEVICE,
+         'print_iterator': True,
+         'can_show_promoted_structs': False,
+         'ignore_vendor_exclusion': True}),
+    ('video_capabilities',
+        {'extends': 'VkVideoCapabilitiesKHR',
+         'type': EXTENSION_TYPE_DEVICE,
+         'print_iterator': True,
+         'can_show_promoted_structs': False,
+         'ignore_vendor_exclusion': True,}),
+    ('video_format_properties',
+        {'extends': 'VkVideoFormatPropertiesKHR',
+         'type': EXTENSION_TYPE_DEVICE,
+         'print_iterator': True,
+         'can_show_promoted_structs': False,
+         'ignore_vendor_exclusion': True})
+    ))
+class VulkanInfoGenerator(BaseGenerator):
+    # Generates the vulkaninfo source: printers for enums, flags, structs,
+    # pNext chain helpers, format ranges, and video profile utilities.
+    def __init__(self):
+        BaseGenerator.__init__(self)
+        # Filled in by findFormatRanges(): contiguous VkFormat value ranges
+        # together with the extension / core version that enables them.
+        self.format_ranges = []
+
+    def generate(self):
+        # Entry point (called by the framework): assembles and writes the
+        # entire generated .cpp in dependency order.
+        self.findFormatRanges()
+
+        # gather the types that are needed to generate
+        types_to_gen = set()
+        types_to_gen.update(ENUMS_TO_GEN)
+        types_to_gen.update(FLAGS_TO_GEN)
+        types_to_gen.update(STRUCTURES_TO_GEN)
+
+        extension_types = {}
+        for key, ext_info in EXTENSION_CATEGORIES.items():
+            extension_types[key] = []
+
+            for extended_struct in self.vk.structs[ext_info.get('extends')].extendedBy:
+                if ext_info.get('exclude') is not None and extended_struct in ext_info.get('exclude'):
+                    continue
+                elif ext_info.get('ignore_vendor_exclusion'):
+                    extension_types[key].append(extended_struct)
+                    continue
+                # Keep only core (no extension), KHR, or EXT structs; other
+                # vendor-specific structs are excluded from the chains.
+                vendor_tags = []
+                for extension in self.vk.structs[extended_struct].extensions:
+                    vendor_tags.append(extension.split('_')[1])
+                if len(vendor_tags) == 0 or 'KHR' in vendor_tags or 'EXT' in vendor_tags:
+                    extension_types[key].append(extended_struct)
+            extension_types[key] = sorted(extension_types[key])
+            types_to_gen.update(extension_types[key])
+
+        # find all the types that need generating: transitive closure over
+        # struct member types (see findAllTypesToGen)
+        types_to_gen.update(self.findAllTypesToGen(types_to_gen))
+
+        types_to_gen = sorted(types_to_gen)
+
+        comparison_types_to_gen = set()
+        comparison_types_to_gen.update(STRUCT_COMPARISONS_TO_GEN)
+        comparison_types_to_gen.update(self.findAllTypesToGen(comparison_types_to_gen))
+        comparison_types_to_gen = sorted(comparison_types_to_gen)
+
+
+        # print the types gathered
+        out = []
+        out.append(LICENSE_HEADER + '\n')
+        out.append('#include "vulkaninfo.h"\n')
+        out.append('#include "outputprinter.h"\n')
+        out.append(CUSTOM_FORMATTERS)
+
+        out.extend(self.genVideoEnums())
+
+        for enum in (e for e in types_to_gen if e in self.vk.enums):
+            out.extend(self.PrintEnumToString(self.vk.enums[enum]))
+            out.extend(self.PrintEnum(self.vk.enums[enum]))
+
+        # Need to go through all flags to find if they or their associated bitmask needs printing
+        # This is because both bitmask and flag types are generated in PrintBitMask
+        for name in (x for x in sorted(self.vk.flags.keys()) if x in types_to_gen or self.vk.flags[x].bitmaskName in types_to_gen):
+            bitmask = self.vk.bitmasks[self.vk.flags[name].bitmaskName]
+
+            out.extend(self.PrintBitMask(bitmask, bitmask.flagName))
+
+            if bitmask.flagName in FLAG_STRINGS_TO_GEN:
+                out.extend(self.PrintBitMaskToString(bitmask, bitmask.flagName))
+        # make sure dump functions for nested structures are declared before use
+        for s in (x for x in types_to_gen if x in self.vk.structs and x not in STRUCT_BLACKLIST):
+            out.extend(self.PrintStructure(self.vk.structs[s], True))
+        for s in (x for x in types_to_gen if x in self.vk.structs and x not in STRUCT_BLACKLIST):
+            out.extend(self.PrintStructure(self.vk.structs[s], False))
+
+        for key, value in EXTENSION_CATEGORIES.items():
+            out.extend(self.PrintChainStruct(key, extension_types[key], value))
+
+        for s in (x for x in comparison_types_to_gen if x in self.vk.structs):
+            out.extend(self.PrintStructComparisonForwardDecl(self.vk.structs[s]))
+        for s in (x for x in comparison_types_to_gen if x in self.vk.structs):
+            out.extend(self.PrintStructComparison(self.vk.structs[s]))
+        for s in (x for x in types_to_gen if x in self.vk.structs and x in STRUCT_SHORT_VERSIONS_TO_GEN):
+            out.extend(self.PrintStructShort(self.vk.structs[s]))
+
+        # Table of contiguous VkFormat ranges with the version/extension that
+        # makes each range queryable (consumed by vulkaninfo at runtime).
+        out.append('auto format_ranges = std::array{\n')
+        for f in self.format_ranges:
+            out.append(f'    FormatRange{{{f.minimum_instance_version}, {self.vk.extensions[f.extensions[0]].nameString if len(f.extensions) > 0 else "nullptr"}, ')
+            out.append(f'static_cast<VkFormat>({f.first_format}), static_cast<VkFormat>({f.last_format})}},\n')
+        out.append('};\n')
+
+        out.extend(self.genVideoProfileUtils())
+
+        self.write(''.join(out))
+
+
+    def genVideoEnums(self):
+        # Emit a <Enum>String() and Dump<Enum>() helper for every enum in the
+        # video-std headers (these are plain C enums, not Vk-prefixed).
+        out = []
+        for enum in self.vk.videoStd.enums.values():
+            out.append(f'std::string {enum.name}String({enum.name} value) {{\n')
+            out.append('    switch (value) {\n')
+            for field in enum.fields:
+                # Ignore aliases (alias fields carry no numeric value)
+                if field.value is not None:
+                    out.append(f'        case {field.name}: return "{field.name}";\n')
+            out.append(f'        default: return std::string("UNKNOWN_{enum.name}_value") + std::to_string(value);\n')
+            out.append('    }\n}\n')
+            out.append(f'void Dump{enum.name}(Printer &p, std::string name, {enum.name} value) {{\n')
+            out.append(f'    p.PrintKeyString(name, {enum.name}String(value));\n}}\n')
+        return out
+
+
+ # Utility to get the extension / version precondition of a list of type names
+ def GetTypesPrecondition(self, typelist, indent):
+ indent = ' ' * indent
+ out = []
+ extEnables = []
+ for typename in typelist:
+ extEnables.extend(self.vk.structs[typename].extensions)
+
+ version = None
+ for typename in typelist:
+ for v in self.vk.versions.values():
+ if typename in v.name:
+ if version is not None and (v.major > version.major or (v.major == version.major and v.minor > version.minor)):
+ version = v
+
+
+ has_version = version is not None
+ has_extNameStr = len(extEnables) > 0
+ if has_version or has_extNameStr:
+ out.append(f'{indent}if (')
+ has_printed_condition = False
+ if has_extNameStr:
+ for ext in extEnables:
+ if has_printed_condition:
+ out.append(f'\n{indent} || ')
+ else:
+ has_printed_condition = True
+ if has_version:
+ out.append('(')
+ if self.vk.extensions[ext].device:
+ out.append(f'gpu.CheckPhysicalDeviceExtensionIncluded({self.vk.extensions[ext].nameString})')
+ else:
+ assert False, 'Should never get here'
+ if has_version:
+ if has_printed_condition:
+ out.append(f'\n{indent} || (gpu.api_version >= {version.nameApi})')
+ else:
+ out.append(f'gpu.api_version >= {version.nameApi}')
+ out.append(') {\n')
+ else:
+ out = f'{indent}{{\n'
+ return out
+
+ # Utility to construct a capability prerequisite condition evaluation expression
+ def GetRequiredCapsCondition(self, structName, memberName, memberRef, value):
+ condition = ''
+ requiredCapStructDef = self.vk.structs[structName]
+ for member in requiredCapStructDef.members:
+ if member.name == memberName:
+ if member.type in self.vk.flags:
+ # Check that the flags contain all the required values
+ def genExpressionFromValue(value):
+ return value if value == "" else f"({memberRef} & {value}) != 0"
+
+ for char in condition:
+ if char in ['(', ')', '+', ',']:
+ condition += genExpressionFromValue(value)
+ value = ""
+ if char == '+':
+ # '+' means AND
+ condition += ' && '
+ elif char == ',':
+ # ',' means OR
+ condition += ' || '
+ else:
+ condition += char
+ else:
+ value += char
+ condition += genExpressionFromValue(value)
+ else:
+ condition = f'{memberRef} == {value}'
+ if condition == '':
+ return 'true'
+ else:
+ return f'({condition})'
+
+    def genVideoProfileUtils(self):
+        # Emits is_video_format_same() plus enumerate_supported_video_profiles(),
+        # which instantiates an AppVideoProfile for every codec / chroma
+        # subsampling / bit-depth permutation described by the registry's
+        # video codec metadata.
+        out = []
+
+        # Generate video format properties comparator
+        out.append('''
+bool is_video_format_same(const VkVideoFormatPropertiesKHR &format_a, const VkVideoFormatPropertiesKHR &format_b) {
+    auto a = reinterpret_cast<const VkBaseInStructure*>(&format_a);
+    auto b = reinterpret_cast<const VkBaseInStructure*>(&format_b);
+    bool same = true;
+    while (same && a != nullptr && b != nullptr) {
+        if (a->sType != b->sType) {
+            // Structure type mismatch (extension structures are expected to be chained in the same order)
+            same = false;
+        } else {
+            switch (a->sType) {''')
+
+        # One memcmp case per valid extension struct of VkVideoFormatPropertiesKHR
+        # (payload compared past the sType/pNext header).
+        if 'VkVideoFormatPropertiesKHR' in self.registry.validextensionstructs:
+            for extstruct in ['VkVideoFormatPropertiesKHR'] + self.registry.validextensionstructs['VkVideoFormatPropertiesKHR']:
+                extstructDef = self.vk.structs[extstruct]
+                out.append(f'''
+                case {extstructDef.sType}:
+                    same = same && memcmp(reinterpret_cast<const char*>(a) + sizeof(VkBaseInStructure),
+                                          reinterpret_cast<const char*>(b) + sizeof(VkBaseInStructure),
+                                          sizeof({extstruct}) - sizeof(VkBaseInStructure)) == 0;
+                    break;''')
+
+        out.append('''
+                default:
+                    // Unexpected structure type
+                    same = false;
+                    break;
+            }
+        }
+        a = a->pNext;
+        b = b->pNext;
+    }
+    return same;
+}
+''')
+
+        # Generate video profile info capture utilities
+        out.append('''
+std::vector<std::unique_ptr<AppVideoProfile>> enumerate_supported_video_profiles(AppGpu &gpu) {
+    std::vector<std::unique_ptr<AppVideoProfile>> result{};
+
+    struct ChromaSubsamplingInfo {
+        VkVideoChromaSubsamplingFlagsKHR value;
+        const char* name;
+    };
+    const std::vector<ChromaSubsamplingInfo> chroma_subsampling_list = {
+        {VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR, "4:2:0"},
+        {VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR, "4:2:2"},
+        {VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR, "4:4:4"},
+        {VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR, "monochrome"}
+    };
+
+    struct BitDepthInfo {
+        VkVideoComponentBitDepthFlagsKHR value;
+        const char* name;
+    };
+    const std::vector<BitDepthInfo> bit_depth_list = {
+        {VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR, "8"},
+        {VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR, "10"},
+        {VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR, "12"}
+    };
+
+    auto find_caps_struct = [](const VkVideoCapabilitiesKHR &capabilities, VkStructureType stype) -> const VkBaseInStructure* {
+        auto p = reinterpret_cast<const VkBaseInStructure*>(&capabilities);
+        while (p != nullptr) {
+            if (p->sType == stype) {
+                return p;
+            }
+            p = p->pNext;
+        }
+        return nullptr;
+    };
+
+    auto base_format = []
+        (const ChromaSubsamplingInfo &chroma_subsampling, const BitDepthInfo &luma_bit_depth, const BitDepthInfo &chroma_bit_depth) {
+        std::string result{};
+        result += " (";
+        result += chroma_subsampling.name;
+        result += " ";
+        result += luma_bit_depth.name;
+        if (luma_bit_depth.value != chroma_bit_depth.value) {
+            result += ":";
+            result += chroma_bit_depth.name;
+        }
+        result += "-bit)";
+        return result;
+    };
+
+    auto add_profile = [&](
+        const std::string &name,
+        const VkVideoProfileInfoKHR &profile_info,
+        AppVideoProfile::CreateProfileInfoChainCb create_profile_info_chain,
+        AppVideoProfile::CreateCapabilitiesChainCb create_capabilities_chain,
+        const AppVideoProfile::CreateFormatPropertiesChainCbList &create_format_properties_chain_list,
+        AppVideoProfile::InitProfileCb init_profile) {
+        auto profile = std::make_unique<AppVideoProfile>(gpu, gpu.phys_device,
+                                                         name, profile_info,
+                                                         create_profile_info_chain,
+                                                         create_capabilities_chain,
+                                                         create_format_properties_chain_list,
+                                                         init_profile);
+        if (profile->supported) {
+            result.push_back(std::move(profile));
+        }
+    };
+''')
+
+        # Generate individual video profiles from the video codec metadata
+        for videoCodec in self.vk.videoCodecs.values():
+            # Ignore video codec categories (they carry no codec operation value)
+            if videoCodec.value is None:
+                continue
+
+            out.append('\n')
+            # Guard the whole codec block behind its extension/version precondition.
+            out.extend(self.GetTypesPrecondition(videoCodec.profiles.keys(), 4))
+            out.append(f'{" " * 8}const std::string codec_name = "{videoCodec.name}";\n')
+
+            out.append('''
+        for (auto chroma_subsampling : chroma_subsampling_list) {
+            for (auto luma_bit_depth : bit_depth_list) {
+                for (auto chroma_bit_depth : bit_depth_list) {
+                    if (chroma_subsampling.value == VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR && luma_bit_depth.value != chroma_bit_depth.value) {
+                        // Ignore the chroma bit depth dimension for monochrome
+                        continue;
+                    }
+
+                    std::string profile_base_name = codec_name + base_format(chroma_subsampling, luma_bit_depth, chroma_bit_depth);
+''')
+
+            # Setup video profile info
+            out.append(f'{" " * 20}VkVideoProfileInfoKHR profile_info{{\n')
+            out.append(f'{" " * 20}    VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR,\n')
+            out.append(f'{" " * 20}    nullptr,\n')
+            out.append(f'{" " * 20}    {videoCodec.value},\n')
+            out.append(f'{" " * 20}    chroma_subsampling.value,\n')
+            out.append(f'{" " * 20}    luma_bit_depth.value,\n')
+            out.append(f'{" " * 20}    chroma_bit_depth.value\n')
+            out.append(f'{" " * 20}}};\n\n')
+
+            # Setup video profile info chain creation callback
+            out.append(f'{" " * 20}auto create_profile_info_chain = [&](const void **ppnext) -> std::unique_ptr<video_profile_info_chain> {{\n')
+            out.append(f'{" " * 20}    auto profile_info_chain = std::make_unique<video_profile_info_chain>();\n')
+            for profileStruct in videoCodec.profiles:
+                structDef = self.vk.structs[profileStruct]
+                out.append(self.AddGuardHeader(structDef))
+                out.append(f'{" " * 24}if (profile_info_chain != nullptr) {{\n')
+                out.append(f'{" " * 28}profile_info_chain->{profileStruct[2:]}.sType = {structDef.sType};\n')
+                out.append(f'{" " * 28}profile_info_chain->{profileStruct[2:]}.pNext = nullptr;\n')
+                out.append(f'{" " * 28}*ppnext = &profile_info_chain->{profileStruct[2:]};\n')
+                out.append(f'{" " * 28}ppnext = &profile_info_chain->{profileStruct[2:]}.pNext;\n')
+                out.append(f'{" " * 24}}}\n')
+                # Platform-protected profile structs disable the whole chain
+                # when the platform define is absent.
+                if structDef.protect:
+                    out.append(f'#else\n{" " * 20}profile_info_chain = nullptr;\n')
+                out.append(self.AddGuardFooter(structDef))
+            out.append(f'{" " * 20}    return profile_info_chain;\n')
+            out.append(f'{" " * 20}}};\n\n')
+
+            # Setup video capabilities chain creation callback
+            out.append(f'{" " * 20}auto create_capabilities_chain = [&](void **ppnext) -> std::unique_ptr<video_capabilities_chain> {{\n')
+            out.append(f'{" " * 20}    auto capabilities_chain = std::make_unique<video_capabilities_chain>();\n')
+            for capabilities in videoCodec.capabilities:
+                structDef = self.vk.structs[capabilities]
+                out.append(self.AddGuardHeader(structDef))
+                out.append(f'{" " * 24}if (capabilities_chain != nullptr) {{\n')
+                out.extend(self.GetTypesPrecondition([capabilities], 28))
+                out.append(f'{" " * 32}capabilities_chain->{capabilities[2:]}.sType = {structDef.sType};\n')
+                out.append(f'{" " * 32}capabilities_chain->{capabilities[2:]}.pNext = nullptr;\n')
+                out.append(f'{" " * 32}*ppnext = &capabilities_chain->{capabilities[2:]};\n')
+                out.append(f'{" " * 32}ppnext = &capabilities_chain->{capabilities[2:]}.pNext;\n')
+                out.append(f'{" " * 28}}}\n')
+                out.append(f'{" " * 24}}}\n')
+                out.append(self.AddGuardFooter(structDef))
+            out.append(f'{" " * 20}    return capabilities_chain;\n')
+            out.append(f'{" " * 20}}};\n\n')
+
+            # Setup video format properties chain creation callbacks
+            out.append(f'{" " * 20}const AppVideoProfile::CreateFormatPropertiesChainCbList create_format_properties_chain_list = {{\n')
+            for format in videoCodec.formats.values():
+                out.append(f'{" " * 24}AppVideoProfile::CreateFormatPropertiesChainCb {{\n')
+                out.append(f'{" " * 28}"{format.name}",\n')
+                out.append(f'{" " * 28}{format.usage.replace("+", " | ")},\n')
+
+                # Callback to check required capabilities
+                out.append(f'{" " * 28}[&](const VkVideoCapabilitiesKHR &capabilities) -> bool {{\n')
+                out.append(f'{" " * 28}    bool supported = true;\n')
+                for requiredCap in format.requiredCaps:
+                    structDef = self.vk.structs[requiredCap.struct]
+                    out.append(self.AddGuardHeader(structDef))
+                    out.extend(self.GetTypesPrecondition([requiredCap.struct], 32))
+                    out.append(f'{" " * 32}    auto caps = reinterpret_cast<const {requiredCap.struct}*>(find_caps_struct(capabilities, {structDef.sType}));\n')
+                    out.append(f'{" " * 32}    if (caps != nullptr) {{\n')
+                    out.append(f'{" " * 32}        supported = supported && {self.GetRequiredCapsCondition(requiredCap.struct, requiredCap.member, f"caps->{requiredCap.member}", requiredCap.value)};\n')
+                    out.append(f'{" " * 32}    }} else {{\n')
+                    out.append(f'{" " * 32}        supported = false;\n')
+                    out.append(f'{" " * 32}    }}\n')
+                    out.append(f'{" " * 32}}} else {{\n')
+                    out.append(f'{" " * 32}    supported = false;\n')
+                    out.append(f'{" " * 32}}}\n')
+                    if structDef.protect:
+                        out.append(f'#else\n{" " * 32}supported = false;\n')
+                    out.append(self.AddGuardFooter(structDef))
+                out.append(f'{" " * 28}    return supported;\n')
+                out.append(f'{" " * 28}}},\n')
+
+                # Callback to create video format properties chain
+                out.append(f'{" " * 28}[&](void **ppnext) -> std::unique_ptr<video_format_properties_chain> {{\n')
+                out.append(f'{" " * 28}    auto format_properties_chain = std::make_unique<video_format_properties_chain>();\n')
+                for formatProps in format.properties:
+                    structDef = self.vk.structs[formatProps]
+                    out.append(self.AddGuardHeader(structDef))
+                    out.append(f'{" " * 32}if (format_properties_chain != nullptr) {{\n')
+                    out.extend(self.GetTypesPrecondition([formatProps], 36))
+                    out.append(f'{" " * 40}format_properties_chain->{formatProps[2:]}.sType = {structDef.sType};\n')
+                    out.append(f'{" " * 40}format_properties_chain->{formatProps[2:]}.pNext = nullptr;\n')
+                    out.append(f'{" " * 40}*ppnext = &format_properties_chain->{formatProps[2:]};\n')
+                    out.append(f'{" " * 40}ppnext = &format_properties_chain->{formatProps[2:]}.pNext;\n')
+                    out.append(f'{" " * 36}}}\n')
+                    out.append(f'{" " * 32}}}\n')
+                    out.append(self.AddGuardFooter(structDef))
+                out.append(f'{" " * 28}    return format_properties_chain;\n')
+                out.append(f'{" " * 28}}},\n')
+
+                out.append(f'{" " * 24}}},\n')
+            out.append(f'{" " * 20}}};\n\n')
+
+            # Permute profiles for each profile struct member value
+            profiles = {'': []}
+            for profileStruct in videoCodec.profiles.values():
+                for profileStructMember in profileStruct.members.values():
+                    newProfiles = {}
+                    for profileStructMemberValue, profileStructMemberName in profileStructMember.values.items():
+                        for profileName, profile in profiles.items():
+                            # Only add video profile name suffix to the full descriptive name if not empty to avoid excess whitespace
+                            newProfileName = profileName if profileStructMemberName == '' else f'{profileName} {profileStructMemberName}'
+                            newProfiles[newProfileName] = profile + [{
+                                "struct": profileStruct.name,
+                                "member": profileStructMember.name,
+                                "value": profileStructMemberValue
+                            }]
+                    profiles = newProfiles
+
+            # Emit one add_profile() call per permutation; the init callback
+            # fills the profile-specific struct members.
+            for profileName, profile in profiles.items():
+                out.append(f'{" " * 20}add_profile(profile_base_name + "{profileName}", profile_info,\n')
+                out.append(f'{" " * 20}            create_profile_info_chain, create_capabilities_chain,\n')
+                out.append(f'{" " * 20}            create_format_properties_chain_list,\n')
+                out.append(f'{" " * 20}            [](AppVideoProfile& profile) {{\n')
+                for profileStruct in videoCodec.profiles:
+                    structDef = self.vk.structs[profileStruct]
+                    out.append(self.AddGuardHeader(structDef))
+                    for elem in profile:
+                        if elem['struct'] == profileStruct:
+                            out.append(f'{" " * 24}profile.profile_info_chain->{elem["struct"][2:]}.{elem["member"]} = {elem["value"]};\n')
+                    out.append(self.AddGuardFooter(structDef))
+                out.append(f'{" " * 20}}});\n')
+
+            # Close the three permutation loops and the codec precondition block.
+            out.append(f'{" " * 16}}}\n')
+            out.append(f'{" " * 12}}}\n')
+            out.append(f'{" " * 8}}}\n')
+            out.append(f'{" " * 4}}}\n')
+
+        out.append('    return result;\n')
+        out.append('}\n\n')
+
+        return out
+
+
+    # finds all the ranges of formats from core (1.0), core versions (1.1+), and extensions
+    def findFormatRanges(self):
+        # Walk VkFormat values in declaration order; whenever a gap in the
+        # numeric sequence appears, close off the current contiguous range.
+        # NOTE(review): VulkanFormatRange is declared elsewhere in this module.
+        min_val = 2**32
+        prev_field = None
+        max_val = 0
+        for f in self.vk.enums['VkFormat'].fields:
+            # Aliases carry no numeric value of their own; skip them.
+            if f.value is None:
+                continue
+            if prev_field is not None and f.value != prev_field.value + 1:
+                # Gap found: record the just-finished range. If its extension
+                # was promoted, also record the promoting core version.
+                for ext in prev_field.extensions:
+                    if self.vk.extensions[ext].promotedTo is not None:
+                        self.format_ranges.append(VulkanFormatRange(self.vk.extensions[ext].promotedTo.replace("VK_", "VK_API_"), [], min_val, max_val))
+                        break
+                # only bother with the first extension
+                self.format_ranges.append(VulkanFormatRange(0, prev_field.extensions, min_val, max_val))
+                min_val = 2**32
+                max_val = 0
+            min_val = min(min_val, f.value)
+            max_val = max(max_val, f.value)
+
+            prev_field = f
+
+        # Close the final range the same way. Assumes VkFormat has at least one
+        # non-alias field, so prev_field is not None (true for the registry).
+        for ext in prev_field.extensions:
+            if self.vk.extensions[ext].promotedTo is not None:
+                self.format_ranges.append(VulkanFormatRange(self.vk.extensions[ext].promotedTo.replace("VK_", "VK_API_"), [], min_val, max_val))
+                break
+
+        self.format_ranges.append(VulkanFormatRange(0, prev_field.extensions, min_val, max_val))
+
+ def findAllTypesToGen(self, initial_type_set):
+ out_set = set()
+ current_set = initial_type_set
+ while len(current_set) > 0:
+ out_set.update(current_set)
+ next_set = set()
+
+ for current_item in current_set:
+ if current_item in self.vk.structs:
+ for member in self.vk.structs[current_item].members:
+ if member.type not in out_set and member.name not in NAMES_TO_IGNORE:
+ next_set.add(member.type)
+
+ current_set = next_set
+ return out_set
+
+ def AddGuardHeader(self,obj):
+ if obj is not None and obj.protect is not None:
+ return f'#ifdef {obj.protect}\n'
+ else:
+ return ''
+
+
+ def AddGuardFooter(self,obj):
+ if obj is not None and obj.protect is not None:
+ return f'#endif // {obj.protect}\n'
+ else:
+ return ''
+
+    def PrintEnumToString(self,enum):
+        # Emit a <Enum>String(value) C++ function mapping enumerants to their
+        # names with the leading "VK_" stripped ([3:]).
+        out = []
+        out.append(self.AddGuardHeader(enum))
+        out.append(f'std::string {enum.name}String({enum.name} value) {{\n')
+        out.append('    switch (value) {\n')
+        for v in enum.fields:
+            out.append(f'        case ({v.name}): return "{v.name[3:]}";\n')
+        out.append(f'        default: return std::string("UNKNOWN_{enum.name}_value") + std::to_string(value);\n')
+        out.append('    }\n}\n')
+        out.append(self.AddGuardFooter(enum))
+        return out
+
+
+    def PrintEnum(self,enum):
+        # Emit Dump<Enum>(); json output re-adds the "VK_" prefix that
+        # <Enum>String() strips.
+        out = []
+        out.append(self.AddGuardHeader(enum))
+        out.append(f'''void Dump{enum.name}(Printer &p, std::string name, {enum.name} value) {{
+    if (p.Type() == OutputType::json)
+        p.PrintKeyString(name, std::string("VK_") + {enum.name}String(value));
+    else
+        p.PrintKeyString(name, {enum.name}String(value));
+}}
+''')
+        out.append(self.AddGuardFooter(enum))
+        return out
+
+
+    def PrintGetFlagStrings(self,name, bitmask):
+        # Emit <Flags>GetStrings(value): returns the names of all set
+        # single-bit flags, or a placeholder when no bits are set.
+        out = []
+        out.append(f'std::vector<const char *> {name}GetStrings({name} value) {{\n')
+        out.append('    std::vector<const char *> strings;\n')
+        # If a bitmask contains a field whose value is zero, we want to support printing the correct bitflag
+        # Otherwise, use "None" for when there are not bits set in the bitmask
+        if bitmask.flags[0].value != 0:
+            out.append('    if (value == 0) { strings.push_back("None"); return strings; }\n')
+        else:
+            out.append(f'    if (value == 0) {{ strings.push_back("{bitmask.flags[0].name[3:]}"); return strings; }}\n')
+        for v in bitmask.flags:
+            # only check single-bit flags: (v & (v - 1)) == 0 means power of two
+            if v.value != 0 and (v.value & (v.value - 1)) == 0:
+                out.append(f'    if ({v.name} & value) strings.push_back("{v.name[3:]}");\n')
+        out.append('    return strings;\n}\n')
+        return out
+
+
+    def PrintFlags(self, bitmask, name):
+        # Emit Dump<Flags>(): prints the set bits as an array of flag-name
+        # strings (json mode re-adds the "VK_" prefix).
+        out = []
+        out.append(f'void Dump{name}(Printer &p, std::string name, {name} value) {{\n')
+        out.append(f'''    if (static_cast<{bitmask.name}>(value) == 0) {{
+        ArrayWrapper arr(p, name, 0);
+        if (p.Type() != OutputType::json && p.Type() != OutputType::vkconfig_output)
+            p.SetAsType().PrintString("None");
+        return;
+    }}
+    auto strings = {bitmask.name}GetStrings(static_cast<{bitmask.name}>(value));
+    ArrayWrapper arr(p, name, strings.size());
+    for(auto& str : strings){{
+        if (p.Type() == OutputType::json)
+            p.SetAsType().PrintString(std::string("VK_") + str);
+        else
+            p.SetAsType().PrintString(str);
+    }}
+}}
+''')
+        return out
+
+
+    def PrintFlagBits(self, bitmask):
+        # Emit Dump<FlagBits>(): prints a single enumerant of a bitmask type
+        # (only the first matching string is used).
+        return [f'''void Dump{bitmask.name}(Printer &p, std::string name, {bitmask.name} value) {{
+    auto strings = {bitmask.name}GetStrings(value);
+    if (strings.size() > 0) {{
+        if (p.Type() == OutputType::json)
+            p.PrintKeyString(name, std::string("VK_") + strings.at(0));
+        else
+            p.PrintKeyString(name, strings.at(0));
+    }}
+}}
+''']
+
+
+ def PrintBitMask(self,bitmask, name):
+ out = []
+ out.extend(self.PrintGetFlagStrings(bitmask.name, bitmask))
+ out.append(self.AddGuardHeader(bitmask))
+ out.extend(self.PrintFlags(bitmask, name))
+ out.extend(self.PrintFlagBits(bitmask))
+ out.append(self.AddGuardFooter(bitmask))
+ out.append('\n')
+ return out
+
+
+    def PrintBitMaskToString(self, bitmask, name):
+        # Emit <Flags>String(): builds a " | "-joined string of the names of
+        # all matching flags (every flag is tested, including multi-bit ones).
+        out = []
+        out.append(self.AddGuardHeader(bitmask))
+        out.append(f'std::string {name}String({name} value) {{\n')
+        out.append('    std::string out;\n')
+        out.append('    bool is_first = true;\n')
+        for v in bitmask.flags:
+            out.append(f'    if ({v.name} & value) {{\n')
+            out.append('        if (is_first) { is_first = false; } else { out += " | "; }\n')
+            # [3:] strips the "VK_" prefix from the flag name.
+            out.append(f'        out += "{str(v.name)[3:]}";\n')
+            out.append('    }\n')
+        out.append('    return out;\n')
+        out.append('}\n')
+        out.append(self.AddGuardFooter(bitmask))
+        return out
+
+
+ # Emit the C++ "void Dump<StructName>(Printer&, std::string, const <StructName>&)"
+ # function (or just its forward declaration when declare_only is True) that
+ # prints every supported member of the struct through the Printer abstraction.
+ # Structs with no members produce no code at all.
+ def PrintStructure(self,struct, declare_only):
+ if len(struct.members) == 0:
+ return []
+ out = []
+ out.append(self.AddGuardHeader(struct))
+ # Width of the widest directly-printable member name; used below so the
+ # printer can align all key/value pairs in one column.
+ max_key_len = 0
+ for v in struct.members:
+ if (v.type in PREDEFINED_TYPES or v.type in STRUCT_BLACKLIST) and (v.length is None or v.type in ['char'] or v.fixedSizeArray[0] in ['VK_UUID_SIZE', 'VK_LUID_SIZE']):
+ max_key_len = max(max_key_len, len(v.name))
+ out.append(f'void Dump{struct.name}(Printer &p, std::string name, const {struct.name} &obj)')
+ if declare_only:
+ out.append(';\n')
+ out.append(self.AddGuardFooter(struct))
+ return out
+ out.append(' {\n')
+ # These two structs use fixed object names in JSON output (schema
+ # compatibility); all other structs use the caller-supplied name.
+ if struct.name == 'VkPhysicalDeviceLimits':
+ out.append(' if (p.Type() == OutputType::json)\n')
+ out.append(' p.ObjectStart("limits");\n')
+ out.append(' else\n')
+ out.append(' p.SetSubHeader().ObjectStart(name);\n')
+ elif struct.name == 'VkPhysicalDeviceSparseProperties':
+ out.append(' if (p.Type() == OutputType::json)\n')
+ out.append(' p.ObjectStart("sparseProperties");\n')
+ out.append(' else\n')
+ out.append(' p.SetSubHeader().ObjectStart(name);\n')
+ else:
+ out.append(' ObjectWrapper object{p, name};\n')
+ if max_key_len > 0:
+ out.append(f' p.SetMinKeyWidth({max_key_len});\n')
+ # Member-by-member dump; each branch below handles one category of type.
+ for v in struct.members:
+ # strings
+ if v.type == 'char':
+ if v.pointer == True:
+ out.append(f' if (obj.{v.name} == nullptr) {{')
+ out.append(f' p.PrintKeyString("{v.name}", "NULL");\n')
+ out.append(' } else {')
+ out.append(f' p.PrintKeyString("{v.name}", obj.{v.name});\n')
+ if v.pointer == True:
+ out.append(' }')
+ # arrays
+ elif v.length is not None:
+ # uuid's
+ if v.type == 'uint8_t' and (v.fixedSizeArray[0] == 'VK_LUID_SIZE' or v.fixedSizeArray[0] == 'VK_UUID_SIZE'): # VK_UUID_SIZE
+ # LUIDs are only meaningful when the device says they are valid.
+ if v.fixedSizeArray[0] == 'VK_LUID_SIZE':
+ out.append(' if (obj.deviceLUIDValid) { // special case\n')
+ out.append(f' p.PrintKeyValue("{v.name}", obj.{v.name});\n')
+ if v.fixedSizeArray[0] == 'VK_LUID_SIZE':
+ out.append(' }\n')
+ elif struct.name == 'VkQueueFamilyGlobalPriorityProperties' and v.name == 'priorities':
+ out.append(f' ArrayWrapper arr(p,"{v.name}", obj.priorityCount);\n')
+ out.append(' for (uint32_t i = 0; i < obj.priorityCount; i++) {\n')
+ out.append(' if (p.Type() == OutputType::json)\n')
+ out.append(' p.PrintString(std::string("VK_") + VkQueueGlobalPriorityString(obj.priorities[i]));\n')
+ out.append(' else\n')
+ out.append(' p.PrintString(VkQueueGlobalPriorityString(obj.priorities[i]));\n')
+ out.append(' }\n')
+ elif len(v.fixedSizeArray) == 2:
+ out.append(f' {{\n ArrayWrapper arr(p,"{v.name}", ' + v.fixedSizeArray[0] + ');\n')
+ out.append(f' for (uint32_t i = 0; i < {v.fixedSizeArray[0]}; i++) {{\n')
+ out.append(f' for (uint32_t j = 0; j < {v.fixedSizeArray[1]}; j++) {{\n')
+ out.append(f' p.PrintElement(obj.{v.name}[i][j]); }} }}\n')
+ out.append(' }\n')
+ elif len(v.fixedSizeArray) == 1:
+ out.append(f' {{\n ArrayWrapper arr(p,"{v.name}", ' + v.fixedSizeArray[0] + ');\n')
+ out.append(f' for (uint32_t i = 0; i < {v.fixedSizeArray[0]}; i++) {{ p.PrintElement(obj.{v.name}[i]); }}\n')
+ out.append(' }\n')
+ else: # dynamic array length based on other member
+ out.append(f' if (obj.{v.length} == 0 || obj.{v.name} == nullptr) {{\n')
+ out.append(f' p.PrintKeyString("{v.name}", "NULL");\n')
+ out.append(' } else {\n')
+ out.append(f' ArrayWrapper arr(p,"{v.name}", obj.{v.length});\n')
+ out.append(f' for (uint32_t i = 0; i < obj.{v.length}; i++) {{\n')
+ out.append(f' Dump{v.type}(p, std::to_string(i), obj.{v.name}[i]);\n')
+ out.append(' }\n')
+ out.append(' }\n')
+ elif v.type == 'VkBool32':
+ out.append(f' p.PrintKeyBool("{v.name}", static_cast<bool>(obj.{v.name}));\n')
+ elif v.type == 'uint8_t':
+ # Cast so iostreams print a number instead of a character.
+ out.append(f' p.PrintKeyValue("{v.name}", static_cast<uint32_t>(obj.{v.name}));\n')
+ elif v.type == 'VkDeviceSize' or (v.type == 'uint32_t' and v.name in ['vendorID', 'deviceID']):
+ out.append(f' p.PrintKeyValue("{v.name}", to_hex_str(p, obj.{v.name}));\n')
+ elif v.type in PREDEFINED_TYPES:
+ out.append(f' p.PrintKeyValue("{v.name}", obj.{v.name});\n')
+ elif v.name not in NAMES_TO_IGNORE:
+ # if it is an enum/flag/bitmask
+ if v.type in ['VkFormatFeatureFlags', 'VkFormatFeatureFlags2']:
+ out.append(' p.SetOpenDetails();\n') # special case so that feature flags are open in html output
+ out.append(f' Dump{v.type}(p, "{v.name}", obj.{v.name});\n')
+
+ if struct.name in ['VkPhysicalDeviceLimits', 'VkPhysicalDeviceSparseProperties']:
+ out.append(' p.ObjectEnd();\n')
+ out.append('}\n')
+
+ out.append(self.AddGuardFooter(struct))
+ return out
+
+
+ # Emit a C++ operator<< overload that prints the struct as a compact
+ # "(member1,member2,...)" tuple — useful for one-line summaries.
+ def PrintStructShort(self,struct):
+ out = []
+ out.append(self.AddGuardHeader(struct))
+ out.append(f'std::ostream &operator<<(std::ostream &o, {struct.name} &obj) {{\n')
+ out.append(' return o << "(" << ')
+
+ # Track the first member so commas are only printed between members.
+ first = True
+ for v in struct.members:
+ if first:
+ first = False
+ out.append(f'obj.{v.name} << ')
+ else:
+ out.append(f'\',\' << obj.{v.name} << ')
+ out.append('")";\n')
+ out.append('}\n')
+ out.append(self.AddGuardFooter(struct))
+ return out
+
+ # Emit the C++ "<listName>_chain" struct plus its helpers:
+ # - the chain struct itself, owning one instance of every pNext-extension
+ # struct in structs_to_print (minus STRUCT_BLACKLIST),
+ # - initialize_chain(), which sets sTypes and links the eligible members
+ # into a pNext chain based on API version / enabled extensions,
+ # - setup_<listName>_chain(), which attaches the chain to a root struct,
+ # - optionally chain_iterator_<listName>() to dump a returned chain, and
+ # - prepare_<listName>_twocall_chain_vectors() for two-call-idiom arrays.
+ def PrintChainStruct(self, listName, structs_to_print, chain_details):
+ # Device chains gate structs on the GPU's API version; instance chains
+ # on the instance version.
+ version_desc = ''
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ version_desc = 'gpu.api_version'
+ else:
+ version_desc = 'inst.instance_version'
+
+ out = []
+
+ # use default constructor and delete copy & move operators
+ out.append(f'''struct {listName}_chain {{
+ {listName}_chain() = default;
+ {listName}_chain(const {listName}_chain &) = delete;
+ {listName}_chain& operator=(const {listName}_chain &) = delete;
+ {listName}_chain({listName}_chain &&) = delete;
+ {listName}_chain& operator=({listName}_chain &&) = delete;
+''')
+
+ out.append(' void* start_of_chain = nullptr;\n')
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+ out.append(self.AddGuardHeader(struct))
+ if struct.sType is not None:
+ out.append(f' {struct.name} {struct.name[2:]}{{}};\n')
+ # Specific driver versions ship an incorrect definition of the size of
+ # these structs. Artificially pad the structure so such a driver doesn't
+ # write out of bounds into adjacent members. The bug comes from the
+ # in-development version of the extension having a larger size than the
+ # final version, so older drivers try to write to members which don't exist.
+ if struct.name in ['VkPhysicalDeviceShaderIntegerDotProductFeatures', 'VkPhysicalDeviceHostImageCopyFeaturesEXT']:
+ out.append(f' char {struct.name}_padding[64];\n')
+ # Backing storage for dynamically-sized (two-call idiom) array members.
+ for member in struct.members:
+ if member.length is not None and len(member.fixedSizeArray) == 0:
+ out.append(f' std::vector<{member.type}> {struct.name}_{member.name};\n')
+ out.append(self.AddGuardFooter(struct))
+ out.append(' void initialize_chain(')
+ args = []
+ if chain_details.get('type') in [EXTENSION_TYPE_INSTANCE, EXTENSION_TYPE_BOTH]:
+ args.append('AppInstance &inst')
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ args.append('AppGpu &gpu')
+ if chain_details.get('can_show_promoted_structs'):
+ args.append('bool show_promoted_structs')
+ out.append(f'{", ".join(args)}) noexcept {{\n')
+ # First pass: assign each member its sType.
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+
+ out.append(self.AddGuardHeader(struct))
+ out.append(f' {struct.name[2:]}.sType = {struct.sType};\n')
+ out.append(self.AddGuardFooter(struct))
+
+ # Second pass: emit the runtime condition deciding whether each struct
+ # joins the chain (extension enabled and/or API version in range).
+ out.append(' std::vector<VkBaseOutStructure*> chain_members{};\n')
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+ out.append(self.AddGuardHeader(struct))
+
+ has_version = struct.version is not None
+ has_extNameStr = len(struct.extensions) > 0 or len(struct.aliases) > 0
+ if has_version or has_extNameStr:
+ out.append(' if (')
+ has_printed_condition = False
+ if has_extNameStr:
+ for ext in struct.extensions:
+ if has_printed_condition:
+ out.append('\n || ')
+ else:
+ has_printed_condition = True
+ if has_version:
+ out.append('(')
+ if self.vk.extensions[ext].device:
+ out.append(f'gpu.CheckPhysicalDeviceExtensionIncluded({self.vk.extensions[ext].nameString})')
+ elif self.vk.extensions[ext].instance:
+ out.append(f'inst.CheckExtensionEnabled({self.vk.extensions[ext].nameString})')
+ else:
+ assert False, 'Should never get here'
+ if has_version:
+ str_show_promoted_structs = '|| show_promoted_structs' if chain_details.get('can_show_promoted_structs') else ''
+ if struct.name in STRUCT_1_1_LIST:
+ out.append(f'{version_desc} == {struct.version.nameApi} {str_show_promoted_structs}')
+ elif has_printed_condition:
+ out.append(f')\n && ({version_desc} < {struct.version.nameApi} {str_show_promoted_structs})')
+ else:
+ out.append(f'({version_desc} >= {struct.version.nameApi})')
+ out.append(')\n ')
+ else:
+ out.append(' ')
+ out.append(f'chain_members.push_back(reinterpret_cast<VkBaseOutStructure*>(&{struct.name[2:]}));\n')
+ out.append(self.AddGuardFooter(struct))
+ # Parameter/argument lists reused by setup_* and chain_iterator_* below.
+ chain_param_list = []
+ chain_arg_list = []
+ if chain_details.get('type') in [EXTENSION_TYPE_INSTANCE, EXTENSION_TYPE_BOTH]:
+ chain_param_list.append('AppInstance &inst')
+ chain_arg_list.append('inst')
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ chain_param_list.append('AppGpu &gpu')
+ chain_arg_list.append('gpu')
+ if chain_details.get('can_show_promoted_structs'):
+ chain_param_list.append('bool show_promoted_structs')
+ chain_arg_list.append('show_promoted_structs')
+
+ # Link the selected members into a singly-linked pNext chain.
+ out.append(f'''
+ if (!chain_members.empty()) {{
+ for(size_t i = 0; i < chain_members.size() - 1; i++){{
+ chain_members[i]->pNext = chain_members[i + 1];
+ }}
+ start_of_chain = chain_members[0];
+ }}
+ }}
+}};
+void setup_{listName}_chain({chain_details['extends']}& start, std::unique_ptr<{listName}_chain>& chain, {','.join(chain_param_list)}){{
+ chain = std::unique_ptr<{listName}_chain>(new {listName}_chain());
+ chain->initialize_chain({','.join(chain_arg_list)});
+ start.pNext = chain->start_of_chain;
+}};
+''')
+ # Optional walker that dumps every recognized struct in a returned chain.
+ if chain_details.get('print_iterator'):
+ out.append('\n')
+ out.append(f'void chain_iterator_{listName}(')
+ args = ['Printer &p']
+ if chain_details.get('type') in [EXTENSION_TYPE_INSTANCE, EXTENSION_TYPE_BOTH]:
+ args.append('AppInstance &inst')
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ args.append('AppGpu &gpu')
+ if chain_details.get('can_show_promoted_structs'):
+ args.append('bool show_promoted_structs')
+ args.append('const void * place')
+ out.append(f'{", ".join(args)}) {{\n')
+ out.append(' while (place) {\n')
+ out.append(' const VkBaseOutStructure *structure = (const VkBaseOutStructure *)place;\n')
+ out.append(' p.SetSubHeader();\n')
+
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+
+ out.append(self.AddGuardHeader(struct))
+ out.append(f' if (structure->sType == {struct.sType}')
+ if struct.name in PORTABILITY_STRUCTS:
+ out.append(' && p.Type() != OutputType::json')
+ out.append(') {\n')
+ out.append(f' const {struct.name}* props = (const {struct.name}*)structure;\n')
+ out.extend(self.PrintStructNameDecisionLogic(struct, version_desc, chain_details.get('can_show_promoted_structs')))
+ out.append(' p.AddNewline();\n')
+ out.append(' }\n')
+ out.append(self.AddGuardFooter(struct))
+ out.append(' place = structure->pNext;\n')
+ out.append(' }\n')
+ out.append('}\n')
+
+ # Resize two-call-idiom vectors and point the struct members at them;
+ # returns whether any struct in this chain needs a second query call.
+ out.append('\n')
+ out.append(f'bool prepare_{listName}_twocall_chain_vectors(std::unique_ptr<{listName}_chain>& chain) {{\n')
+ out.append(' (void)chain;\n')
+ is_twocall = False
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+ has_length = False
+ for member in struct.members:
+ if member.length is not None and len(member.fixedSizeArray) == 0:
+ has_length = True
+ if not has_length:
+ continue
+ out.append(self.AddGuardHeader(struct))
+ for member in struct.members:
+ if member.length is not None and len(member.fixedSizeArray) == 0:
+ out.append(f' chain->{struct.name}_{member.name}.resize(chain->{struct.name[2:]}.{member.length});\n')
+ out.append(f' chain->{struct.name[2:]}.{member.name} = chain->{struct.name}_{member.name}.data();\n')
+ out.append(self.AddGuardFooter(struct))
+ is_twocall = True
+ out.append(f' return {"true" if is_twocall else "false"};\n')
+ out.append('}\n')
+
+ return out
+
+ # Return the C++ runtime-check expression for the extension whose vendor
+ # suffix (e.g. "KHR", "EXT") matches the end of structName, or None when no
+ # extension of the struct matches. Device extensions check the GPU's list,
+ # instance extensions check the enabled instance extensions.
+ def GetStructCheckStringForMatchingExtension(self, struct, structName):
+ for ext_name in struct.extensions:
+ ext = self.vk.extensions[ext_name]
+ # Vendor tag is the second token of "VK_<VENDOR>_<name>".
+ vendor = ext.name.split('_')[1]
+ if structName.endswith(vendor):
+ if ext.device:
+ return f'gpu.CheckPhysicalDeviceExtensionIncluded({ext.nameString})'
+ elif ext.instance:
+ return f'inst.CheckExtensionEnabled({ext.nameString})'
+ return None
+
+ # Function is complex because it has to do the following:
+ # Always print the struct with the most appropriate name given the gpu api version & enabled instance/device extensions
+ # Print struct aliases when --show-promoted-structs is set
+ # Don't let alias printing duplicate the most appropriate name
+ def PrintStructNameDecisionLogic(self, struct, version_desc, can_show_promoted_structs):
+ out = []
+ out.append(f'{" " * 12}const char* name = ')
+ # Get a list of all the conditions to check and the type name to use
+ check_list = []
+ if struct.version is not None:
+ check_list.append([f'{version_desc} >= {struct.version.nameApi}', struct.name])
+ else:
+ check_list.append([f'{self.GetStructCheckStringForMatchingExtension(struct, struct.name)}', struct.name])
+
+ for alias in struct.aliases:
+ ext_str = self.GetStructCheckStringForMatchingExtension(struct, alias)
+ if ext_str is not None:
+ check_list.append([f'{self.GetStructCheckStringForMatchingExtension(struct, alias)}', alias])
+ end_parens = ''
+ # Turn the conditions into a nested ternary condition -
+ # "cond0 ? "Name0" : (cond1 ? "Name1" : ("LastName"))" — the final entry
+ # is the unconditional fallback, and end_parens closes every "(" opened.
+ for check in check_list:
+ if check == check_list[-1]:
+ out.append( f'"{check[1]}"')
+ else:
+ out.append( f'{check[0]} ? "{check[1]}" : (')
+ end_parens += ')'
+ out.append(f'{end_parens};\n')
+ out.append(f'{" " * 12}Dump{struct.name}(p, name, *props);\n')
+ if not can_show_promoted_structs:
+ return out
+ # Optionally dump each alias too — guarded by strcmp so the name chosen
+ # above is never printed twice.
+ for alias in struct.aliases:
+ ext_str = self.GetStructCheckStringForMatchingExtension(struct, alias)
+ if ext_str is not None:
+ out.append(f'{" " * 12}if (show_promoted_structs && strcmp(name, "{alias}") != 0 && {ext_str}) {{\n')
+ out.append(f'{" " * 16}p.AddNewline();\n')
+ out.append(f'{" " * 16}p.SetSubHeader();\n')
+ out.append(f'{" " * 16}Dump{struct.name}(p, "{alias}", *props);\n')
+ out.append(f'{" " * 12}}}\n')
+ return out
+
+ # Emit the forward declaration for the operator== defined by
+ # PrintStructComparison below; the two signatures must stay in sync.
+ # NOTE(review): the second parameter is "const {name} b" (by value, no '&')
+ # in both — looks like a missing '&'; harmless but copies the struct. Confirm
+ # before changing, since both emitters must agree.
+ def PrintStructComparisonForwardDecl(self,structure):
+ out = []
+ out.append(f'bool operator==(const {structure.name} & a, const {structure.name} b);\n')
+ return out
+
+
+ # Emit a C++ operator== comparing every member of the struct except those in
+ # NAMES_TO_IGNORE (e.g. sType/pNext-style bookkeeping fields), joined with &&.
+ def PrintStructComparison(self,structure):
+ out = []
+ out.append(f'bool operator==(const {structure.name} & a, const {structure.name} b) {{\n')
+ out.append(' return ')
+ # is_first suppresses the "&&" before the first compared member.
+ is_first = True
+ for m in structure.members:
+ if m.name not in NAMES_TO_IGNORE:
+ if not is_first:
+ out.append('\n && ')
+ else:
+ is_first = False
+ out.append(f'a.{m.name} == b.{m.name}')
+ out.append(';\n')
+ out.append('}\n')
+ return out
+
+# Plain data holder describing a contiguous range of VkFormat enum values and
+# the requirements (minimum instance version and/or extensions) under which
+# those formats are available.
+class VulkanFormatRange:
+ def __init__(self, min_inst_version, extensions, first, last):
+ # Minimum instance API version needed for this range (or falsy/None).
+ self.minimum_instance_version = min_inst_version
+ # Extensions that alternatively enable this range.
+ self.extensions = extensions
+ # First and last VkFormat values of the range, inclusive.
+ self.first_format = first
+ self.last_format = last
diff --git a/tools/Vulkan-Tools/scripts/gn/DEPS b/tools/Vulkan-Tools/scripts/gn/DEPS
new file mode 100644
index 00000000..8cf931a8
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/gn/DEPS
@@ -0,0 +1,68 @@
+# gclient DEPS file: pins the Chromium build toolchain pieces (build/,
+# buildtools/, gn, clang, ninja) used by the GN build of Vulkan-Tools.
+gclient_gn_args_file = 'build/config/gclient_args.gni'
+
+# Shared values referenced by the deps entries below.
+vars = {
+ 'chromium_git': 'https://chromium.googlesource.com',
+ 'ninja_version': 'version:2@1.11.1.chromium.6',
+}
+
+deps = {
+
+ 'build': {
+ 'url': '{chromium_git}/chromium/src/build.git@1015724d82945f9ef7e51c6f804034ccf5f79951',
+ },
+
+ 'buildtools': {
+ 'url': '{chromium_git}/chromium/src/buildtools.git@3c7e3f1b8b1e4c0b6ec693430379cea682de78d6',
+ },
+
+ # Prebuilt gn binary, fetched from CIPD on Linux hosts only.
+ 'buildtools/linux64': {
+ 'packages': [
+ {
+ 'package': 'gn/gn/linux-${{arch}}',
+ 'version': 'git_revision:5e19d2fb166fbd4f6f32147fbb2f497091a54ad8',
+ }
+ ],
+ 'dep_type': 'cipd',
+ 'condition': 'host_os == "linux"',
+ },
+
+ 'testing': {
+ 'url': '{chromium_git}/chromium/src/testing@949b2864b6bd27656753b917c9aa7731dc7a06f6',
+ },
+
+ 'tools/clang': {
+ 'url': '{chromium_git}/chromium/src/tools/clang.git@566877f1ff1a5fa6beaca3ab4b47bd0b92eb614f',
+ },
+
+ # Prebuilt ninja for the host platform, version pinned in vars above.
+ 'third_party/ninja': {
+ 'packages': [
+ {
+ 'package': 'infra/3pp/tools/ninja/${{platform}}',
+ 'version': Var('ninja_version'),
+ }
+ ],
+ 'dep_type': 'cipd',
+ },
+
+}
+
+# Post-sync hooks: install the Linux sysroot and the pinned clang toolchain.
+hooks = [
+ {
+ 'name': 'sysroot_x64',
+ 'pattern': '.',
+ 'condition': 'checkout_linux and checkout_x64',
+ 'action': ['python3', 'build/linux/sysroot_scripts/install-sysroot.py',
+ '--arch=x64'],
+ },
+ {
+ # Note: On Win, this should run after win_toolchain, as it may use it.
+ 'name': 'clang',
+ 'pattern': '.',
+ 'action': ['python3', 'tools/clang/scripts/update.py'],
+ },
+]
+
+recursedeps = [
+ # buildtools provides clang_format.
+ 'buildtools',
+]
diff --git a/tools/Vulkan-Tools/scripts/gn/generate_vulkan_icd_json.py b/tools/Vulkan-Tools/scripts/gn/generate_vulkan_icd_json.py
new file mode 100755
index 00000000..467ba616
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/gn/generate_vulkan_icd_json.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2022-2023 LunarG, Inc.
+# Copyright (C) 2016 The ANGLE Project Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate copies of the Vulkan layers JSON files, with no paths, forcing
+Vulkan to use the default search path to look for layers."""
+
+from __future__ import print_function
+
+import argparse
+import glob
+import json
+import os
+import platform
+import sys
+
+# Normalizes path separators so the set comparisons against the gn-provided
+# file lists below work the same on Windows and POSIX.
+def glob_slash(dirname):
+ r"""Like regular glob but replaces \ with / in returned paths."""
+ return [s.replace('\\', '/') for s in glob.glob(dirname)]
+
+def main():
+ # Copies ICD/layer manifest *.json files into target_dir with their
+ # library_path reduced to a bare filename, and expands *.json.in templates
+ # by substituting @JSON_LIBRARY_PATH@. Returns nonzero (for sys.exit) on
+ # any mismatch between the gn-supplied file list and what is on disk.
+ parser = argparse.ArgumentParser(description=__doc__)
+ # --icd selects the "ICD" manifest key instead of "layer".
+ parser.add_argument('--icd', action='store_true')
+ parser.add_argument('--no-path-prefix', action='store_true')
+ parser.add_argument('--platform', type=str, default=platform.system(),
+ help='Target platform to build validation layers for: '
+ 'Linux|Darwin|Windows|Fuchsia|...')
+ parser.add_argument('source_dir')
+ parser.add_argument('target_dir')
+ parser.add_argument('json_files', nargs='*')
+ args = parser.parse_args()
+
+ source_dir = args.source_dir
+ target_dir = args.target_dir
+
+ # Split the gn-supplied list into finished manifests and templates.
+ json_files = [j for j in args.json_files if j.endswith('.json')]
+ json_in_files = [j for j in args.json_files if j.endswith('.json.in')]
+
+ data_key = 'ICD' if args.icd else 'layer'
+
+ if not os.path.isdir(source_dir):
+ print(source_dir + ' is not a directory.', file=sys.stderr)
+ return 1
+
+ if not os.path.exists(target_dir):
+ os.makedirs(target_dir)
+
+ # Copy the *.json files from source dir to target dir
+ # (fail loudly if the gn file list and the directory contents diverge).
+ if (set(glob_slash(os.path.join(source_dir, '*.json'))) != set(json_files)):
+ print(glob.glob(os.path.join(source_dir, '*.json')))
+ print('.json list in gn file is out-of-date', file=sys.stderr)
+ return 1
+
+ for json_fname in json_files:
+ if not json_fname.endswith('.json'):
+ continue
+ with open(json_fname) as infile:
+ data = json.load(infile)
+
+ # Update the path.
+ if not data_key in data:
+ raise Exception(
+ "Could not find '%s' key in %s" % (data_key, json_fname))
+
+ # The standard validation layer has no library path.
+ if 'library_path' in data[data_key]:
+ # Strip any directory part so the loader uses its default search path.
+ prev_name = os.path.basename(data[data_key]['library_path'])
+ data[data_key]['library_path'] = prev_name
+
+ target_fname = os.path.join(target_dir, os.path.basename(json_fname))
+ with open(target_fname, 'w') as outfile:
+ json.dump(data, outfile)
+
+ # Set json file prefix and suffix for generating files, default to Linux.
+ if args.no_path_prefix:
+ relative_path_prefix = ''
+ elif args.platform == 'Windows':
+ relative_path_prefix = r'..\\' # json-escaped, hence two backslashes.
+ else:
+ # '../lib' (no slash): the 'lib' completes the library file name, e.g.
+ # '../lib' + 'VkLayer_foo.so' -> '../libVkLayer_foo.so'.
+ relative_path_prefix = '../lib'
+ file_type_suffix = '.so'
+ if args.platform == 'Windows':
+ file_type_suffix = '.dll'
+ elif args.platform == 'Darwin':
+ file_type_suffix = '.dylib'
+
+ # For each *.json.in template files in source dir generate actual json file
+ # in target dir
+ if (set(glob_slash(os.path.join(source_dir, '*.json.in'))) !=
+ set(json_in_files)):
+ print('.json.in list in gn file is out-of-date', file=sys.stderr)
+ return 1
+ for json_in_name in json_in_files:
+ if not json_in_name.endswith('.json.in'):
+ continue
+ json_in_fname = os.path.basename(json_in_name)
+ # 'Foo.json.in' -> library 'Foo<suffix>' and output 'Foo.json'.
+ layer_name = json_in_fname[:-len('.json.in')]
+ layer_lib_name = layer_name + file_type_suffix
+ json_out_fname = os.path.join(target_dir, json_in_fname[:-len('.in')])
+ with open(json_out_fname,'w') as json_out_file, \
+ open(json_in_name) as infile:
+ for line in infile:
+ line = line.replace('@JSON_LIBRARY_PATH@', relative_path_prefix + layer_lib_name)
+ json_out_file.write(line)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/tools/Vulkan-Tools/scripts/gn/gn.py b/tools/Vulkan-Tools/scripts/gn/gn.py
new file mode 100755
index 00000000..52b20c58
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/gn/gn.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+# Copyright 2023 The Khronos Group Inc.
+# Copyright 2023 Valve Corporation
+# Copyright 2023 LunarG, Inc.
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import os
+import subprocess
+import sys
+
+# helper to define paths relative to the repo root
+# (this script lives in scripts/gn/, hence the '../../' hop)
+def RepoRelative(path):
+ return os.path.abspath(os.path.join(os.path.dirname(__file__), '../../', path))
+
+# Fetch depot_tools if absent, sync dependencies, then run gn + ninja for a
+# Debug build in out/Debug.
+# NOTE(review): subprocess.call() discards the child's exit status, so a
+# failing step does not stop the sequence; the CalledProcessError handler in
+# main() can only fire if these become check_call/run(check=True) — confirm
+# whether best-effort behavior is intended.
+def BuildGn():
+ if not os.path.exists(RepoRelative("depot_tools")):
+ print("Cloning Chromium depot_tools\n", flush=True)
+ clone_cmd = 'git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git depot_tools'.split(" ")
+ subprocess.call(clone_cmd)
+
+ # Prepend-less PATH extension; ':' is the POSIX separator, so this script
+ # presumably targets Linux/macOS only (Windows would need os.pathsep).
+ os.environ['PATH'] = os.environ.get('PATH') + ":" + RepoRelative("depot_tools")
+
+ print("Updating Repo Dependencies and GN Toolchain\n", flush=True)
+ update_cmd = './scripts/gn/update_deps.sh'
+ subprocess.call(update_cmd)
+
+ print("Checking Header Dependencies\n", flush=True)
+ gn_check_cmd = 'gn gen --check out/Debug'.split(" ")
+ subprocess.call(gn_check_cmd)
+
+ print("Generating Ninja Files\n", flush=True)
+ gn_gen_cmd = 'gn gen out/Debug'.split(" ")
+ subprocess.call(gn_gen_cmd)
+
+ print("Running Ninja Build\n", flush=True)
+ ninja_build_cmd = 'ninja -C out/Debug'.split(" ")
+ subprocess.call(ninja_build_cmd)
+
+#
+# Module Entrypoint
+def main():
+ try:
+ BuildGn()
+
+ except subprocess.CalledProcessError as proc_error:
+ print('Command "%s" failed with return code %s' % (' '.join(proc_error.cmd), proc_error.returncode))
+ sys.exit(proc_error.returncode)
+ except Exception as unknown_error:
+ print('An unkown error occured: %s', unknown_error)
+ sys.exit(1)
+
+ sys.exit(0)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/build.gni b/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/build.gni
new file mode 100644
index 00000000..dbf47039
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/build.gni
@@ -0,0 +1,18 @@
+# Copyright (c) 2019-2023 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Build-system overrides consumed by Chromium's //build when used standalone.
+build_with_chromium = false
+ignore_elf32_limitations = true
+linux_use_bundled_binutils_override = false
+use_system_xcode = true
diff --git a/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_headers.gni b/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_headers.gni
new file mode 100644
index 00000000..5f24b39e
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_headers.gni
@@ -0,0 +1,15 @@
+# Copyright (c) 2020-2023 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Enable X11 (XCB/Xlib) WSI support in the Vulkan-Headers GN build.
+vulkan_use_x11 = true
diff --git a/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_tools.gni b/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_tools.gni
new file mode 100644
index 00000000..c62fb64d
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/gn/secondary/build_overrides/vulkan_tools.gni
@@ -0,0 +1,21 @@
+# Copyright (c) 2019-2023 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Paths to vulkan tools dependencies
+# (Vulkan-Headers is checked out by scripts/update_deps.py into external/)
+vulkan_headers_dir = "//external/Vulkan-Headers"
+
+# Subdirectories for generated files
+# (empty = place generated data/code at the root of the output directory)
+vulkan_data_subdir = ""
+vulkan_gen_subdir = ""
+
diff --git a/tools/Vulkan-Tools/scripts/gn/update_deps.sh b/tools/Vulkan-Tools/scripts/gn/update_deps.sh
new file mode 100755
index 00000000..763c3058
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/gn/update_deps.sh
@@ -0,0 +1,49 @@
+#!/bin/sh
+
+# Copyright (c) 2019-2023 LunarG, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Execute at repo root
+cd "$(dirname $0)/../../"
+
+# Use update_deps.py to update source dependencies from /scripts/known_good.json
+scripts/update_deps.py --dir="external" --no-build
+
+cat << EOF > .gn
+buildconfig = "//build/config/BUILDCONFIG.gn"
+secondary_source = "//scripts/gn/secondary/"
+
+script_executable = "python3"
+
+default_args = {
+ clang_use_chrome_plugins = false
+ use_custom_libcxx = false
+}
+EOF
+
+# Use gclient to update toolchain dependencies from /scripts/gn/DEPS (from chromium)
+cat << EOF >> .gclient
+solutions = [
+ { "name" : ".",
+ "url" : "https://github.com/KhronosGroup/Vulkan-Tools",
+ "deps_file" : "scripts/gn/DEPS",
+ "managed" : False,
+ "custom_deps" : {
+ },
+ "custom_vars": {},
+ },
+]
+EOF
+gclient sync
+
diff --git a/tools/Vulkan-Tools/scripts/known_good.json b/tools/Vulkan-Tools/scripts/known_good.json
new file mode 100644
index 00000000..14e6f5b8
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/known_good.json
@@ -0,0 +1,79 @@
+{
+ "repos": [
+ {
+ "name": "Vulkan-Headers",
+ "api": "vulkan",
+ "url": "https://github.com/KhronosGroup/Vulkan-Headers.git",
+ "sub_dir": "Vulkan-Headers",
+ "build_dir": "Vulkan-Headers/build",
+ "install_dir": "Vulkan-Headers/build/install",
+ "commit": "v1.4.347"
+ },
+ {
+ "name": "MoltenVK",
+ "url": "https://github.com/KhronosGroup/MoltenVK.git",
+ "sub_dir": "MoltenVK",
+ "build_dir": "MoltenVK",
+ "install_dir": "MoltenVK",
+ "commit": "v1.4.1",
+ "custom_build": [
+ "./fetchDependencies --macos",
+ "xcodebuild -project MoltenVKPackaging.xcodeproj GCC_PREPROCESSOR_DEFINITIONS='$GCC_PREPROCESSOR_DEFINITIONS MVK_CONFIG_LOG_LEVEL=1' -scheme \"MoltenVK Package (macOS only)\" build"
+ ],
+ "build_step": "custom",
+ "build_platforms": [
+ "darwin"
+ ]
+ },
+ {
+ "name": "googletest",
+ "url": "https://github.com/google/googletest.git",
+ "sub_dir": "googletest",
+ "build_dir": "googletest/build",
+ "install_dir": "googletest/build/install",
+ "cmake_options": [
+ "-DBUILD_GMOCK=OFF",
+ "-Dgtest_force_shared_crt=ON",
+ "-DBUILD_SHARED_LIBS=OFF"
+ ],
+ "build_platforms": [
+ "windows",
+ "linux",
+ "darwin"
+ ],
+ "commit": "v1.14.0",
+ "optional": [
+ "tests"
+ ]
+ },
+ {
+ "name": "Vulkan-Loader",
+ "api": "vulkan",
+ "url": "https://github.com/KhronosGroup/Vulkan-Loader.git",
+ "sub_dir": "Vulkan-Loader",
+ "build_dir": "Vulkan-Loader/build",
+ "install_dir": "Vulkan-Loader/build/install",
+ "cmake_options": [
+ "-DLOADER_USE_UNSAFE_FILE_SEARCH=ON"
+ ],
+ "commit": "v1.4.347",
+ "build_platforms": [
+ "windows",
+ "linux",
+ "darwin"
+ ],
+ "deps": [
+ {
+ "var_name": "VULKAN_HEADERS_INSTALL_DIR",
+ "repo_name": "Vulkan-Headers"
+ }
+ ]
+ }
+ ],
+ "install_names": {
+ "Vulkan-Headers": "VULKAN_HEADERS_INSTALL_DIR",
+ "MoltenVK": "MOLTENVK_REPO_ROOT",
+ "googletest": "GOOGLETEST_INSTALL_DIR",
+ "Vulkan-Loader": "VULKAN_LOADER_INSTALL_DIR"
+ }
+}
diff --git a/tools/Vulkan-Tools/scripts/kvt_genvk.py b/tools/Vulkan-Tools/scripts/kvt_genvk.py
new file mode 100644
index 00000000..868cc3fb
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/kvt_genvk.py
@@ -0,0 +1,416 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2013-2025 The Khronos Group Inc.
+# Copyright (c) 2023-2025 RasterGrid Kft.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import pdb
+import sys
+import time
+import os
+
+# Simple timer functions
+startTime = None
+
+
def startTimer(timeit):
    """Record the current process CPU time in the module-level startTime.

    Does nothing when 'timeit' is falsy (timing disabled).
    """
    global startTime
    if not timeit:
        return
    startTime = time.process_time()
+
+
def endTimer(timeit, msg):
    """Report elapsed CPU time since startTimer() via write(), then reset.

    Does nothing when 'timeit' is falsy. Output goes to stderr as
    '<msg> <seconds>'.
    """
    global startTime
    if not timeit:
        return
    elapsed = time.process_time() - startTime
    write(msg, elapsed, file=sys.stderr)
    startTime = None
+
+# Turn a list of strings into a regexp string matching exactly those strings
+
+
def makeREstring(list, default=None):
    """Build an anchored regexp matching exactly the given name strings.

    When the name list is empty and a non-None 'default' pattern is
    supplied, the default is returned unchanged instead.
    """
    if default is not None and len(list) == 0:
        return default
    return '^(' + '|'.join(list) + ')$'
+
+# Returns a directory of [ generator function, generator options ] indexed
+# by specified short names. The generator options incorporate the following
+# parameters:
+#
+# args is a parsed argument object; see below for the fields that are used.
+
+
def makeGenOpts(args):
    """Populate the module-level genOpts dictionary.

    genOpts maps each known target filename to a
    [generator class, generator options] pair built from the parsed
    command-line arguments. Known targets: 'vk_typemap_helper.h',
    'function_declarations.h', 'function_definitions.h',
    'vulkaninfo.hpp'.

    'args' is a parsed argument object; the fields used are:
    api, defaultExtensions, extension, removeExtensions, emitExtensions,
    feature, protect, directory, genpath, registry.
    """
    global genOpts
    genOpts = {}

    # API to generate sources for
    apiname = args.api

    # Default class of extensions to include, or None
    # (falls back to the API name, e.g. 'vulkan')
    if args.defaultExtensions is not None:
        defaultExtensions = args.defaultExtensions
    else:
        defaultExtensions = apiname

    # Additional extensions to include (list of extensions)
    extensions = args.extension

    # Extensions to remove (list of extensions)
    removeExtensions = args.removeExtensions

    # Extensions to emit (list of extensions)
    emitExtensions = args.emitExtensions

    # Features to include (list of features)
    features = args.feature

    # Whether to disable inclusion protect in headers
    protect = args.protect

    # Output target directory
    directory = args.directory

    # Path to generated files, particularly api.py
    genpath = args.genpath

    # Descriptive names for various regexp patterns used to select
    # versions and extensions
    allFeatures = allExtensions = '.*'
    noFeatures = noExtensions = None

    # Turn lists of names/patterns into matching regular expressions.
    # Empty lists fall back to the given default pattern (or None).
    addExtensionsPat = makeREstring(extensions, None)
    removeExtensionsPat = makeREstring(removeExtensions, None)
    emitExtensionsPat = makeREstring(emitExtensions, allExtensions)
    featuresPat = makeREstring(features, allFeatures)

    # Copyright text prefixing all headers (list of strings).
    prefixStrings = [
        '/*',
        '** Copyright (c) 2015-2025 The Khronos Group Inc.',
        '**',
        '** Licensed under the Apache License, Version 2.0 (the "License");',
        '** you may not use this file except in compliance with the License.',
        '** You may obtain a copy of the License at',
        '**',
        '** http://www.apache.org/licenses/LICENSE-2.0',
        '**',
        '** Unless required by applicable law or agreed to in writing, software',
        '** distributed under the License is distributed on an "AS IS" BASIS,',
        '** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.',
        '** See the License for the specific language governing permissions and',
        '** limitations under the License.',
        '*/',
        ''
    ]

    # Text specific to Vulkan headers
    vkPrefixStrings = [
        '/*',
        '** This header is generated from the Khronos Vulkan XML API Registry.',
        '**',
        '*/',
        ''
    ]

    # Defaults for generating re-inclusion protection wrappers (or not)
    protectFeature = protect

    # An API style conventions object
    conventions = VulkanConventions()

    # Helper file generator options for typemap_helper.h
    genOpts['vk_typemap_helper.h'] = [
        HelperFileOutputGenerator,
        HelperFileOutputGeneratorOptions(
            conventions=conventions,
            filename='vk_typemap_helper.h',
            directory=directory,
            genpath=None,
            apiname=apiname,
            profile=None,
            versions=featuresPat,
            emitversions=featuresPat,
            defaultExtensions=defaultExtensions,
            addExtensions=addExtensionsPat,
            removeExtensions=removeExtensionsPat,
            emitExtensions=emitExtensionsPat,
            prefixText=prefixStrings + vkPrefixStrings,
            protectFeature=False,
            apicall='VKAPI_ATTR ',
            apientry='VKAPI_CALL ',
            apientryp='VKAPI_PTR *',
            alignFuncParam=48,
            expandEnumerants=False,
            helper_file_type='typemap_helper_header')
    ]

    # Options for mock ICD header
    genOpts['function_declarations.h'] = [
        MockICDOutputGenerator,
        MockICDGeneratorOptions(
            conventions=conventions,
            filename='function_declarations.h',
            directory=directory,
            genpath=None,
            apiname=apiname,
            profile=None,
            versions=featuresPat,
            emitversions=featuresPat,
            defaultExtensions=defaultExtensions,
            addExtensions=addExtensionsPat,
            removeExtensions=removeExtensionsPat,
            emitExtensions=emitExtensionsPat,
            prefixText=prefixStrings + vkPrefixStrings,
            protectFeature=False,
            apicall='VKAPI_ATTR ',
            apientry='VKAPI_CALL ',
            apientryp='VKAPI_PTR *',
            alignFuncParam=48,
            expandEnumerants=False,
            helper_file_type='mock_icd_function_declaration_implementation')
    ]

    # Options for mock ICD cpp
    genOpts['function_definitions.h'] = [
        MockICDOutputGenerator,
        MockICDGeneratorOptions(
            conventions=conventions,
            filename='function_definitions.h',
            directory=directory,
            genpath=None,
            apiname=apiname,
            profile=None,
            versions=featuresPat,
            emitversions=featuresPat,
            defaultExtensions=defaultExtensions,
            addExtensions=addExtensionsPat,
            removeExtensions=removeExtensionsPat,
            emitExtensions=emitExtensionsPat,
            prefixText=prefixStrings + vkPrefixStrings,
            protectFeature=False,
            apicall='VKAPI_ATTR ',
            apientry='VKAPI_CALL ',
            apientryp='VKAPI_PTR *',
            alignFuncParam=48,
            expandEnumerants=False,
            helper_file_type='mock_icd_function_definition_implementation')
    ]

    # Options for vulkaninfo.hpp
    genOpts['vulkaninfo.hpp'] = [
        VulkanInfoGenerator,
        VulkanInfoGeneratorOptions(
            conventions=conventions,
            filename='vulkaninfo.hpp',
            directory=directory,
            genpath=None,
            apiname=apiname,
            profile=None,
            versions=featuresPat,
            emitversions=featuresPat,
            defaultExtensions=defaultExtensions,
            addExtensions=addExtensionsPat,
            removeExtensions=removeExtensionsPat,
            emitExtensions=emitExtensionsPat,
            prefixText=prefixStrings + vkPrefixStrings,
            protectFeature=False,
            apicall='VKAPI_ATTR ',
            apientry='VKAPI_CALL ',
            apientryp='VKAPI_PTR *',
            alignFuncParam=48,
            expandEnumerants=False,
            registryFile=args.registry)
    ]
+
+
def genTarget(args):
    """Generate a target based on the options in the matching genOpts{} entry.

    This is encapsulated in a function so it can be profiled and/or timed.
    The 'args' parameter is a parsed argument object; the fields used are:
      target    - target to generate
      directory - directory to generate it in
      quiet     - suppress progress output when True

    Returns a (generator, options) tuple on success, or None when the
    requested target is unknown.
    """
    global genOpts

    # Create generator options with specified parameters
    makeGenOpts(args)

    if args.target in genOpts:
        createGenerator = genOpts[args.target][0]
        options = genOpts[args.target][1]

        if not args.quiet:
            write('* Building', options.filename, file=sys.stderr)
            write('* options.apiname =', options.apiname, file=sys.stderr)
            write('* options.versions =', options.versions, file=sys.stderr)
            write('* options.emitversions =', options.emitversions, file=sys.stderr)
            write('* options.defaultExtensions =', options.defaultExtensions, file=sys.stderr)
            write('* options.addExtensions =', options.addExtensions, file=sys.stderr)
            write('* options.removeExtensions =', options.removeExtensions, file=sys.stderr)
            write('* options.emitExtensions =', options.emitExtensions, file=sys.stderr)

        gen = createGenerator(errFile=errWarn,
                              warnFile=errWarn,
                              diagFile=diag)
        if not args.quiet:
            write('* Generated', options.filename, file=sys.stderr)
        return (gen, options)
    else:
        write('No generator options for unknown target:',
              args.target, file=sys.stderr)
        # Fix: the original returned the undefined name 'none', which would
        # raise a NameError instead of signalling failure to the caller.
        return None
+
+# -feature name
+# -extension name
+# For both, "name" may be a single name, or a space-separated list
+# of names, or a regular expression.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()

    parser.add_argument('-api', action='store',
                        default='vulkan',
                        choices=['vulkan', 'vulkansc'],
                        help='Specify API name to generate')
    parser.add_argument('-defaultExtensions', action='store',
                        default=None,
                        help='Specify a single class of extensions to add to targets')
    parser.add_argument('-directory', action='store', default='.',
                        help='Specify where the built file is place')
    parser.add_argument('-extension', action='append',
                        default=[],
                        help='Specify an extension or extensions to add to targets')
    parser.add_argument('-removeExtensions', action='append',
                        default=[],
                        help='Specify an extension or extensions to remove from targets')
    parser.add_argument('-emitExtensions', action='append',
                        default=[],
                        help='Specify an extension or extensions to emit in targets')
    parser.add_argument('-feature', action='append',
                        default=[],
                        help='Specify a core API feature name or names to add to targets')
    parser.add_argument('-debug', action='store_true',
                        help='Enable debugging')
    parser.add_argument('-dump', action='store_true',
                        help='Enable dump to stderr')
    parser.add_argument('-diagfile', action='store',
                        default=None,
                        help='Write diagnostics to specified file')
    parser.add_argument('-errfile', action='store',
                        default=None,
                        help='Write errors and warnings to specified file instead of stderr')
    parser.add_argument('-noprotect', dest='protect', action='store_false',
                        help='Disable inclusion protection in output headers')
    parser.add_argument('-profile', action='store_true',
                        help='Enable profiling')
    parser.add_argument('-registry', action='store',
                        default='vk.xml',
                        help='Use specified registry file instead of vk.xml')
    parser.add_argument('-time', action='store_true',
                        help='Enable timing')
    parser.add_argument('-validate', action='store_true',
                        help='Enable XML group validation')
    parser.add_argument('-genpath', action='store', default='gen',
                        help='Path to generated files')
    parser.add_argument('-o', action='store', dest='directory',
                        default='.',
                        help='Create target and related files in specified directory')
    parser.add_argument('target', metavar='target', nargs='?',
                        help='Specify target')
    parser.add_argument('-quiet', action='store_true', default=True,
                        help='Suppress script output during normal execution.')
    parser.add_argument('-verbose', action='store_false', dest='quiet', default=True,
                        help='Enable script output during normal execution.')

    # This argument tells us where to load the script from the Vulkan-Headers registry
    parser.add_argument('-scripts', action='store',
                        help='Find additional scripts in this directory')

    args = parser.parse_args()

    # Default the scripts path to the directory containing the registry file
    if not args.scripts:
        args.scripts = os.path.dirname(args.registry)

    # Make the registry scripts (reg.py, generator.py, ...) importable.
    scripts_directory_path = os.path.dirname(os.path.abspath(__file__))
    registry_headers_path = os.path.join(scripts_directory_path, args.scripts)
    sys.path.insert(0, registry_headers_path)

    from reg import *
    from generator import write
    from cgenerator import CGeneratorOptions, COutputGenerator

    # Generator Modifications
    from generators.mock_icd_generator import MockICDGeneratorOptions, MockICDOutputGenerator
    from generators.vulkan_tools_helper_file_generator import HelperFileOutputGenerator, HelperFileOutputGeneratorOptions
    from generators.vulkaninfo_generator import VulkanInfoGenerator, VulkanInfoGeneratorOptions
    # Temporary workaround for vkconventions python2 compatibility
    import abc
    abc.ABC = abc.ABCMeta('ABC', (object,), {})
    from vkconventions import VulkanConventions

    # This splits arguments which are space-separated lists
    args.feature = [name for arg in args.feature for name in arg.split()]
    args.extension = [name for arg in args.extension for name in arg.split()]

    # create error/warning & diagnostic files
    if (args.errfile):
        errWarn = open(args.errfile, 'w', encoding='utf-8')
    else:
        errWarn = sys.stderr

    if (args.diagfile):
        diag = open(args.diagfile, 'w', encoding='utf-8')
    else:
        diag = None

    # Create the API generator & generator options
    (gen, options) = genTarget(args)

    # Create the registry object with the specified generator and generator
    # options. The options are set before XML loading as they may affect it.
    reg = Registry(gen, options)

    # Parse the specified registry XML into an ElementTree object
    startTimer(args.time)
    tree = etree.parse(args.registry)
    endTimer(args.time, '* Time to make ElementTree =')

    # Load the XML tree into the registry object
    startTimer(args.time)
    reg.loadElementTree(tree)
    endTimer(args.time, '* Time to parse ElementTree =')

    if (args.validate):
        reg.validateGroups()

    if (args.dump):
        write('* Dumping registry to regdump.txt', file=sys.stderr)
        reg.dumpReg(filehandle = open('regdump.txt', 'w', encoding='utf-8'))

    # Finally, use the output generator to create the requested target
    if (args.debug):
        pdb.run('reg.apiGen()')
    else:
        startTimer(args.time)
        reg.apiGen()
        endTimer(args.time, '* Time to generate ' + options.filename + ' =')
    # Fix: removed a stray trailing 'genTarget(args)' call. It re-built the
    # option tables and instantiated a second generator after the target had
    # already been produced, doing duplicate work (and duplicate logging in
    # verbose mode) with no effect on the output.
diff --git a/tools/Vulkan-Tools/scripts/update_deps.py b/tools/Vulkan-Tools/scripts/update_deps.py
new file mode 100755
index 00000000..7d300865
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/update_deps.py
@@ -0,0 +1,804 @@
+#!/usr/bin/env python3
+
+# Copyright 2017 The Glslang Authors. All rights reserved.
+# Copyright (c) 2018-2023 Valve Corporation
+# Copyright (c) 2018-2023 LunarG, Inc.
+# Copyright (c) 2023-2023 RasterGrid Kft.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script was heavily leveraged from KhronosGroup/glslang
+# update_glslang_sources.py.
+"""update_deps.py
+
+Get and build dependent repositories using known-good commits.
+
+Purpose
+-------
+
+This program is intended to assist a developer of this repository
+(the "home" repository) by gathering and building the repositories that
+this home repository depend on. It also checks out each dependent
+repository at a "known-good" commit in order to provide stability in
+the dependent repositories.
+
+Known-Good JSON Database
+------------------------
+
+This program expects to find a file named "known-good.json" in the
+same directory as the program file. This JSON file is tailored for
+the needs of the home repository by including its dependent repositories.
+
+Program Options
+---------------
+
+See the help text (update_deps.py --help) for a complete list of options.
+
+Program Operation
+-----------------
+
+The program uses the user's current directory at the time of program
+invocation as the location for fetching and building the dependent
+repositories. The user can override this by using the "--dir" option.
+
+For example, a directory named "build" in the repository's root directory
+is a good place to put the dependent repositories because that directory
+is not tracked by Git. (See the .gitignore file.) The "external" directory
+may also be a suitable location.
+A user can issue:
+
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+
+or, to do the same thing, but using the --dir option:
+
+$ cd My-Repo
+$ mkdir build
+$ scripts/update_deps.py --dir=build
+
+With these commands, the "build" directory is considered the "top"
+directory where the program clones the dependent repositories. The
+JSON file configures the build and install working directories to be
+within this "top" directory.
+
+Note that the "dir" option can also specify an absolute path:
+
+$ cd My-Repo
+$ scripts/update_deps.py --dir=/tmp/deps
+
+The "top" dir is then /tmp/deps (Linux filesystem example) and is
+where this program will clone and build the dependent repositories.
+
+Helper CMake Config File
+------------------------
+
+When the program finishes building the dependencies, it writes a file
+named "helper.cmake" to the "top" directory that contains CMake commands
+for setting CMake variables for locating the dependent repositories.
+This helper file can be used to set up the CMake build files for this
+"home" repository.
+
+A complete sequence might look like:
+
+$ git clone git@github.com:My-Group/My-Repo.git
+$ cd My-Repo
+$ mkdir build
+$ cd build
+$ ../scripts/update_deps.py
+$ cmake -C helper.cmake ..
+$ cmake --build .
+
+JSON File Schema
+----------------
+
+There's no formal schema for the "known-good" JSON file, but here is
+a description of its elements. All elements are required except those
+marked as optional. Please see the "known_good.json" file for
+examples of all of these elements.
+
+- name
+
+The name of the dependent repository. This field can be referenced
+by the "deps.repo_name" structure to record a dependency.
+
+- api
+
+The name of the API the dependency is specific to (e.g. "vulkan").
+
+- url
+
+Specifies the URL of the repository.
+Example: https://github.com/KhronosGroup/Vulkan-Loader.git
+
+- sub_dir
+
+The directory where the program clones the repository, relative to
+the "top" directory.
+
+- build_dir
+
+The directory used to build the repository, relative to the "top"
+directory.
+
+- install_dir
+
+The directory used to store the installed build artifacts, relative
+to the "top" directory.
+
+- commit
+
+The commit used to checkout the repository. This can be a SHA-1
+object name or a refname used with the remote name "origin".
+
+- deps (optional)
+
+An array of pairs consisting of a CMake variable name and a
+repository name to specify a dependent repo and a "link" to
+that repo's install artifacts. For example:
+
+"deps" : [
+ {
+ "var_name" : "VULKAN_HEADERS_INSTALL_DIR",
+ "repo_name" : "Vulkan-Headers"
+ }
+]
+
+which represents that this repository depends on the Vulkan-Headers
+repository and uses the VULKAN_HEADERS_INSTALL_DIR CMake variable to
+specify the location where it expects to find the Vulkan-Headers install
+directory.
+Note that the "repo_name" element must match the "name" element of some
+other repository in the JSON file.
+
+- prebuild (optional)
+- prebuild_linux (optional) (For Linux and MacOS)
+- prebuild_windows (optional)
+
+A list of commands to execute before building a dependent repository.
+This is useful for repositories that require the execution of some
+sort of "update" script or need to clone an auxiliary repository like
+googletest.
+
+The commands listed in "prebuild" are executed first, and then the
+commands for the specific platform are executed.
+
+- custom_build (optional)
+
+A list of commands to execute as a custom build instead of using
+the built in CMake way of building. Requires "build_step" to be
+set to "custom"
+
+You can insert the following keywords into the commands listed in
+"custom_build" if they require runtime information (like whether the
+build config is "Debug" or "Release").
+
+Keywords:
+{0} reference to a dictionary of repos and their attributes
+{1} reference to the command line arguments set before start
+{2} reference to the CONFIG_MAP value of config.
+
+Example:
+{2} returns the CONFIG_MAP value of config e.g. debug -> Debug
+{1}.config returns the config variable set when you ran update_dep.py
+{0}[Vulkan-Headers][repo_root] returns the repo_root variable from
+ the Vulkan-Headers GoodRepo object.
+
+- cmake_options (optional)
+
+A list of options to pass to CMake during the generation phase.
+
+- ci_only (optional)
+
+A list of environment variables where one must be set to "true"
+(case-insensitive) in order for this repo to be fetched and built.
+This list can be used to specify repos that should be built only in CI.
+
+- build_step (optional)
+
+Specifies if the dependent repository should be built or not. This can
+have a value of 'build', 'custom', or 'skip'. The dependent repositories are
+built by default.
+
+- build_platforms (optional)
+
+A list of platforms the repository will be built on.
+Legal options include:
+"windows"
+"linux"
+"darwin"
+"android"
+
+Builds on all platforms by default.
+
+Note
+----
+
+The "sub_dir", "build_dir", and "install_dir" elements are all relative
+to the effective "top" directory. Specifying absolute paths is not
+supported. However, the "top" directory specified with the "--dir"
+option can be a relative or absolute path.
+
+"""
+
+import argparse
+import json
+import os
+import os.path
+import subprocess
+import sys
+import platform
+import multiprocessing
+import shlex
+import shutil
+import stat
+import time
+
# Name of the JSON database describing known-good repositories; expected
# to live next to this script unless overridden with --known-good-dir.
KNOWN_GOOD_FILE_NAME = 'known_good.json'

# Maps the lower-case --config argument onto CMake's canonical
# configuration names.
CONFIG_MAP = {
    'debug': 'Debug',
    'release': 'Release',
    'relwithdebinfo': 'RelWithDebInfo',
    'minsizerel': 'MinSizeRel'
}

# NOTE: CMake also uses the VERBOSE environment variable. This is intentional.
VERBOSE = os.getenv("VERBOSE")

# Writable sink for discarding subprocess output.
# NOTE(review): not referenced in the code visible here and never closed;
# presumably consumed further down the file -- confirm before removing.
DEVNULL = open(os.devnull, 'wb')
+
+
def on_rm_error(func, path, exc_info):
    """Error handler for shutil.rmtree: make the file writable and retry.

    On Windows, rmtree fails on read-only files; clearing the read-only
    attribute and unlinking the file lets the removal continue.
    """
    os.chmod(path, stat.S_IWRITE)
    os.unlink(path)
+
def make_or_exist_dirs(path):
    """Create directory 'path' (and any parents), tolerating it already existing.

    The file is Python-3-only (f-strings, subprocess.run capture_output),
    so the old Python-2 isdir() guard is replaced with exist_ok=True,
    which also avoids a check-then-act race between the test and makedirs.
    """
    os.makedirs(path, exist_ok=True)
+
def command_output(cmd, directory):
    """Run 'cmd' in 'directory' and return its standard output as text.

    The standard error stream is captured and printed if an error occurs.
    Raises RuntimeError when the command fails to launch or exits non-zero.
    """
    if VERBOSE:
        print('In {d}: {cmd}'.format(d=directory, cmd=cmd))

    completed = subprocess.run(cmd, cwd=directory, capture_output=True, text=True)

    if completed.returncode != 0:
        print(completed.stderr, file=sys.stderr)
        raise RuntimeError(f'Failed to run {cmd} in {directory}')

    if VERBOSE:
        print(completed.stdout)
    return completed.stdout
+
def run_cmake_command(cmake_cmd):
    """Run a CMake command, merging stdout and stderr.

    NOTE: Because CMake is an executable that runs other executables,
    its stdout/stderr are interleaved; combining the streams keeps the
    output coherent when a non-zero exit code has to be reported.
    Exits the whole process with CMake's return code on failure.
    """
    proc = subprocess.run(cmake_cmd,
                          stdout=subprocess.PIPE,
                          stderr=subprocess.STDOUT,
                          text=True)

    if VERBOSE:
        print(proc.stdout)
        print(f"CMake command: {cmake_cmd} ", flush=True)

    if proc.returncode != 0:
        print(proc.stdout, file=sys.stderr)
        sys.exit(proc.returncode)
+
def escape(path):
    """Convert backslashes to forward slashes so the path is safe for CMake."""
    return '/'.join(path.split('\\'))
+
class GoodRepo(object):
    """Represents a repository at a known-good commit."""

    def __init__(self, json, args):
        """Initializes this good repo object.

        Args:
            'json': A fully populated JSON object describing the repo.
            'args': Results from ArgumentParser
        """
        self._json = json
        self._args = args
        # Required JSON elements
        self.name = json['name']
        self.url = json['url']
        self.sub_dir = json['sub_dir']
        self.commit = json['commit']
        # Optional JSON elements
        self.build_dir = None
        self.install_dir = None
        if json.get('build_dir'):
            self.build_dir = os.path.normpath(json['build_dir'])
        if json.get('install_dir'):
            self.install_dir = os.path.normpath(json['install_dir'])
        self.deps = json['deps'] if ('deps' in json) else []
        self.prebuild = json['prebuild'] if ('prebuild' in json) else []
        self.prebuild_linux = json['prebuild_linux'] if (
            'prebuild_linux' in json) else []
        self.prebuild_windows = json['prebuild_windows'] if (
            'prebuild_windows' in json) else []
        self.custom_build = json['custom_build'] if ('custom_build' in json) else []
        self.cmake_options = json['cmake_options'] if (
            'cmake_options' in json) else []
        self.ci_only = json['ci_only'] if ('ci_only' in json) else []
        self.build_step = json['build_step'] if ('build_step' in json) else 'build'
        self.build_platforms = json['build_platforms'] if ('build_platforms' in json) else []
        self.optional = set(json.get('optional', []))
        self.api = json['api'] if ('api' in json) else None
        # Absolute paths for a repo's directories
        dir_top = os.path.abspath(args.dir)
        self.repo_dir = os.path.join(dir_top, self.sub_dir)
        if self.build_dir:
            self.build_dir = os.path.join(dir_top, self.build_dir)
        if self.install_dir:
            self.install_dir = os.path.join(dir_top, self.install_dir)

        # By default the target platform is the host platform.
        target_platform = platform.system().lower()
        # However, we need to account for cross-compiling: presence of the
        # Android toolchain file among the -D variables means we are
        # targeting Android regardless of the host.
        for cmake_var in self._args.cmake_var:
            if "android.toolchain.cmake" in cmake_var:
                target_platform = 'android'

        # An empty build_platforms list means "build on every platform".
        self.on_build_platform = False
        if self.build_platforms == [] or target_platform in self.build_platforms:
            self.on_build_platform = True

    def Clone(self, retries=10, retry_seconds=60):
        """Clone the repository into repo_dir, retrying on transient failures."""
        if VERBOSE:
            print('Cloning {n} into {d}'.format(n=self.name, d=self.repo_dir))
        # Fix: keep the last failure in a variable that outlives the except
        # block. The name bound by 'except ... as e' is unbound once the
        # except block exits (PEP 3110), so the original 'raise e' after the
        # loop raised NameError instead of the real error.
        last_error = None
        for retry in range(retries):
            make_or_exist_dirs(self.repo_dir)
            try:
                command_output(['git', 'clone', self.url, '.'], self.repo_dir)
                # If we get here, we didn't raise an error
                return
            except RuntimeError as e:
                last_error = e
                print("Error cloning on iteration {}/{}: {}".format(retry + 1, retries, e))
                if retry + 1 < retries:
                    if retry_seconds > 0:
                        print("Waiting {} seconds before trying again".format(retry_seconds))
                        time.sleep(retry_seconds)
                    # Start over from an empty directory on the next attempt.
                    if os.path.isdir(self.repo_dir):
                        print("Removing old tree {}".format(self.repo_dir))
                        shutil.rmtree(self.repo_dir, onerror=on_rm_error)
                continue

        # If we get here, we've exhausted our retries.
        print("Failed to clone {} on all retries.".format(self.url))
        raise last_error

    def Fetch(self, retries=10, retry_seconds=60):
        """Fetch from origin, retrying on transient failures."""
        # Fix: same PEP 3110 issue as Clone() -- preserve the last error so
        # it can be re-raised after the retry loop.
        last_error = None
        for retry in range(retries):
            try:
                command_output(['git', 'fetch', 'origin'], self.repo_dir)
                # if we get here, we didn't raise an error, and we're done
                return
            except RuntimeError as e:
                last_error = e
                print("Error fetching on iteration {}/{}: {}".format(retry + 1, retries, e))
                if retry + 1 < retries:
                    if retry_seconds > 0:
                        print("Waiting {} seconds before trying again".format(retry_seconds))
                        time.sleep(retry_seconds)
                    continue

        # If we get here, we've exhausted our retries.
        print("Failed to fetch {} on all retries.".format(self.url))
        raise last_error

    def Checkout(self):
        """Clone/fetch as needed and check out the known-good commit (or --ref)."""
        if VERBOSE:
            print('Checking out {n} in {d}'.format(n=self.name, d=self.repo_dir))

        # Detect whether an existing clone points at a different remote URL;
        # if so the tree must be recreated from scratch.
        if os.path.exists(os.path.join(self.repo_dir, '.git')):
            url_changed = command_output(['git', 'config', '--get', 'remote.origin.url'], self.repo_dir).strip() != self.url
        else:
            url_changed = False

        if self._args.do_clean_repo or url_changed:
            if os.path.isdir(self.repo_dir):
                if VERBOSE:
                    print('Clearing directory {d}'.format(d=self.repo_dir))
                shutil.rmtree(self.repo_dir, onerror = on_rm_error)
        if not os.path.exists(os.path.join(self.repo_dir, '.git')):
            self.Clone()
        self.Fetch()
        # A non-empty --ref overrides the known-good commit.
        if len(self._args.ref):
            command_output(['git', 'checkout', self._args.ref], self.repo_dir)
        else:
            command_output(['git', 'checkout', self.commit], self.repo_dir)

        if VERBOSE:
            print(command_output(['git', 'status'], self.repo_dir))

    def CustomPreProcess(self, cmd_str, repo_dict):
        """Expand the {0}/{1}/{2} keywords documented in the module docstring."""
        return cmd_str.format(repo_dict, self._args, CONFIG_MAP[self._args.config])

    def PreBuild(self):
        """Execute any prebuild steps from the repo root"""
        for p in self.prebuild:
            command_output(shlex.split(p), self.repo_dir)
        if platform.system() == 'Linux' or platform.system() == 'Darwin':
            for p in self.prebuild_linux:
                command_output(shlex.split(p), self.repo_dir)
        if platform.system() == 'Windows':
            for p in self.prebuild_windows:
                command_output(shlex.split(p), self.repo_dir)

    def CustomBuild(self, repo_dict):
        """Execute any custom_build steps from the repo root"""
        for p in self.custom_build:
            cmd = self.CustomPreProcess(p, repo_dict)
            command_output(shlex.split(cmd), self.repo_dir)

    def CMakeConfig(self, repos):
        """Build CMake command for the configuration phase and execute it"""
        if self._args.do_clean_build:
            if os.path.isdir(self.build_dir):
                shutil.rmtree(self.build_dir, onerror=on_rm_error)
        if self._args.do_clean_install:
            if os.path.isdir(self.install_dir):
                shutil.rmtree(self.install_dir, onerror=on_rm_error)

        # Create and change to build directory
        make_or_exist_dirs(self.build_dir)
        os.chdir(self.build_dir)

        cmake_cmd = [
            'cmake', self.repo_dir,
            '-DCMAKE_INSTALL_PREFIX=' + self.install_dir
        ]

        # Allow users to pass in arbitrary cache variables
        for cmake_var in self._args.cmake_var:
            pieces = cmake_var.split('=', 1)
            cmake_cmd.append('-D{}={}'.format(pieces[0], pieces[1]))

        # For each repo this repo depends on, generate a CMake variable
        # definitions for "...INSTALL_DIR" that points to that dependent
        # repo's install dir.
        for d in self.deps:
            dep_commit = [r for r in repos if r.name == d['repo_name']]
            if len(dep_commit) and dep_commit[0].on_build_platform:
                cmake_cmd.append('-D{var_name}={install_dir}'.format(
                    var_name=d['var_name'],
                    install_dir=dep_commit[0].install_dir))

        # Add any CMake options
        for option in self.cmake_options:
            cmake_cmd.append(escape(option.format(**self.__dict__)))

        # Set build config for single-configuration generators (this is a no-op on multi-config generators)
        cmake_cmd.append(f'-D CMAKE_BUILD_TYPE={CONFIG_MAP[self._args.config]}')

        # Use the CMake -A option to select the platform architecture
        # without needing a Visual Studio generator.
        if platform.system() == 'Windows' and not self._args.generator in ["Ninja", "MinGW Makefiles"]:
            cmake_cmd.append('-A')
            if self._args.arch.lower() == '64' or self._args.arch == 'x64' or self._args.arch == 'win64':
                cmake_cmd.append('x64')
            elif self._args.arch == 'arm64':
                cmake_cmd.append('arm64')
            else:
                cmake_cmd.append('Win32')

        # Apply a generator, if one is specified. This can be used to supply
        # a specific generator for the dependent repositories to match
        # that of the main repository.
        if self._args.generator is not None:
            cmake_cmd.extend(['-G', self._args.generator])

        # Removes warnings related to unused CLI
        # EX: Setting CMAKE_CXX_COMPILER for a C project
        if not VERBOSE:
            cmake_cmd.append("--no-warn-unused-cli")

        run_cmake_command(cmake_cmd)

    def CMakeBuild(self):
        """Build CMake command for the build phase and execute it"""
        cmake_cmd = ['cmake', '--build', self.build_dir, '--target', 'install', '--config', CONFIG_MAP[self._args.config]]
        if self._args.do_clean:
            cmake_cmd.append('--clean-first')

        # Xcode / Ninja are parallel by default, so only pass an explicit
        # --parallel level to other generators.
        # Fix: the original condition (generator != "Ninja" OR
        # generator != "Xcode") was a tautology, so --parallel was always
        # appended, including for Ninja and Xcode.
        if self._args.generator not in ("Ninja", "Xcode"):
            cmake_cmd.append('--parallel')
            cmake_cmd.append(format(multiprocessing.cpu_count()))

        run_cmake_command(cmake_cmd)

    def Build(self, repos, repo_dict):
        """Build the dependent repo and time how long it took"""
        if VERBOSE:
            print('Building {n} in {d}'.format(n=self.name, d=self.repo_dir))
            print('Build dir = {b}'.format(b=self.build_dir))
            print('Install dir = {i}\n'.format(i=self.install_dir))

        start = time.time()

        # Run any prebuild commands
        self.PreBuild()

        if self.build_step == 'custom':
            self.CustomBuild(repo_dict)
            return

        # Build and execute CMake command for creating build files
        self.CMakeConfig(repos)

        # Build and execute CMake command for the build
        self.CMakeBuild()

        total_time = time.time() - start

        print(f"Installed {self.name} ({self.commit}) in {total_time} seconds", flush=True)

    def IsOptional(self, opts):
        """Return True when this repo is tagged with any of the given optional features."""
        return len(self.optional.intersection(opts)) > 0
+
def GetGoodRepos(args):
    """Return a GoodRepo object for each entry in the known-good file.

    The known-good file is expected to sit next to this script unless the
    'known_good_dir' argument overrides the directory.
    """
    if args.known_good_dir:
        known_good_dir = os.path.abspath(args.known_good_dir)
    else:
        known_good_dir = os.path.dirname(os.path.abspath(__file__))
    known_good_file = os.path.join(known_good_dir, KNOWN_GOOD_FILE_NAME)
    with open(known_good_file) as known_good:
        known_good_data = json.load(known_good)
    return [GoodRepo(entry, args) for entry in known_good_data['repos']]
+
+
def GetInstallNames(args):
    """Return the 'install_names' mapping from the known-good file, or None.

    The known-good file is expected to sit next to this script unless the
    'known_good_dir' argument overrides the directory.
    """
    if args.known_good_dir:
        known_good_dir = os.path.abspath(args.known_good_dir)
    else:
        known_good_dir = os.path.dirname(os.path.abspath(__file__))
    known_good_file = os.path.join(known_good_dir, KNOWN_GOOD_FILE_NAME)
    with open(known_good_file) as known_good:
        install_info = json.load(known_good)
    # A missing (or otherwise falsy) 'install_names' entry maps to None,
    # matching the original truthiness check.
    return install_info.get('install_names') or None
+
+
def CreateHelper(args, repos, filename):
    """Create a CMake config helper file.

    The helper file is intended to be used with 'cmake -C <file>'
    to build this home repo using the dependencies built by this script.

    The install_names dictionary represents the CMake variables used by the
    home repo to locate the install dirs of the dependent repos.
    This information is baked into the CMake files of the home repo and so
    this dictionary is kept with the repo via the json file.
    """
    install_names = GetInstallNames(args)
    with open(filename, 'w') as helper_file:
        for repo in repos:
            # Skip repos tagged for a different target API.
            if repo.api is not None and repo.api != args.api:
                continue
            # Only emit entries for repos that have a known install-name
            # variable and that build on this platform.
            if not install_names or repo.name not in install_names:
                continue
            if not repo.on_build_platform:
                continue
            helper_file.write('set({var} "{dir}" CACHE STRING "")\n'
                              .format(
                                  var=install_names[repo.name],
                                  dir=escape(repo.install_dir)))
+
+
def main():
    """Entry point: parse options, then check out and build each known-good repo."""
    parser = argparse.ArgumentParser(
        description='Get and build dependent repos at known-good commits')
    parser.add_argument(
        '--known_good_dir',
        dest='known_good_dir',
        help="Specify directory for known_good.json file.")
    parser.add_argument(
        '--dir',
        dest='dir',
        default='.',
        help="Set target directory for repository roots. Default is \'.\'.")
    parser.add_argument(
        '--ref',
        dest='ref',
        default='',
        help="Override 'commit' with git reference. E.g., 'origin/main'")
    parser.add_argument(
        '--no-build',
        dest='do_build',
        action='store_false',
        help=
        "Clone/update repositories and generate build files without performing compilation",
        default=True)
    parser.add_argument(
        '--clean',
        dest='do_clean',
        action='store_true',
        help="Clean files generated by compiler and linker before building",
        default=False)
    parser.add_argument(
        '--clean-repo',
        dest='do_clean_repo',
        action='store_true',
        help="Delete repository directory before building",
        default=False)
    parser.add_argument(
        '--clean-build',
        dest='do_clean_build',
        action='store_true',
        help="Delete build directory before building",
        default=False)
    parser.add_argument(
        '--clean-install',
        dest='do_clean_install',
        action='store_true',
        help="Delete install directory before building",
        default=False)
    parser.add_argument(
        '--skip-existing-install',
        dest='skip_existing_install',
        action='store_true',
        help="Skip build if install directory exists",
        default=False)
    parser.add_argument(
        '--arch',
        dest='arch',
        choices=['32', '64', 'x86', 'x64', 'win32', 'win64', 'arm64'],
        type=str.lower,
        help="Set build files architecture (Visual Studio Generator Only)",
        default='64')
    parser.add_argument(
        '--config',
        dest='config',
        choices=['debug', 'release', 'relwithdebinfo', 'minsizerel'],
        type=str.lower,
        help="Set build files configuration",
        default='debug')
    parser.add_argument(
        '--api',
        dest='api',
        default='vulkan',
        choices=['vulkan'],
        help="Target API")
    parser.add_argument(
        '--generator',
        dest='generator',
        help="Set the CMake generator",
        default=None)
    parser.add_argument(
        '--optional',
        dest='optional',
        # Lowercased, comma-split into a set for intersection with repo tags.
        type=lambda a: set(a.lower().split(',')),
        help="Comma-separated list of 'optional' resources that may be skipped. Only 'tests' is currently supported as 'optional'",
        default=set())
    parser.add_argument(
        '--cmake_var',
        dest='cmake_var',
        action='append',
        metavar='VAR[=VALUE]',
        help="Add CMake command line option -D'VAR'='VALUE' to the CMake generation command line; may be used multiple times",
        default=[])

    args = parser.parse_args()
    # Remember the starting cwd; Checkout/Build change directories and
    # CreateHelper must run from here so it can find the json file.
    save_cwd = os.getcwd()

    # Create working "top" directory if needed
    make_or_exist_dirs(args.dir)
    abs_top_dir = os.path.abspath(args.dir)

    repos = GetGoodRepos(args)
    # Maps repo name -> dict of selected repo fields; handed to custom
    # build steps so they can reference sibling repos.
    repo_dict = {}

    print('Starting builds in {d}'.format(d=abs_top_dir))
    for repo in repos:
        # If the repo has an API tag and that does not match
        # the target API then skip it
        if repo.api is not None and repo.api != args.api:
            continue

        # If the repo has a platform whitelist, skip the repo
        # unless we are building on a whitelisted platform.
        if not repo.on_build_platform:
            continue

        # Skip building the repo if its install directory already exists
        # and requested via an option. This is useful for cases where the
        # install directory is restored from a cache that is known to be up
        # to date.
        if args.skip_existing_install and os.path.isdir(repo.install_dir):
            print('Skipping build for repo {n} due to existing install directory'.format(n=repo.name))
            continue

        # Skip optional (e.g. test-only) repos unless requested via --optional
        if repo.IsOptional(args.optional):
            continue

        # Snapshot this repo's fields before the ci_only gate below, so the
        # entry exists in repo_dict even when the repo itself is not built.
        field_list = ('url',
                      'sub_dir',
                      'commit',
                      'build_dir',
                      'install_dir',
                      'deps',
                      'prebuild',
                      'prebuild_linux',
                      'prebuild_windows',
                      'custom_build',
                      'cmake_options',
                      'ci_only',
                      'build_step',
                      'build_platforms',
                      'repo_dir',
                      'on_build_platform')
        repo_dict[repo.name] = {field: getattr(repo, field) for field in field_list}

        # If the repo has a CI whitelist, skip the repo unless
        # one of the CI's environment variable is set to true.
        if len(repo.ci_only):
            do_build = False
            for env in repo.ci_only:
                if env not in os.environ:
                    continue
                if os.environ[env].lower() == 'true':
                    do_build = True
                    break
            if not do_build:
                continue

        # Clone/update the repository
        repo.Checkout()

        # Build the repository
        if args.do_build and repo.build_step != 'skip':
            repo.Build(repos, repo_dict)

    # Need to restore original cwd in order for CreateHelper to find json file
    os.chdir(save_cwd)
    CreateHelper(args, repos, os.path.join(abs_top_dir, 'helper.cmake'))

    sys.exit(0)
+
+
# Run the dependency fetch-and-build driver when invoked as a script.
if __name__ == '__main__':
    main()