aboutsummaryrefslogtreecommitdiff
path: root/tools/Vulkan-Tools/scripts/generators
diff options
context:
space:
mode:
authorLizzy Fleckenstein <lizzy@vlhl.dev>2026-03-31 01:30:36 +0200
committerLizzy Fleckenstein <lizzy@vlhl.dev>2026-03-31 01:30:36 +0200
commit8e2ff15dbd3fe70fe2b52397b1eaba3fe2d7a5e8 (patch)
tree925fa596210d1a1f01e00e0743a643f4552e7a7a /tools/Vulkan-Tools/scripts/generators
parent1f17b4df127bd280e50d93a46ae93df704adc2b0 (diff)
parent90bf5bc4fd8bea0d300f6564af256a51a34124b8 (diff)
downloadusermoji-8e2ff15dbd3fe70fe2b52397b1eaba3fe2d7a5e8.tar.xz
add tools/Vulkan-Tools
Diffstat (limited to 'tools/Vulkan-Tools/scripts/generators')
-rw-r--r--tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py1452
-rw-r--r--tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py163
-rw-r--r--tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py1165
3 files changed, 2780 insertions, 0 deletions
diff --git a/tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py b/tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py
new file mode 100644
index 00000000..b1d81c42
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/generators/mock_icd_generator.py
@@ -0,0 +1,1452 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2025 The Khronos Group Inc.
+# Copyright (c) 2015-2025 Valve Corporation
+# Copyright (c) 2015-2025 LunarG, Inc.
+# Copyright (c) 2015-2025 Google Inc.
+# Copyright (c) 2023-2025 RasterGrid Kft.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Tobin Ehlis <tobine@google.com>
+#
+# This script generates a Mock ICD that intercepts almost all Vulkan
+# functions. The resulting driver is not intended to render anything;
+# rather it returns canned capabilities and handles so loaders, layers,
+# and tools can be exercised without real hardware.
+
+from base_generator import BaseGenerator
+
+CUSTOM_C_INTERCEPTS = {
+'vkCreateInstance': '''
+ // TODO: If loader ver <=4 ICD must fail with VK_ERROR_INCOMPATIBLE_DRIVER for all vkCreateInstance calls with
+ // apiVersion set to > Vulkan 1.0 because the loader is still at interface version <= 4. Otherwise, the
+ // ICD should behave as normal.
+ if (loader_interface_version <= 4) {
+ return VK_ERROR_INCOMPATIBLE_DRIVER;
+ }
+ *pInstance = (VkInstance)CreateDispObjHandle();
+ for (auto& physical_device : physical_device_map[*pInstance])
+ physical_device = (VkPhysicalDevice)CreateDispObjHandle();
+ // TODO: If emulating specific device caps, will need to add intelligence here
+ return VK_SUCCESS;
+''',
+'vkDestroyInstance': '''
+ if (instance) {
+ for (const auto physical_device : physical_device_map.at(instance)) {
+ display_map.erase(physical_device);
+ DestroyDispObjHandle((void*)physical_device);
+ }
+ physical_device_map.erase(instance);
+ DestroyDispObjHandle((void*)instance);
+ }
+''',
+'vkAllocateCommandBuffers': '''
+ unique_lock_t lock(global_lock);
+ for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; ++i) {
+ pCommandBuffers[i] = (VkCommandBuffer)CreateDispObjHandle();
+ command_pool_buffer_map[pAllocateInfo->commandPool].push_back(pCommandBuffers[i]);
+ }
+ return VK_SUCCESS;
+''',
+'vkFreeCommandBuffers': '''
+ unique_lock_t lock(global_lock);
+ for (auto i = 0u; i < commandBufferCount; ++i) {
+ if (!pCommandBuffers[i]) {
+ continue;
+ }
+
+ for (auto& pair : command_pool_buffer_map) {
+ auto& cbs = pair.second;
+ auto it = std::find(cbs.begin(), cbs.end(), pCommandBuffers[i]);
+ if (it != cbs.end()) {
+ cbs.erase(it);
+ }
+ }
+
+ DestroyDispObjHandle((void*) pCommandBuffers[i]);
+ }
+''',
+'vkCreateCommandPool': '''
+ unique_lock_t lock(global_lock);
+ *pCommandPool = (VkCommandPool)global_unique_handle++;
+ command_pool_map[device].insert(*pCommandPool);
+ return VK_SUCCESS;
+''',
+'vkDestroyCommandPool': '''
+ // destroy command buffers for this pool
+ unique_lock_t lock(global_lock);
+ auto it = command_pool_buffer_map.find(commandPool);
+ if (it != command_pool_buffer_map.end()) {
+ for (auto& cb : it->second) {
+ DestroyDispObjHandle((void*) cb);
+ }
+ command_pool_buffer_map.erase(it);
+ }
+ command_pool_map[device].erase(commandPool);
+''',
+'vkEnumeratePhysicalDevices': '''
+ VkResult result_code = VK_SUCCESS;
+ if (pPhysicalDevices) {
+ const auto return_count = (std::min)(*pPhysicalDeviceCount, icd_physical_device_count);
+ for (uint32_t i = 0; i < return_count; ++i) pPhysicalDevices[i] = physical_device_map.at(instance)[i];
+ if (return_count < icd_physical_device_count) result_code = VK_INCOMPLETE;
+ *pPhysicalDeviceCount = return_count;
+ } else {
+ *pPhysicalDeviceCount = icd_physical_device_count;
+ }
+ return result_code;
+''',
+'vkCreateDevice': '''
+ *pDevice = (VkDevice)CreateDispObjHandle();
+ // TODO: If emulating specific device caps, will need to add intelligence here
+ return VK_SUCCESS;
+''',
+'vkDestroyDevice': '''
+ unique_lock_t lock(global_lock);
+ // First destroy sub-device objects
+ // Destroy Queues
+ for (auto queue_family_map_pair : queue_map[device]) {
+ for (auto index_queue_pair : queue_map[device][queue_family_map_pair.first]) {
+ DestroyDispObjHandle((void*)index_queue_pair.second);
+ }
+ }
+
+ for (auto& cp : command_pool_map[device]) {
+ for (auto& cb : command_pool_buffer_map[cp]) {
+ DestroyDispObjHandle((void*) cb);
+ }
+ command_pool_buffer_map.erase(cp);
+ }
+ command_pool_map[device].clear();
+
+ queue_map.erase(device);
+ buffer_map.erase(device);
+ image_memory_size_map.erase(device);
+ // Now destroy device
+ DestroyDispObjHandle((void*)device);
+ // TODO: If emulating specific device caps, will need to add intelligence here
+''',
+'vkGetDeviceQueue': '''
+ unique_lock_t lock(global_lock);
+ auto queue = queue_map[device][queueFamilyIndex][queueIndex];
+ if (queue) {
+ *pQueue = queue;
+ } else {
+ *pQueue = queue_map[device][queueFamilyIndex][queueIndex] = (VkQueue)CreateDispObjHandle();
+ }
+ // TODO: If emulating specific device caps, will need to add intelligence here
+ return;
+''',
+'vkGetDeviceQueue2': '''
+ GetDeviceQueue(device, pQueueInfo->queueFamilyIndex, pQueueInfo->queueIndex, pQueue);
+ // TODO: Add further support for GetDeviceQueue2 features
+''',
+'vkEnumerateInstanceLayerProperties': '''
+ return VK_SUCCESS;
+''',
+'vkEnumerateInstanceVersion': '''
+ *pApiVersion = VK_HEADER_VERSION_COMPLETE;
+ return VK_SUCCESS;
+''',
+'vkEnumerateDeviceLayerProperties': '''
+ return VK_SUCCESS;
+''',
+'vkEnumerateInstanceExtensionProperties': '''
+    // Only advertise the ICD's own extensions; layer queries fall through
+    if (!pLayerName) {
+        if (!pProperties) {
+            // Count query: report how many extensions we advertise
+            *pPropertyCount = (uint32_t)instance_extension_map.size();
+        } else {
+            // Fill as many properties as the caller provided room for
+            uint32_t i = 0;
+            for (const auto &name_ver_pair : instance_extension_map) {
+                if (i == *pPropertyCount) {
+                    break;
+                }
+                std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+                pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+                pProperties[i].specVersion = name_ver_pair.second;
+                ++i;
+            }
+            // Per the spec, pPropertyCount must be set to the number of
+            // structures actually written (mirrors the device-extension
+            // implementation, which already does this)
+            *pPropertyCount = i;
+            if (i != instance_extension_map.size()) {
+                return VK_INCOMPLETE;
+            }
+        }
+    }
+    return VK_SUCCESS;
+''',
+'vkEnumerateDeviceExtensionProperties': '''
+ // If requesting number of extensions, return that
+ if (!pLayerName) {
+ if (!pProperties) {
+ *pPropertyCount = (uint32_t)device_extension_map.size();
+ } else {
+ uint32_t i = 0;
+ for (const auto &name_ver_pair : device_extension_map) {
+ if (i == *pPropertyCount) {
+ break;
+ }
+ std::strncpy(pProperties[i].extensionName, name_ver_pair.first.c_str(), sizeof(pProperties[i].extensionName));
+ pProperties[i].extensionName[sizeof(pProperties[i].extensionName) - 1] = 0;
+ pProperties[i].specVersion = name_ver_pair.second;
+ ++i;
+ }
+ *pPropertyCount = i;
+ if (i != device_extension_map.size()) {
+ return VK_INCOMPLETE;
+ }
+ }
+ }
+ // If requesting extension properties, fill in data struct for number of extensions
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfacePresentModesKHR': '''
+ // Currently always say that all present modes are supported
+ if (!pPresentModes) {
+ *pPresentModeCount = 6;
+ } else {
+ if (*pPresentModeCount >= 6) pPresentModes[5] = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR;
+ if (*pPresentModeCount >= 5) pPresentModes[4] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
+ if (*pPresentModeCount >= 4) pPresentModes[3] = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
+ if (*pPresentModeCount >= 3) pPresentModes[2] = VK_PRESENT_MODE_FIFO_KHR;
+ if (*pPresentModeCount >= 2) pPresentModes[1] = VK_PRESENT_MODE_MAILBOX_KHR;
+ if (*pPresentModeCount >= 1) pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
+ *pPresentModeCount = *pPresentModeCount < 6 ? *pPresentModeCount : 6;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceFormatsKHR': '''
+ // Currently always say that RGBA8 & BGRA8 are supported
+ if (!pSurfaceFormats) {
+ *pSurfaceFormatCount = 2;
+ } else {
+ if (*pSurfaceFormatCount >= 2) {
+ pSurfaceFormats[1].format = VK_FORMAT_R8G8B8A8_UNORM;
+ pSurfaceFormats[1].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+ }
+ if (*pSurfaceFormatCount >= 1) {
+ pSurfaceFormats[0].format = VK_FORMAT_B8G8R8A8_UNORM;
+ pSurfaceFormats[0].colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+ }
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceFormats2KHR': '''
+    // Currently always say that RGBA8 & BGRA8 are supported
+    if (!pSurfaceFormats) {
+        *pSurfaceFormatCount = 2;
+    } else {
+        if (*pSurfaceFormatCount >= 2) {
+            pSurfaceFormats[1].pNext = nullptr;
+            pSurfaceFormats[1].surfaceFormat.format = VK_FORMAT_R8G8B8A8_UNORM;
+            pSurfaceFormats[1].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+        }
+        if (*pSurfaceFormatCount >= 1) {
+            // Fix: this branch previously wrote pSurfaceFormats[1].pNext,
+            // which is out of bounds when the caller passed a count of 1
+            // and left element 0's pNext uninitialized
+            pSurfaceFormats[0].pNext = nullptr;
+            pSurfaceFormats[0].surfaceFormat.format = VK_FORMAT_B8G8R8A8_UNORM;
+            pSurfaceFormats[0].surfaceFormat.colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
+        }
+    }
+    return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceSupportKHR': '''
+ // Currently say that all surface/queue combos are supported
+ *pSupported = VK_TRUE;
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceCapabilitiesKHR': '''
+ // In general just say max supported is available for requested surface
+ pSurfaceCapabilities->minImageCount = 1;
+ pSurfaceCapabilities->maxImageCount = 0;
+ pSurfaceCapabilities->currentExtent.width = 0xFFFFFFFF;
+ pSurfaceCapabilities->currentExtent.height = 0xFFFFFFFF;
+ pSurfaceCapabilities->minImageExtent.width = 1;
+ pSurfaceCapabilities->minImageExtent.height = 1;
+ pSurfaceCapabilities->maxImageExtent.width = 0xFFFF;
+ pSurfaceCapabilities->maxImageExtent.height = 0xFFFF;
+ pSurfaceCapabilities->maxImageArrayLayers = 128;
+ pSurfaceCapabilities->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR |
+ VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR |
+ VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR |
+ VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR |
+ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR |
+ VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR;
+ pSurfaceCapabilities->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
+ pSurfaceCapabilities->supportedCompositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
+ VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR |
+ VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR |
+ VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
+ pSurfaceCapabilities->supportedUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+ VK_IMAGE_USAGE_TRANSFER_DST_BIT |
+ VK_IMAGE_USAGE_SAMPLED_BIT |
+ VK_IMAGE_USAGE_STORAGE_BIT |
+ VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT |
+ VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSurfaceCapabilities2KHR': '''
+ GetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, pSurfaceInfo->surface, &pSurfaceCapabilities->surfaceCapabilities);
+
+ auto *present_mode_compatibility = lvl_find_mod_in_chain<VkSurfacePresentModeCompatibilityEXT>(pSurfaceCapabilities->pNext);
+ if (present_mode_compatibility) {
+ if (!present_mode_compatibility->pPresentModes) {
+ present_mode_compatibility->presentModeCount = 3;
+ } else {
+ // arbitrary
+ present_mode_compatibility->pPresentModes[0] = VK_PRESENT_MODE_IMMEDIATE_KHR;
+ present_mode_compatibility->pPresentModes[1] = VK_PRESENT_MODE_FIFO_KHR;
+ present_mode_compatibility->pPresentModes[2] = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR;
+ }
+ }
+ return VK_SUCCESS;
+''',
+'vkGetInstanceProcAddr': '''
+ if (!negotiate_loader_icd_interface_called) {
+ loader_interface_version = 0;
+ }
+ const auto &item = name_to_funcptr_map.find(pName);
+ if (item != name_to_funcptr_map.end()) {
+ return reinterpret_cast<PFN_vkVoidFunction>(item->second);
+ }
+ // Mock should intercept all functions so if we get here just return null
+ return nullptr;
+''',
+'vkGetDeviceProcAddr': '''
+ return GetInstanceProcAddr(nullptr, pName);
+''',
+'vkGetPhysicalDeviceMemoryProperties': '''
+ pMemoryProperties->memoryTypeCount = 6;
+ // Host visible Coherent
+ pMemoryProperties->memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ pMemoryProperties->memoryTypes[0].heapIndex = 0;
+ // Host visible Cached
+ pMemoryProperties->memoryTypes[1].propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+ pMemoryProperties->memoryTypes[1].heapIndex = 0;
+ // Device local and Host visible
+ pMemoryProperties->memoryTypes[2].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+ pMemoryProperties->memoryTypes[2].heapIndex = 1;
+ // Device local lazily
+ pMemoryProperties->memoryTypes[3].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
+ pMemoryProperties->memoryTypes[3].heapIndex = 1;
+ // Device local protected
+ pMemoryProperties->memoryTypes[4].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_PROTECTED_BIT;
+ pMemoryProperties->memoryTypes[4].heapIndex = 1;
+ // Device local only
+ pMemoryProperties->memoryTypes[5].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ pMemoryProperties->memoryTypes[5].heapIndex = 1;
+ pMemoryProperties->memoryHeapCount = 2;
+ pMemoryProperties->memoryHeaps[0].flags = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT;
+ pMemoryProperties->memoryHeaps[0].size = 8000000000;
+ pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
+ pMemoryProperties->memoryHeaps[1].size = 8000000000;
+''',
+'vkGetPhysicalDeviceMemoryProperties2KHR': '''
+ GetPhysicalDeviceMemoryProperties(physicalDevice, &pMemoryProperties->memoryProperties);
+''',
+'vkGetPhysicalDeviceQueueFamilyProperties': '''
+ if (pQueueFamilyProperties) {
+ std::vector<VkQueueFamilyProperties2KHR> props2(*pQueueFamilyPropertyCount, {
+ VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR});
+ GetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, props2.data());
+ for (uint32_t i = 0; i < *pQueueFamilyPropertyCount; ++i) {
+ pQueueFamilyProperties[i] = props2[i].queueFamilyProperties;
+ }
+ } else {
+ GetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, nullptr);
+ }
+''',
+'vkGetPhysicalDeviceQueueFamilyProperties2KHR': '''
+ if (pQueueFamilyProperties) {
+ if (*pQueueFamilyPropertyCount >= 1) {
+ auto props = &pQueueFamilyProperties[0].queueFamilyProperties;
+ props->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT
+ | VK_QUEUE_SPARSE_BINDING_BIT | VK_QUEUE_PROTECTED_BIT;
+ props->queueCount = 1;
+ props->timestampValidBits = 16;
+ props->minImageTransferGranularity = {1,1,1};
+ }
+ if (*pQueueFamilyPropertyCount >= 2) {
+ auto props = &pQueueFamilyProperties[1].queueFamilyProperties;
+ props->queueFlags = VK_QUEUE_TRANSFER_BIT | VK_QUEUE_PROTECTED_BIT | VK_QUEUE_VIDEO_DECODE_BIT_KHR;
+ props->queueCount = 1;
+ props->timestampValidBits = 16;
+ props->minImageTransferGranularity = {1,1,1};
+
+ auto status_query_props = lvl_find_mod_in_chain<VkQueueFamilyQueryResultStatusPropertiesKHR>(pQueueFamilyProperties[1].pNext);
+ if (status_query_props) {
+ status_query_props->queryResultStatusSupport = VK_TRUE;
+ }
+ auto video_props = lvl_find_mod_in_chain<VkQueueFamilyVideoPropertiesKHR>(pQueueFamilyProperties[1].pNext);
+ if (video_props) {
+ video_props->videoCodecOperations = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR;
+ }
+ }
+ if (*pQueueFamilyPropertyCount >= 3) {
+ auto props = &pQueueFamilyProperties[2].queueFamilyProperties;
+ props->queueFlags = VK_QUEUE_TRANSFER_BIT | VK_QUEUE_PROTECTED_BIT | VK_QUEUE_VIDEO_ENCODE_BIT_KHR;
+ props->queueCount = 1;
+ props->timestampValidBits = 16;
+ props->minImageTransferGranularity = {1,1,1};
+
+ auto status_query_props = lvl_find_mod_in_chain<VkQueueFamilyQueryResultStatusPropertiesKHR>(pQueueFamilyProperties[2].pNext);
+ if (status_query_props) {
+ status_query_props->queryResultStatusSupport = VK_TRUE;
+ }
+ auto video_props = lvl_find_mod_in_chain<VkQueueFamilyVideoPropertiesKHR>(pQueueFamilyProperties[2].pNext);
+ if (video_props) {
+ video_props->videoCodecOperations = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR
+ | VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR;
+ }
+ }
+ if (*pQueueFamilyPropertyCount > 3) {
+ *pQueueFamilyPropertyCount = 3;
+ }
+ } else {
+ *pQueueFamilyPropertyCount = 3;
+ }
+''',
+'vkGetPhysicalDeviceFeatures': '''
+ uint32_t num_bools = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
+ VkBool32 *bool_array = &pFeatures->robustBufferAccess;
+ SetBoolArrayTrue(bool_array, num_bools);
+''',
+'vkGetPhysicalDeviceFeatures2KHR': '''
+ GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
+ uint32_t num_bools = 0; // Count number of VkBool32s in extension structs
+ VkBool32* feat_bools = nullptr;
+ auto vk_1_1_features = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan11Features>(pFeatures->pNext);
+ if (vk_1_1_features) {
+ vk_1_1_features->protectedMemory = VK_TRUE;
+ }
+ auto vk_1_3_features = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan13Features>(pFeatures->pNext);
+ if (vk_1_3_features) {
+ vk_1_3_features->synchronization2 = VK_TRUE;
+ }
+ auto prot_features = lvl_find_mod_in_chain<VkPhysicalDeviceProtectedMemoryFeatures>(pFeatures->pNext);
+ if (prot_features) {
+ prot_features->protectedMemory = VK_TRUE;
+ }
+ auto sync2_features = lvl_find_mod_in_chain<VkPhysicalDeviceSynchronization2FeaturesKHR>(pFeatures->pNext);
+ if (sync2_features) {
+ sync2_features->synchronization2 = VK_TRUE;
+ }
+ auto video_maintenance1_features = lvl_find_mod_in_chain<VkPhysicalDeviceVideoMaintenance1FeaturesKHR>(pFeatures->pNext);
+ if (video_maintenance1_features) {
+ video_maintenance1_features->videoMaintenance1 = VK_TRUE;
+ }
+ const auto *desc_idx_features = lvl_find_in_chain<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>(pFeatures->pNext);
+ if (desc_idx_features) {
+ const auto bool_size = sizeof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT) - offsetof(VkPhysicalDeviceDescriptorIndexingFeaturesEXT, shaderInputAttachmentArrayDynamicIndexing);
+ num_bools = bool_size/sizeof(VkBool32);
+ feat_bools = (VkBool32*)&desc_idx_features->shaderInputAttachmentArrayDynamicIndexing;
+ SetBoolArrayTrue(feat_bools, num_bools);
+ }
+ const auto *blendop_features = lvl_find_in_chain<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT>(pFeatures->pNext);
+ if (blendop_features) {
+ const auto bool_size = sizeof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT) - offsetof(VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT, advancedBlendCoherentOperations);
+ num_bools = bool_size/sizeof(VkBool32);
+ feat_bools = (VkBool32*)&blendop_features->advancedBlendCoherentOperations;
+ SetBoolArrayTrue(feat_bools, num_bools);
+ }
+ const auto *host_image_copy_features = lvl_find_in_chain<VkPhysicalDeviceHostImageCopyFeaturesEXT>(pFeatures->pNext);
+ if (host_image_copy_features) {
+ feat_bools = (VkBool32*)&host_image_copy_features->hostImageCopy;
+ SetBoolArrayTrue(feat_bools, 1);
+ }
+''',
+'vkGetPhysicalDeviceFormatProperties': '''
+ if (VK_FORMAT_UNDEFINED == format) {
+ *pFormatProperties = { 0x0, 0x0, 0x0 };
+ } else {
+ // Default to a color format, skip DS bit
+ *pFormatProperties = { 0x00FFFDFF, 0x00FFFDFF, 0x00FFFDFF };
+ switch (format) {
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D32_SFLOAT:
+ case VK_FORMAT_S8_UINT:
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ // Don't set color bits for DS formats
+ *pFormatProperties = { 0x00FFFE7F, 0x00FFFE7F, 0x00FFFE7F };
+ break;
+ case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+ case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
+ case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
+ case VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16:
+ case VK_FORMAT_G8_B8R8_2PLANE_444_UNORM:
+ // Set decode/encode bits for these formats
+ *pFormatProperties = { 0x1EFFFDFF, 0x1EFFFDFF, 0x00FFFDFF };
+ break;
+ default:
+ break;
+ }
+ }
+''',
+'vkGetPhysicalDeviceFormatProperties2KHR': '''
+ GetPhysicalDeviceFormatProperties(physicalDevice, format, &pFormatProperties->formatProperties);
+ VkFormatProperties3KHR *props_3 = lvl_find_mod_in_chain<VkFormatProperties3KHR>(pFormatProperties->pNext);
+ if (props_3) {
+ props_3->linearTilingFeatures = pFormatProperties->formatProperties.linearTilingFeatures;
+ props_3->optimalTilingFeatures = pFormatProperties->formatProperties.optimalTilingFeatures;
+ props_3->bufferFeatures = pFormatProperties->formatProperties.bufferFeatures;
+ props_3->optimalTilingFeatures |= VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT;
+ }
+''',
+'vkGetPhysicalDeviceImageFormatProperties': '''
+ // A hardcoded unsupported format
+ if (format == VK_FORMAT_E5B9G9R9_UFLOAT_PACK32) {
+ return VK_ERROR_FORMAT_NOT_SUPPORTED;
+ }
+
+ // TODO: Just hard-coding some values for now
+ // TODO: If tiling is linear, limit the mips, levels, & sample count
+ if (VK_IMAGE_TILING_LINEAR == tiling) {
+ *pImageFormatProperties = { { 4096, 4096, 256 }, 1, 1, VK_SAMPLE_COUNT_1_BIT, 4294967296 };
+ } else {
+ // We hard-code support for all sample counts except 64 bits.
+ *pImageFormatProperties = { { 4096, 4096, 256 }, 12, 256, 0x7F & ~VK_SAMPLE_COUNT_64_BIT, 4294967296 };
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceImageFormatProperties2KHR': '''
+ auto *external_image_prop = lvl_find_mod_in_chain<VkExternalImageFormatProperties>(pImageFormatProperties->pNext);
+ auto *external_image_format = lvl_find_in_chain<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo->pNext);
+ if (external_image_prop && external_image_format) {
+ external_image_prop->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT;
+ external_image_prop->externalMemoryProperties.compatibleHandleTypes = external_image_format->handleType;
+ }
+
+ GetPhysicalDeviceImageFormatProperties(physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags, &pImageFormatProperties->imageFormatProperties);
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceSparseImageFormatProperties': '''
+ if (!pProperties) {
+ *pPropertyCount = 1;
+ } else {
+ // arbitrary
+ pProperties->imageGranularity = {4, 4, 4};
+ pProperties->flags = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT;
+ switch (format) {
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_D32_SFLOAT:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
+ break;
+ case VK_FORMAT_S8_UINT:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
+ break;
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+ break;
+ default:
+ pProperties->aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+ break;
+ }
+ }
+''',
+'vkGetPhysicalDeviceSparseImageFormatProperties2KHR': '''
+ if (pPropertyCount && pProperties) {
+ GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, pFormatInfo->format, pFormatInfo->type, pFormatInfo->samples, pFormatInfo->usage, pFormatInfo->tiling, pPropertyCount, &pProperties->properties);
+ } else {
+ GetPhysicalDeviceSparseImageFormatProperties(physicalDevice, pFormatInfo->format, pFormatInfo->type, pFormatInfo->samples, pFormatInfo->usage, pFormatInfo->tiling, pPropertyCount, nullptr);
+ }
+''',
+'vkGetPhysicalDeviceProperties': '''
+ pProperties->apiVersion = VK_HEADER_VERSION_COMPLETE;
+ pProperties->driverVersion = 1;
+ pProperties->vendorID = 0xba5eba11;
+ pProperties->deviceID = 0xf005ba11;
+ pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
+ //std::string devName = "Vulkan Mock Device";
+ strcpy(pProperties->deviceName, "Vulkan Mock Device");
+ pProperties->pipelineCacheUUID[0] = 18;
+ pProperties->limits = SetLimits(&pProperties->limits);
+ pProperties->sparseProperties = { VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE, VK_TRUE };
+''',
+'vkGetPhysicalDeviceProperties2KHR': '''
+ // The only value that need to be set are those the Profile layer can't set
+ // see https://github.com/KhronosGroup/Vulkan-Profiles/issues/352
+ // All values set are arbitrary
+ GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
+
+ auto *props_11 = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan11Properties>(pProperties->pNext);
+ if (props_11) {
+ props_11->protectedNoFault = VK_FALSE;
+ }
+
+ auto *props_12 = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan12Properties>(pProperties->pNext);
+ if (props_12) {
+ props_12->denormBehaviorIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ props_12->roundingModeIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ }
+
+ auto *props_13 = lvl_find_mod_in_chain<VkPhysicalDeviceVulkan13Properties>(pProperties->pNext);
+ if (props_13) {
+ props_13->storageTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ props_13->uniformTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ props_13->storageTexelBufferOffsetAlignmentBytes = 16;
+ props_13->uniformTexelBufferOffsetAlignmentBytes = 16;
+ }
+
+ auto *protected_memory_props = lvl_find_mod_in_chain<VkPhysicalDeviceProtectedMemoryProperties>(pProperties->pNext);
+ if (protected_memory_props) {
+ protected_memory_props->protectedNoFault = VK_FALSE;
+ }
+
+ auto *float_controls_props = lvl_find_mod_in_chain<VkPhysicalDeviceFloatControlsProperties>(pProperties->pNext);
+ if (float_controls_props) {
+ float_controls_props->denormBehaviorIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ float_controls_props->roundingModeIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
+ }
+
+ auto *conservative_raster_props = lvl_find_mod_in_chain<VkPhysicalDeviceConservativeRasterizationPropertiesEXT>(pProperties->pNext);
+ if (conservative_raster_props) {
+ conservative_raster_props->primitiveOverestimationSize = 0.00195313f;
+ conservative_raster_props->conservativePointAndLineRasterization = VK_TRUE;
+ conservative_raster_props->degenerateTrianglesRasterized = VK_TRUE;
+ conservative_raster_props->degenerateLinesRasterized = VK_TRUE;
+ }
+
+ auto *rt_pipeline_props = lvl_find_mod_in_chain<VkPhysicalDeviceRayTracingPipelinePropertiesKHR>(pProperties->pNext);
+ if (rt_pipeline_props) {
+ rt_pipeline_props->shaderGroupHandleSize = 32;
+ rt_pipeline_props->shaderGroupBaseAlignment = 64;
+ rt_pipeline_props->shaderGroupHandleCaptureReplaySize = 32;
+ }
+
+ auto *rt_pipeline_nv_props = lvl_find_mod_in_chain<VkPhysicalDeviceRayTracingPropertiesNV>(pProperties->pNext);
+ if (rt_pipeline_nv_props) {
+ rt_pipeline_nv_props->shaderGroupHandleSize = 32;
+ rt_pipeline_nv_props->shaderGroupBaseAlignment = 64;
+ }
+
+ auto *texel_buffer_props = lvl_find_mod_in_chain<VkPhysicalDeviceTexelBufferAlignmentProperties>(pProperties->pNext);
+ if (texel_buffer_props) {
+ texel_buffer_props->storageTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ texel_buffer_props->uniformTexelBufferOffsetSingleTexelAlignment = VK_TRUE;
+ texel_buffer_props->storageTexelBufferOffsetAlignmentBytes = 16;
+ texel_buffer_props->uniformTexelBufferOffsetAlignmentBytes = 16;
+ }
+
+ auto *descriptor_buffer_props = lvl_find_mod_in_chain<VkPhysicalDeviceDescriptorBufferPropertiesEXT>(pProperties->pNext);
+ if (descriptor_buffer_props) {
+ descriptor_buffer_props->combinedImageSamplerDescriptorSingleArray = VK_TRUE;
+ descriptor_buffer_props->bufferlessPushDescriptors = VK_TRUE;
+ descriptor_buffer_props->allowSamplerImageViewPostSubmitCreation = VK_TRUE;
+ descriptor_buffer_props->descriptorBufferOffsetAlignment = 4;
+ }
+
+ auto *mesh_shader_props = lvl_find_mod_in_chain<VkPhysicalDeviceMeshShaderPropertiesEXT>(pProperties->pNext);
+ if (mesh_shader_props) {
+ mesh_shader_props->meshOutputPerVertexGranularity = 32;
+ mesh_shader_props->meshOutputPerPrimitiveGranularity = 32;
+ mesh_shader_props->prefersLocalInvocationVertexOutput = VK_TRUE;
+ mesh_shader_props->prefersLocalInvocationPrimitiveOutput = VK_TRUE;
+ mesh_shader_props->prefersCompactVertexOutput = VK_TRUE;
+ mesh_shader_props->prefersCompactPrimitiveOutput = VK_TRUE;
+ }
+
+ auto *fragment_density_map2_props = lvl_find_mod_in_chain<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT>(pProperties->pNext);
+ if (fragment_density_map2_props) {
+ fragment_density_map2_props->subsampledLoads = VK_FALSE;
+ fragment_density_map2_props->subsampledCoarseReconstructionEarlyAccess = VK_FALSE;
+ fragment_density_map2_props->maxSubsampledArrayLayers = 2;
+ fragment_density_map2_props->maxDescriptorSetSubsampledSamplers = 1;
+ }
+
+ auto *maintenance3_props = lvl_find_mod_in_chain<VkPhysicalDeviceMaintenance3Properties>(pProperties->pNext);
+ if (maintenance3_props) {
+ maintenance3_props->maxMemoryAllocationSize = 1073741824;
+ maintenance3_props->maxPerSetDescriptors = 1024;
+ }
+
+ const uint32_t num_copy_layouts = 5;
+ const VkImageLayout HostCopyLayouts[]{
+ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_GENERAL,
+ VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+ VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+ VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+ };
+
+ auto *host_image_copy_props = lvl_find_mod_in_chain<VkPhysicalDeviceHostImageCopyPropertiesEXT>(pProperties->pNext);
+ if (host_image_copy_props){
+ if (host_image_copy_props->pCopyDstLayouts == nullptr) host_image_copy_props->copyDstLayoutCount = num_copy_layouts;
+ else {
+ uint32_t num_layouts = (std::min)(host_image_copy_props->copyDstLayoutCount, num_copy_layouts);
+ for (uint32_t i = 0; i < num_layouts; i++) {
+ host_image_copy_props->pCopyDstLayouts[i] = HostCopyLayouts[i];
+ }
+ }
+ if (host_image_copy_props->pCopySrcLayouts == nullptr) host_image_copy_props->copySrcLayoutCount = num_copy_layouts;
+ else {
+ uint32_t num_layouts = (std::min)(host_image_copy_props->copySrcLayoutCount, num_copy_layouts);
+ for (uint32_t i = 0; i < num_layouts; i++) {
+ host_image_copy_props->pCopySrcLayouts[i] = HostCopyLayouts[i];
+ }
+ }
+ }
+
+ auto *driver_properties = lvl_find_mod_in_chain<VkPhysicalDeviceDriverProperties>(pProperties->pNext);
+ if (driver_properties) {
+ std::strncpy(driver_properties->driverName, "Vulkan Mock Device", VK_MAX_DRIVER_NAME_SIZE);
+#if defined(GIT_BRANCH_NAME) && defined(GIT_TAG_INFO)
+ std::strncpy(driver_properties->driverInfo, "Branch: " GIT_BRANCH_NAME " Tag Info: " GIT_TAG_INFO, VK_MAX_DRIVER_INFO_SIZE);
+#else
+ std::strncpy(driver_properties->driverInfo, "Branch: --unknown-- Tag Info: --unknown--", VK_MAX_DRIVER_INFO_SIZE);
+#endif
+ }
+
+ auto *layered_properties = lvl_find_mod_in_chain<VkPhysicalDeviceLayeredApiPropertiesListKHR>(pProperties->pNext);
+ if (layered_properties) {
+ layered_properties->layeredApiCount = 1;
+ if (layered_properties->pLayeredApis) {
+ layered_properties->pLayeredApis[0] = VkPhysicalDeviceLayeredApiPropertiesKHR{
+ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR,
+ nullptr,
+ 0xba5eba11,
+ 0xf005ba11,
+ VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR
+ };
+ std::strncpy(layered_properties->pLayeredApis[0].deviceName, "Fake Driver", VK_MAX_PHYSICAL_DEVICE_NAME_SIZE);
+ }
+ }
+''',
+'vkGetPhysicalDeviceExternalSemaphoreProperties':'''
+ // Hard code support for all handle types and features
+ pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0x1F;
+ pExternalSemaphoreProperties->compatibleHandleTypes = 0x1F;
+ pExternalSemaphoreProperties->externalSemaphoreFeatures = 0x3;
+''',
+'vkGetPhysicalDeviceExternalSemaphorePropertiesKHR':'''
+ GetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
+''',
+'vkGetPhysicalDeviceExternalFenceProperties':'''
+ // Hard-code support for all handle types and features
+ pExternalFenceProperties->exportFromImportedHandleTypes = 0xF;
+ pExternalFenceProperties->compatibleHandleTypes = 0xF;
+ pExternalFenceProperties->externalFenceFeatures = 0x3;
+''',
+'vkGetPhysicalDeviceExternalFencePropertiesKHR':'''
+ GetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
+''',
+'vkGetPhysicalDeviceExternalBufferProperties':'''
+ constexpr VkExternalMemoryHandleTypeFlags supported_flags = 0x1FF;
+ if (pExternalBufferInfo->handleType & VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
+ // Can't have dedicated memory with AHB
+ pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT | VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT;
+ pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = pExternalBufferInfo->handleType;
+ pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = pExternalBufferInfo->handleType;
+ } else if (pExternalBufferInfo->handleType & supported_flags) {
+ pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0x7;
+ pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = supported_flags;
+ pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = supported_flags;
+ } else {
+ pExternalBufferProperties->externalMemoryProperties.externalMemoryFeatures = 0;
+ pExternalBufferProperties->externalMemoryProperties.exportFromImportedHandleTypes = 0;
+ // According to spec, handle type is always compatible with itself. Even if export/import
+ // not supported, it's important to properly implement self-compatibility property since
+ // application's control flow can rely on this.
+ pExternalBufferProperties->externalMemoryProperties.compatibleHandleTypes = pExternalBufferInfo->handleType;
+ }
+''',
+'vkGetPhysicalDeviceExternalBufferPropertiesKHR':'''
+ GetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
+''',
+'vkGetBufferMemoryRequirements': '''
+ // TODO: Just hard-coding reqs for now
+ pMemoryRequirements->size = 4096;
+ pMemoryRequirements->alignment = 1;
+ pMemoryRequirements->memoryTypeBits = 0xFFFF;
+ // Return a better size based on the buffer size from the create info.
+ unique_lock_t lock(global_lock);
+ auto d_iter = buffer_map.find(device);
+ if (d_iter != buffer_map.end()) {
+ auto iter = d_iter->second.find(buffer);
+ if (iter != d_iter->second.end()) {
+ pMemoryRequirements->size = ((iter->second.size + 4095) / 4096) * 4096;
+ }
+ }
+''',
+'vkGetBufferMemoryRequirements2KHR': '''
+ GetBufferMemoryRequirements(device, pInfo->buffer, &pMemoryRequirements->memoryRequirements);
+''',
+'vkGetDeviceBufferMemoryRequirements': '''
+ // TODO: Just hard-coding reqs for now
+ pMemoryRequirements->memoryRequirements.alignment = 1;
+ pMemoryRequirements->memoryRequirements.memoryTypeBits = 0xFFFF;
+
+ // Return a size based on the buffer size from the create info.
+ pMemoryRequirements->memoryRequirements.size = ((pInfo->pCreateInfo->size + 4095) / 4096) * 4096;
+''',
+'vkGetDeviceBufferMemoryRequirementsKHR': '''
+ GetDeviceBufferMemoryRequirements(device, pInfo, pMemoryRequirements);
+''',
+'vkGetImageMemoryRequirements': '''
+ pMemoryRequirements->size = 0;
+ pMemoryRequirements->alignment = 1;
+
+ unique_lock_t lock(global_lock);
+ auto d_iter = image_memory_size_map.find(device);
+ if(d_iter != image_memory_size_map.end()){
+ auto iter = d_iter->second.find(image);
+ if (iter != d_iter->second.end()) {
+ pMemoryRequirements->size = iter->second;
+ }
+ }
+ // Here we hard-code that the memory type at index 3 doesn't support this image.
+ pMemoryRequirements->memoryTypeBits = 0xFFFF & ~(0x1 << 3);
+''',
+'vkGetImageMemoryRequirements2KHR': '''
+ GetImageMemoryRequirements(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
+''',
+'vkGetDeviceImageMemoryRequirements': '''
+ pMemoryRequirements->memoryRequirements.size = GetImageSizeFromCreateInfo(pInfo->pCreateInfo);
+ pMemoryRequirements->memoryRequirements.alignment = 1;
+ // Here we hard-code that the memory type at index 3 doesn't support this image.
+ pMemoryRequirements->memoryRequirements.memoryTypeBits = 0xFFFF & ~(0x1 << 3);
+''',
+'vkGetDeviceImageMemoryRequirementsKHR': '''
+ GetDeviceImageMemoryRequirements(device, pInfo, pMemoryRequirements);
+''',
+'vkMapMemory': '''
+ unique_lock_t lock(global_lock);
+ if (VK_WHOLE_SIZE == size) {
+ if (allocated_memory_size_map.count(memory) != 0)
+ size = allocated_memory_size_map[memory] - offset;
+ else
+ size = 0x10000;
+ }
+ void* map_addr = malloc((size_t)size);
+ mapped_memory_map[memory].push_back(map_addr);
+ *ppData = map_addr;
+ return VK_SUCCESS;
+''',
+'vkMapMemory2KHR': '''
+ return MapMemory(device, pMemoryMapInfo->memory, pMemoryMapInfo->offset, pMemoryMapInfo->size, pMemoryMapInfo->flags, ppData);
+''',
+'vkUnmapMemory': '''
+ unique_lock_t lock(global_lock);
+ for (auto map_addr : mapped_memory_map[memory]) {
+ free(map_addr);
+ }
+ mapped_memory_map.erase(memory);
+''',
+'vkUnmapMemory2KHR': '''
+ UnmapMemory(device, pMemoryUnmapInfo->memory);
+ return VK_SUCCESS;
+''',
+'vkGetImageSubresourceLayout': '''
+ // Need safe values. Callers are computing memory offsets from pLayout, with no return code to flag failure.
+ *pLayout = VkSubresourceLayout(); // Default constructor zero values.
+''',
+'vkCreateSwapchainKHR': '''
+ unique_lock_t lock(global_lock);
+ *pSwapchain = (VkSwapchainKHR)global_unique_handle++;
+ for(uint32_t i = 0; i < icd_swapchain_image_count; ++i){
+ swapchain_image_map[*pSwapchain][i] = (VkImage)global_unique_handle++;
+ }
+ return VK_SUCCESS;
+''',
+'vkDestroySwapchainKHR': '''
+ unique_lock_t lock(global_lock);
+ swapchain_image_map.clear();
+''',
+'vkGetSwapchainImagesKHR': '''
+ if (!pSwapchainImages) {
+ *pSwapchainImageCount = icd_swapchain_image_count;
+ } else {
+ unique_lock_t lock(global_lock);
+ for (uint32_t img_i = 0; img_i < (std::min)(*pSwapchainImageCount, icd_swapchain_image_count); ++img_i){
+ pSwapchainImages[img_i] = swapchain_image_map.at(swapchain)[img_i];
+ }
+
+ if (*pSwapchainImageCount < icd_swapchain_image_count) return VK_INCOMPLETE;
+ else if (*pSwapchainImageCount > icd_swapchain_image_count) *pSwapchainImageCount = icd_swapchain_image_count;
+ }
+ return VK_SUCCESS;
+''',
+'vkAcquireNextImageKHR': '''
+ *pImageIndex = 0;
+ return VK_SUCCESS;
+''',
+'vkAcquireNextImage2KHR': '''
+ *pImageIndex = 0;
+ return VK_SUCCESS;
+''',
+'vkCreateBuffer': '''
+ unique_lock_t lock(global_lock);
+ *pBuffer = (VkBuffer)global_unique_handle++;
+ buffer_map[device][*pBuffer] = {
+ pCreateInfo->size,
+ current_available_address
+ };
+ current_available_address += pCreateInfo->size;
+ // Always align to next 64-bit pointer
+ const uint64_t alignment = current_available_address % 64;
+ if (alignment != 0) {
+ current_available_address += (64 - alignment);
+ }
+ return VK_SUCCESS;
+''',
+'vkDestroyBuffer': '''
+ unique_lock_t lock(global_lock);
+ buffer_map[device].erase(buffer);
+''',
+'vkCreateImage': '''
+ unique_lock_t lock(global_lock);
+ *pImage = (VkImage)global_unique_handle++;
+ image_memory_size_map[device][*pImage] = GetImageSizeFromCreateInfo(pCreateInfo);
+ return VK_SUCCESS;
+''',
+'vkDestroyImage': '''
+ unique_lock_t lock(global_lock);
+ image_memory_size_map[device].erase(image);
+''',
+'vkEnumeratePhysicalDeviceGroupsKHR': '''
+ if (!pPhysicalDeviceGroupProperties) {
+ *pPhysicalDeviceGroupCount = 1;
+ } else {
+ // arbitrary
+ pPhysicalDeviceGroupProperties->physicalDeviceCount = 1;
+ pPhysicalDeviceGroupProperties->physicalDevices[0] = physical_device_map.at(instance)[0];
+ pPhysicalDeviceGroupProperties->subsetAllocation = VK_FALSE;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceMultisamplePropertiesEXT': '''
+ if (pMultisampleProperties) {
+ // arbitrary
+ pMultisampleProperties->maxSampleLocationGridSize = {32, 32};
+ }
+''',
+'vkGetPhysicalDeviceFragmentShadingRatesKHR': '''
+ if (!pFragmentShadingRates) {
+ *pFragmentShadingRateCount = 1;
+ } else {
+ // arbitrary
+ pFragmentShadingRates->sampleCounts = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
+ pFragmentShadingRates->fragmentSize = {8, 8};
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceCalibrateableTimeDomainsEXT': '''
+ if (!pTimeDomains) {
+ *pTimeDomainCount = 1;
+ } else {
+ // arbitrary
+ *pTimeDomains = VK_TIME_DOMAIN_DEVICE_EXT;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceCalibrateableTimeDomainsKHR': '''
+ if (!pTimeDomains) {
+ *pTimeDomainCount = 1;
+ } else {
+ // arbitrary
+ *pTimeDomains = VK_TIME_DOMAIN_DEVICE_KHR;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetFenceWin32HandleKHR': '''
+ *pHandle = (HANDLE)0x12345678;
+ return VK_SUCCESS;
+''',
+'vkGetFenceFdKHR': '''
+ *pFd = 0x42;
+ return VK_SUCCESS;
+''',
+'vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR': '''
+ if (!pCounters) {
+ *pCounterCount = 3;
+ } else {
+ if (*pCounterCount == 0){
+ return VK_INCOMPLETE;
+ }
+ // arbitrary
+ pCounters[0].unit = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR;
+ pCounters[0].scope = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR;
+ pCounters[0].storage = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR;
+ pCounters[0].uuid[0] = 0x01;
+ if (*pCounterCount == 1){
+ return VK_INCOMPLETE;
+ }
+ pCounters[1].unit = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR;
+ pCounters[1].scope = VK_QUERY_SCOPE_RENDER_PASS_KHR;
+ pCounters[1].storage = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR;
+ pCounters[1].uuid[0] = 0x02;
+ if (*pCounterCount == 2){
+ return VK_INCOMPLETE;
+ }
+ pCounters[2].unit = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR;
+ pCounters[2].scope = VK_QUERY_SCOPE_COMMAND_KHR;
+ pCounters[2].storage = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR;
+ pCounters[2].uuid[0] = 0x03;
+ *pCounterCount = 3;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR': '''
+ if (pNumPasses) {
+ // arbitrary
+ *pNumPasses = 1;
+ }
+''',
+'vkGetShaderModuleIdentifierEXT': '''
+ if (pIdentifier) {
+ // arbitrary
+ pIdentifier->identifierSize = 1;
+ pIdentifier->identifier[0] = 0x01;
+ }
+''',
+'vkGetImageSparseMemoryRequirements': '''
+ if (!pSparseMemoryRequirements) {
+ *pSparseMemoryRequirementCount = 1;
+ } else {
+ // arbitrary
+ pSparseMemoryRequirements->imageMipTailFirstLod = 0;
+ pSparseMemoryRequirements->imageMipTailSize = 8;
+ pSparseMemoryRequirements->imageMipTailOffset = 0;
+ pSparseMemoryRequirements->imageMipTailStride = 4;
+ pSparseMemoryRequirements->formatProperties.imageGranularity = {4, 4, 4};
+ pSparseMemoryRequirements->formatProperties.flags = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT;
+ // Would need to track the VkImage to know format for better value here
+ pSparseMemoryRequirements->formatProperties.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT | VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_METADATA_BIT;
+ }
+
+''',
+'vkGetImageSparseMemoryRequirements2KHR': '''
+ if (pSparseMemoryRequirementCount && pSparseMemoryRequirements) {
+ GetImageSparseMemoryRequirements(device, pInfo->image, pSparseMemoryRequirementCount, &pSparseMemoryRequirements->memoryRequirements);
+ } else {
+ GetImageSparseMemoryRequirements(device, pInfo->image, pSparseMemoryRequirementCount, nullptr);
+ }
+''',
+'vkGetBufferDeviceAddress': '''
+ VkDeviceAddress address = 0;
+ auto d_iter = buffer_map.find(device);
+ if (d_iter != buffer_map.end()) {
+ auto iter = d_iter->second.find(pInfo->buffer);
+ if (iter != d_iter->second.end()) {
+ address = iter->second.address;
+ }
+ }
+ return address;
+''',
+'vkGetBufferDeviceAddressKHR': '''
+ return GetBufferDeviceAddress(device, pInfo);
+''',
+'vkGetBufferDeviceAddressEXT': '''
+ return GetBufferDeviceAddress(device, pInfo);
+''',
+'vkGetDescriptorSetLayoutSizeEXT': '''
+ // Need to give something non-zero
+ *pLayoutSizeInBytes = 4;
+''',
+'vkGetAccelerationStructureBuildSizesKHR': '''
+ // arbitrary
+ pSizeInfo->accelerationStructureSize = 4;
+ pSizeInfo->updateScratchSize = 4;
+ pSizeInfo->buildScratchSize = 4;
+''',
+'vkGetAccelerationStructureMemoryRequirementsNV': '''
+ // arbitrary
+ pMemoryRequirements->memoryRequirements.size = 4096;
+ pMemoryRequirements->memoryRequirements.alignment = 1;
+ pMemoryRequirements->memoryRequirements.memoryTypeBits = 0xFFFF;
+''',
+'vkGetAccelerationStructureDeviceAddressKHR': '''
+ // arbitrary - need to be aligned to 256 bytes
+ return 0x262144;
+''',
+'vkGetVideoSessionMemoryRequirementsKHR': '''
+ if (!pMemoryRequirements) {
+ *pMemoryRequirementsCount = 1;
+ } else {
+ // arbitrary
+ pMemoryRequirements[0].memoryBindIndex = 0;
+ pMemoryRequirements[0].memoryRequirements.size = 4096;
+ pMemoryRequirements[0].memoryRequirements.alignment = 1;
+ pMemoryRequirements[0].memoryRequirements.memoryTypeBits = 0xFFFF;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR': '''
+ if (!pProperties) {
+ *pPropertyCount = 2;
+ } else {
+ // arbitrary
+ pProperties[0].MSize = 16;
+ pProperties[0].NSize = 16;
+ pProperties[0].KSize = 16;
+ pProperties[0].AType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].BType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].CType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].ResultType = VK_COMPONENT_TYPE_UINT32_KHR;
+ pProperties[0].saturatingAccumulation = VK_FALSE;
+ pProperties[0].scope = VK_SCOPE_SUBGROUP_KHR;
+
+ pProperties[1] = pProperties[0];
+ pProperties[1].scope = VK_SCOPE_DEVICE_KHR;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceVideoCapabilitiesKHR': '''
+ return VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR;
+''',
+'vkGetPhysicalDeviceVideoFormatPropertiesKHR': '''
+ return VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR;
+''',
+'vkGetDescriptorSetLayoutSupport':'''
+ if (pSupport) {
+ pSupport->supported = VK_TRUE;
+ }
+''',
+'vkGetDescriptorSetLayoutSupportKHR':'''
+ GetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
+''',
+'vkGetRenderAreaGranularity': '''
+ pGranularity->width = 1;
+ pGranularity->height = 1;
+''',
+'vkGetMemoryFdKHR': '''
+ *pFd = 1;
+ return VK_SUCCESS;
+''',
+'vkGetMemoryHostPointerPropertiesEXT': '''
+ pMemoryHostPointerProperties->memoryTypeBits = 1 << 5; // DEVICE_LOCAL only type
+ return VK_SUCCESS;
+''',
+'vkGetAndroidHardwareBufferPropertiesANDROID': '''
+ pProperties->allocationSize = 65536;
+ pProperties->memoryTypeBits = 1 << 5; // DEVICE_LOCAL only type
+
+ auto *format_prop = lvl_find_mod_in_chain<VkAndroidHardwareBufferFormatPropertiesANDROID>(pProperties->pNext);
+ if (format_prop) {
+ // Likley using this format
+ format_prop->format = VK_FORMAT_R8G8B8A8_UNORM;
+ format_prop->externalFormat = 37;
+ }
+
+ auto *format_resolve_prop = lvl_find_mod_in_chain<VkAndroidHardwareBufferFormatResolvePropertiesANDROID>(pProperties->pNext);
+ if (format_resolve_prop) {
+ format_resolve_prop->colorAttachmentFormat = VK_FORMAT_R8G8B8A8_UNORM;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPhysicalDeviceDisplayPropertiesKHR': '''
+ if (!pProperties) {
+ *pPropertyCount = 1;
+ } else {
+ unique_lock_t lock(global_lock);
+ pProperties[0].display = (VkDisplayKHR)global_unique_handle++;
+ display_map[physicalDevice].insert(pProperties[0].display);
+ }
+ return VK_SUCCESS;
+''',
+'vkRegisterDisplayEventEXT': '''
+ unique_lock_t lock(global_lock);
+ *pFence = (VkFence)global_unique_handle++;
+ return VK_SUCCESS;
+''',
+'vkQueueSubmit': '''
+ // Special way to cause DEVICE_LOST
+ // Picked VkExportFenceCreateInfo because needed some struct that wouldn't get cleared by validation Safe Struct
+ // ... TODO - It would be MUCH nicer to have a layer or other setting control when this occured
+ // For now this is used to allow Validation Layers test reacting to device losts
+ if (submitCount > 0 && pSubmits) {
+ auto pNext = reinterpret_cast<const VkBaseInStructure *>(pSubmits[0].pNext);
+ if (pNext && pNext->sType == VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO && pNext->pNext == nullptr) {
+ return VK_ERROR_DEVICE_LOST;
+ }
+ }
+ return VK_SUCCESS;
+''',
+'vkGetMemoryWin32HandlePropertiesKHR': '''
+ pMemoryWin32HandleProperties->memoryTypeBits = 0xFFFF;
+ return VK_SUCCESS;
+''',
+'vkCreatePipelineBinariesKHR': '''
+ unique_lock_t lock(global_lock);
+ if (pBinaries->pPipelineBinaries != nullptr)
+ {
+ for (uint32_t i = 0; i < pBinaries->pipelineBinaryCount; ++i) {
+ pBinaries->pPipelineBinaries[i] = (VkPipelineBinaryKHR)global_unique_handle++;
+ }
+ }
+ else
+ {
+ // In this case, we need to return a return count, let's set it to 3
+ pBinaries->pipelineBinaryCount = 3;
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPipelineKeyKHR': '''
+ if (pPipelineKey != nullptr)
+ {
+ pPipelineKey->keySize = 16;
+ std::memset(pPipelineKey->key, 0x12, pPipelineKey->keySize);
+ }
+ return VK_SUCCESS;
+''',
+'vkGetPipelineBinaryDataKHR': '''
+ static uint32_t fake_size = 64;
+ if (pPipelineBinaryDataSize != nullptr)
+ {
+ if (pPipelineBinaryData == nullptr)
+ {
+ *pPipelineBinaryDataSize = fake_size;
+ }
+ else
+ {
+ std::memset(pPipelineBinaryData, 0xABCD, fake_size);
+ }
+ }
+ return VK_SUCCESS;
+'''
+}
+
+# MockICDOutputGenerator
+# Generates a mock vulkan ICD.
+# This is intended to be a minimal replacement for a vulkan device in order
+# to enable testing of Vulkan applications and layers
+#
class MockICDOutputGenerator(BaseGenerator):
    """Generator for the mock (null) Vulkan ICD sources.

    Emits one of two files depending on ``self.filename``:
    ``function_declarations.h`` (static prototypes, the instance/device
    extension maps, and the name->function-pointer dispatch map) or the
    function definitions. Each command body comes from, in priority
    order: the hand-written CUSTOM_C_INTERCEPTS table, a forwarding call
    to the matching ``*KHR`` custom intercept, or a generic
    create/destroy/no-op body synthesized from the command name.
    """

    def __init__(self):
        BaseGenerator.__init__(self)

        # Ignore extensions that ICDs should not implement or are not safe to report
        self.ignore_exts = ['VK_EXT_validation_cache', 'VK_KHR_portability_subset']

        # Dispatchable handles
        self.dispatchable_handles = ['VkInstance','VkPhysicalDevice', 'VkDevice', 'VkCommandBuffer', 'VkQueue']

    def generate_function_declarations(self, out):
        # Appends (to `out`, a list of text fragments) the declarations
        # header: includes, the two extension-name->spec-version maps, a
        # static prototype for every registry command (wrapped in its
        # platform #ifdef guard when one is declared), and the
        # name_to_funcptr_map consumed by vkGet*ProcAddr.

        out.append('#include <stdint.h>\n')
        out.append('#include <cstring>\n')
        out.append('#include <string>\n')
        out.append('#include <unordered_map>\n')
        out.append('#include <vulkan/vulkan.h>\n')
        out.append('\n')
        out.append('namespace vkmock {\n')
        out.append('// Map of instance extension name to version\n')
        out.append('static const std::unordered_map<std::string, uint32_t> instance_extension_map = {\n')
        for ext in [x for x in self.vk.extensions.values() if x.instance and x.name not in self.ignore_exts]:
            if ext.protect:
                # Platform-specific extension: guard its map entry.
                out.append(f'#ifdef {ext.protect}\n')
            out.append(f'    {{"{ext.name}", {ext.specVersion}}},\n')
            if ext.protect:
                out.append('#endif\n')
        out.append('};\n')
        out.append('// Map of device extension name to version\n')
        out.append('static const std::unordered_map<std::string, uint32_t> device_extension_map = {\n')
        for ext in [x for x in self.vk.extensions.values() if x.device and x.name not in self.ignore_exts]:
            if ext.protect:
                out.append(f'#ifdef {ext.protect}\n')
            out.append(f'    {{"{ext.name}", {ext.specVersion}}},\n')
            if ext.protect:
                out.append('#endif\n')
        out.append('};\n')

        # Walk commands in registry order, opening/closing the platform
        # #ifdef guard only when the protect macro changes between
        # consecutive commands (commands with the same guard share one
        # #ifdef/#endif pair).
        current_protect = None
        for name, cmd in self.vk.commands.items():
            prepend_newline = '\n'
            if cmd.protect != current_protect:
                if current_protect is not None:
                    out.append(f'#endif /* {current_protect} */\n')
                    prepend_newline = ''
                if current_protect is not None and cmd.protect is not None:
                    out.append('\n')
                if cmd.protect is not None:
                    out.append(f'#ifdef {cmd.protect}\n')
                current_protect = cmd.protect
            # name[2:] strips the leading "vk" so the mock's static symbol
            # doesn't clash with the real loader entry point.
            out.append(f'{prepend_newline}static {cmd.cPrototype.replace(name, name[2:])}\n')
        if current_protect is not None:
            out.append('#endif\n')

        # record intercepted procedures
        out.append('// Map of all APIs to be intercepted by this layer\n')
        out.append('static const std::unordered_map<std::string, void*> name_to_funcptr_map = {\n')
        for name, cmd in self.vk.commands.items():
            if cmd.protect:
                out.append(f'#ifdef {cmd.protect}\n')
            out.append(f'    {{"{name}", (void*){name[2:]}}},\n')
            if cmd.protect:
                out.append('#endif\n')
        out.append('};\n')

    def generate_function_definitions(self, out):
        # Appends one static function definition (or, for manual
        # functions without a custom intercept, only a declaration) per
        # registry command.
        out.append('#include "mock_icd.h"\n')
        out.append('#include "function_declarations.h"\n')
        out.append('namespace vkmock {\n')

        manual_functions = [
            # Include functions here to be intercepted w/ manually implemented function bodies
            'vkGetDeviceProcAddr',
            'vkGetInstanceProcAddr',
            'vkCreateDevice',
            'vkDestroyDevice',
            'vkCreateInstance',
            'vkDestroyInstance',
            'vkFreeCommandBuffers',
            'vkAllocateCommandBuffers',
            'vkDestroyCommandPool',
            #'vkCreateDebugReportCallbackEXT',
            #'vkDestroyDebugReportCallbackEXT',
            'vkEnumerateInstanceLayerProperties',
            'vkEnumerateInstanceVersion',
            'vkEnumerateInstanceExtensionProperties',
            'vkEnumerateDeviceLayerProperties',
            'vkEnumerateDeviceExtensionProperties',
            'vkGetPipelineKeyKHR',
            'vkGetPipelineBinaryDataKHR',
        ]

        current_protect = None
        for name, cmd in self.vk.commands.items():
            # Open/close platform #ifdef guards exactly as in the
            # declarations pass so both files stay in sync.
            if cmd.protect != current_protect:
                if current_protect is not None:
                    out.append(f'#endif /* {current_protect} */\n')
                if current_protect is not None and cmd.protect is not None:
                    out.append('\n')
                if cmd.protect is not None:
                    out.append(f'#ifdef {cmd.protect}\n')
                current_protect = cmd.protect

            if name in manual_functions:
                if name not in CUSTOM_C_INTERCEPTS:
                    # Declaration only (cPrototype keeps its trailing ';');
                    # the body is hand-written elsewhere.
                    out.append(f'static {cmd.cPrototype.replace(name, name[2:])}\n')
                    out.append('// TODO: Implement custom intercept body\n')
                else:
                    # [:-1] strips the trailing ';' so the custom body can follow.
                    out.append(f'static {cmd.cPrototype[:-1].replace(name, name[2:])}\n')
                    out.append(f'{{{CUSTOM_C_INTERCEPTS[name]}}}\n')
                continue

            out.append(f'static {cmd.cPrototype[:-1].replace(name, name[2:])}\n')
            if name in CUSTOM_C_INTERCEPTS:
                # Custom body provided for this exact name.
                out.append(f'{{{CUSTOM_C_INTERCEPTS[name]}}}\n')
                continue

            # if the name w/ KHR postfix is in the CUSTOM_C_INTERCEPTS
            # Call the KHR custom version instead of generating separate code
            khr_name = name + "KHR"
            if khr_name in CUSTOM_C_INTERCEPTS:
                return_string = ''
                if cmd.returnType != 'void':
                    return_string = 'return '

                param_names = []
                for param in cmd.params:
                    param_names.append(param.name)
                out.append(f'{{\n    {return_string}{khr_name[2:]}({", ".join(param_names)});\n}}\n')
                continue
            out.append('{\n')

            # GET THE TYPE OF FUNCTION
            if any(name.startswith(ftxt) for ftxt in ('vkCreate', 'vkAllocate')):
                # Get last param
                last_param = cmd.params[-1]
                lp_txt = last_param.name
                lp_len = None
                if last_param.length is not None:
                    lp_len = last_param.length
                    # Registry lengths use '::' for member access; the
                    # generated C code dereferences through pointers.
                    lp_len = lp_len.replace('::', '->')
                lp_type = last_param.type
                handle_type = 'dispatchable'
                allocator_txt = 'CreateDispObjHandle()'
                if lp_type not in self.dispatchable_handles:
                    handle_type = 'non-' + handle_type
                    allocator_txt = 'global_unique_handle++'
                # Need to lock in both cases
                out.append('    unique_lock_t lock(global_lock);\n')
                if lp_len is not None:
                    #print("%s last params (%s) has len %s" % (handle_type, lp_txt, lp_len))
                    out.append(f'    for (uint32_t i = 0; i < {lp_len}; ++i) {{\n')
                    out.append(f'        {lp_txt}[i] = ({lp_type}){allocator_txt};\n')
                    out.append('    }\n')
                else:
                    #print("Single %s last param is '%s' w/ type '%s'" % (handle_type, lp_txt, lp_type))
                    if 'AllocateMemory' in name:
                        # Store allocation size in case it's mapped
                        out.append('        allocated_memory_size_map[(VkDeviceMemory)global_unique_handle] = pAllocateInfo->allocationSize;\n')
                    out.append(f'    *{lp_txt} = ({lp_type}){allocator_txt};\n')
            elif True in [ftxt in name for ftxt in ['Destroy', 'Free']]:
                out.append('//Destroy object\n')
                if 'FreeMemory' in name:
                    # If the memory is mapped, unmap it
                    out.append('    UnmapMemory(device, memory);\n')
                    # Remove from allocation map
                    out.append('    unique_lock_t lock(global_lock);\n')
                    out.append('    allocated_memory_size_map.erase(memory);\n')
            else:
                out.append('//Not a CREATE or DESTROY function\n')

            # Return result variable, if any.
            if cmd.returnType != 'void':
                if name == 'vkGetEventStatus':
                    out.append('    return VK_EVENT_SET;\n')
                else:
                    out.append('    return VK_SUCCESS;\n')
            out.append('}\n')
        if current_protect is not None:
            out.append('#endif\n')

    def generate(self):
        # Entry point: emit the license banner, then either the
        # declarations header or the definitions depending on which
        # output file is being generated, and write the result.
        out = []
        out.append('''/*
** Copyright (c) 2015-2025 The Khronos Group Inc.
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*
** This header is generated from the Khronos Vulkan XML API Registry.
**
*/

#pragma once
''')

        if self.filename == "function_declarations.h":
            self.generate_function_declarations(out)
        else:
            self.generate_function_definitions(out)

        out.append('\n')
        out.append('} // namespace vkmock\n')
        out.append('\n')
        self.write(''.join(out))
diff --git a/tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py b/tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py
new file mode 100644
index 00000000..baf3e816
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/generators/vulkan_tools_helper_file_generator.py
@@ -0,0 +1,163 @@
+#!/usr/bin/python3 -i
+#
+# Copyright (c) 2015-2021 The Khronos Group Inc.
+# Copyright (c) 2015-2021 Valve Corporation
+# Copyright (c) 2015-2021 LunarG, Inc.
+# Copyright (c) 2015-2021 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Mark Lobodzinski <mark@lunarg.com>
+# Author: Tobin Ehlis <tobine@google.com>
+# Author: John Zulauf <jzulauf@lunarg.com>
+
+from base_generator import BaseGenerator
+
+# HelperFileOutputGenerator - subclass of OutputGenerator. Outputs Vulkan helper files
class HelperFileOutputGenerator(BaseGenerator):
    """Emits the Vulkan-Tools helper header: LvlTypeMap/LvlSTypeMap
    template specializations for every struct that carries an sType,
    plus the pNext-chain traversal and struct-init utilities
    (lvl_find_in_chain, lvl_find_mod_in_chain, lvl_init_struct)."""

    def __init__(self):
        BaseGenerator.__init__(self)


    def generate(self):
        # Entry point: builds the whole header as a list of text
        # fragments and writes it in one shot via self.write().
        out = []

        # File Comment
        out.append('// *** THIS FILE IS GENERATED - DO NOT EDIT ***\n')
        out.append('// See vulkan_tools_helper_file_generator.py for modifications\n')

        # Copyright Notice
        out.append('''

/***************************************************************************
 *
 * Copyright (c) 2015-2017 The Khronos Group Inc.
 * Copyright (c) 2015-2017 Valve Corporation
 * Copyright (c) 2015-2017 LunarG, Inc.
 * Copyright (c) 2015-2017 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Author: Mark Lobodzinski <mark@lunarg.com>
 * Author: Courtney Goeltzenleuchter <courtneygo@google.com>
 * Author: Tobin Ehlis <tobine@google.com>
 * Author: Chris Forbes <chrisforbes@google.com>
 * Author: John Zulauf<jzulauf@lunarg.com>
 *
 ****************************************************************************/
''')

        # Generate header
        out.append('''
#pragma once
#include <vulkan/vulkan.h>

// These empty generic templates are specialized for each type with sType
// members and for each sType -- providing a two way map between structure
// types and sTypes

template <VkStructureType id> struct LvlSTypeMap {};
template <typename T> struct LvlTypeMap {};

''')

        # Generate the specializations for each type and stype
        for struct in self.vk.structs.values():
            # Only structs with an sType participate in the two-way map.
            if struct.sType is None:
                continue

            if struct.protect is not None:
                # Platform-specific struct: guard both specializations.
                out.append(f'#ifdef {struct.protect}\n')

            out.append(f'// Map type {struct.name} to id {struct.sType}\n')
            out.append(f'template <> struct LvlTypeMap<{struct.name}> {{\n')
            out.append(f'    static const VkStructureType kSType = {struct.sType};\n')
            out.append('};\n\n')


            out.append(f'template <> struct LvlSTypeMap<{struct.sType}> {{\n')
            out.append(f'    typedef {struct.name} Type;\n')
            out.append('};\n\n')

            if struct.protect is not None:
                out.append(f'#endif // {struct.protect}\n')

        # Define the utilities (here so any renaming stays consistent), if this grows large, refactor to a fixed .h file

        out.append('''// Header "base class" for pNext chain traversal
struct LvlGenericHeader {
    VkStructureType sType;
    const LvlGenericHeader *pNext;
};
struct LvlGenericModHeader {
    VkStructureType sType;
    LvlGenericModHeader *pNext;
};

// Find an entry of the given type in the pNext chain
template <typename T> const T *lvl_find_in_chain(const void *next) {
    const LvlGenericHeader *current = reinterpret_cast<const LvlGenericHeader *>(next);
    const T *found = nullptr;
    while (current) {
        if (LvlTypeMap<T>::kSType == current->sType) {
            found = reinterpret_cast<const T*>(current);
            current = nullptr;
        } else {
            current = current->pNext;
        }
    }
    return found;
}
// Find an entry of the given type in the pNext chain
template <typename T> T *lvl_find_mod_in_chain(void *next) {
    LvlGenericModHeader *current = reinterpret_cast<LvlGenericModHeader *>(next);
    T *found = nullptr;
    while (current) {
        if (LvlTypeMap<T>::kSType == current->sType) {
            found = reinterpret_cast<T*>(current);
            current = nullptr;
        } else {
            current = current->pNext;
        }
    }
    return found;
}

// Init the header of an sType struct with pNext
template <typename T> T lvl_init_struct(void *p_next) {
    T out = {};
    out.sType = LvlTypeMap<T>::kSType;
    out.pNext = p_next;
    return out;
}

// Init the header of an sType struct
template <typename T> T lvl_init_struct() {
    T out = {};
    out.sType = LvlTypeMap<T>::kSType;
    return out;
}
''')

        self.write(''.join(out))
+
diff --git a/tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py b/tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py
new file mode 100644
index 00000000..472459ac
--- /dev/null
+++ b/tools/Vulkan-Tools/scripts/generators/vulkaninfo_generator.py
@@ -0,0 +1,1165 @@
+#!/usr/bin/python3
+#
+# Copyright (c) 2019-2026 Valve Corporation
+# Copyright (c) 2019-2026 LunarG, Inc.
+# Copyright (c) 2019-2022 Google Inc.
+# Copyright (c) 2023-2024 RasterGrid Kft.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: Charles Giessen <charles@lunarg.com>
+
+from base_generator import BaseGenerator
+
+from collections import OrderedDict
+
+LICENSE_HEADER = '''
+/*
+ * Copyright (c) 2019-2026 The Khronos Group Inc.
+ * Copyright (c) 2019-2026 Valve Corporation
+ * Copyright (c) 2019-2026 LunarG, Inc.
+ * Copyright (c) 2023-2024 RasterGrid Kft.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Author: Charles Giessen <charles@lunarg.com>
+ *
+ */
+
+/*
+ * This file is generated from the Khronos Vulkan XML API Registry.
+ */
+'''
+
+CUSTOM_FORMATTERS = r'''
+template <typename T>
+std::string to_hex_str(const T i) {
+ std::stringstream stream;
+ stream << "0x" << std::setfill('0') << std::setw(sizeof(T)) << std::hex << i;
+ return stream.str();
+}
+
+template <typename T>
+std::string to_hex_str(Printer &p, const T i) {
+ if (p.Type() == OutputType::json)
+ return std::to_string(i);
+ else if (p.Type() == OutputType::vkconfig_output)
+ return std::string("\"") + to_hex_str(i) + std::string("\"");
+ else
+ return to_hex_str(i);
+}
+
+'''
+
+
+# used in the .cpp code
+STRUCTURES_TO_GEN = ['VkExtent3D', 'VkExtent2D', 'VkPhysicalDeviceLimits', 'VkPhysicalDeviceFeatures', 'VkPhysicalDeviceSparseProperties',
+ 'VkSurfaceCapabilitiesKHR', 'VkSurfaceFormatKHR', 'VkLayerProperties', 'VkPhysicalDeviceToolProperties', 'VkFormatProperties',
+ 'VkSurfacePresentScalingCapabilitiesKHR', 'VkSurfacePresentModeCompatibilityKHR', 'VkPhysicalDeviceHostImageCopyProperties',
+ 'VkVideoProfileInfoKHR', 'VkVideoCapabilitiesKHR', 'VkVideoFormatPropertiesKHR', 'VkCooperativeMatrixPropertiesKHR',
+ 'VkPhysicalDeviceFragmentShadingRateKHR', 'VkMultisamplePropertiesEXT',
+ 'VkDisplayPropertiesKHR', 'VkDisplayPlanePropertiesKHR', 'VkDisplayPlaneCapabilitiesKHR', 'VkDisplayModePropertiesKHR',
+ 'VkDisplayModeParametersKHR']
+
+ENUMS_TO_GEN = ['VkResult', 'VkFormat', 'VkPresentModeKHR',
+ 'VkPhysicalDeviceType', 'VkImageTiling', 'VkTimeDomainKHR']
+FLAGS_TO_GEN = ['VkSurfaceTransformFlagsKHR', 'VkCompositeAlphaFlagsKHR', 'VkSurfaceCounterFlagsEXT', 'VkQueueFlags',
+ 'VkDeviceGroupPresentModeFlagsKHR', 'VkFormatFeatureFlags', 'VkFormatFeatureFlags2', 'VkMemoryPropertyFlags', 'VkMemoryHeapFlags']
+FLAG_STRINGS_TO_GEN = ['VkQueueFlags']
+
+STRUCT_SHORT_VERSIONS_TO_GEN = ['VkExtent3D', 'VkExtent2D']
+
+STRUCT_COMPARISONS_TO_GEN = ['VkSurfaceFormatKHR', 'VkSurfaceFormat2KHR', 'VkSurfaceCapabilitiesKHR',
+ 'VkSurfaceCapabilities2KHR', 'VkSurfaceCapabilities2EXT']
+# don't generate these structures
+STRUCT_BLACKLIST = ['VkVideoProfileListInfoKHR', 'VkDrmFormatModifierPropertiesListEXT', 'VkDrmFormatModifierPropertiesEXT', 'VkDrmFormatModifierPropertiesList2EXT']
+# These structures are only used in version 1.1, otherwise they are included in the promoted structs
+STRUCT_1_1_LIST = ['VkPhysicalDeviceProtectedMemoryFeatures', 'VkPhysicalDeviceShaderDrawParametersFeatures', 'VkPhysicalDeviceSubgroupProperties', 'VkPhysicalDeviceProtectedMemoryProperties']
+
+# generate these structures such that they only print when not in json mode (as json wants them separate)
+PORTABILITY_STRUCTS = ['VkPhysicalDevicePortabilitySubsetFeaturesKHR', 'VkPhysicalDevicePortabilitySubsetPropertiesKHR']
+
+# iostream or custom outputter handles these types
+PREDEFINED_TYPES = ['char', 'VkBool32', 'uint32_t', 'uint8_t', 'int32_t',
+ 'float', 'uint64_t', 'size_t', 'VkDeviceSize', 'int64_t']
+
+NAMES_TO_IGNORE = ['sType', 'pNext', 'displayMode', 'display', 'currentDisplay']
+
+EXTENSION_TYPE_INSTANCE = 'instance'
+EXTENSION_TYPE_DEVICE = 'device'
+EXTENSION_TYPE_BOTH = 'both'
+
+# Types that need pNext Chains built. 'extends' is the xml tag used in the structextends member. 'type' can be device, instance, or both
+EXTENSION_CATEGORIES = OrderedDict((
+ ('phys_device_props2',
+ {'extends': 'VkPhysicalDeviceProperties2',
+ 'type': EXTENSION_TYPE_BOTH,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': True,
+ 'ignore_vendor_exclusion': False}),
+ ('phys_device_mem_props2',
+ {'extends': 'VkPhysicalDeviceMemoryProperties2',
+ 'type': EXTENSION_TYPE_DEVICE,
+ 'print_iterator': False,
+ 'can_show_promoted_structs': False,
+ 'ignore_vendor_exclusion': False}),
+ ('phys_device_features2',
+ {'extends': 'VkPhysicalDeviceFeatures2',
+ 'type': EXTENSION_TYPE_DEVICE,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': True,
+ 'ignore_vendor_exclusion': False}),
+ ('surface_capabilities2',
+ {'extends': 'VkSurfaceCapabilities2KHR',
+ 'type': EXTENSION_TYPE_BOTH,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': False,
+ 'ignore_vendor_exclusion': False,
+ 'exclude': ['VkSurfacePresentScalingCapabilitiesKHR', 'VkSurfacePresentModeCompatibilityKHR']}),
+ ('format_properties2',
+ {'extends': 'VkFormatProperties2',
+ 'type': EXTENSION_TYPE_DEVICE,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': False,
+ 'ignore_vendor_exclusion': False}),
+ ('queue_properties2',
+ {'extends': 'VkQueueFamilyProperties2',
+ 'type': EXTENSION_TYPE_DEVICE,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': False,
+ 'ignore_vendor_exclusion': False}),
+ ('video_profile_info',
+ {'extends': 'VkVideoProfileInfoKHR',
+ 'type': EXTENSION_TYPE_DEVICE,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': False,
+ 'ignore_vendor_exclusion': True}),
+ ('video_capabilities',
+ {'extends': 'VkVideoCapabilitiesKHR',
+ 'type': EXTENSION_TYPE_DEVICE,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': False,
+ 'ignore_vendor_exclusion': True,}),
+ ('video_format_properties',
+ {'extends': 'VkVideoFormatPropertiesKHR',
+ 'type': EXTENSION_TYPE_DEVICE,
+ 'print_iterator': True,
+ 'can_show_promoted_structs': False,
+ 'ignore_vendor_exclusion': True})
+ ))
+class VulkanInfoGenerator(BaseGenerator):
+ def __init__(self):
+ BaseGenerator.__init__(self)
+ self.format_ranges = []
+
+ def generate(self):
+ self.findFormatRanges()
+
+ # gather the types that are needed to generate
+ types_to_gen = set()
+ types_to_gen.update(ENUMS_TO_GEN)
+ types_to_gen.update(FLAGS_TO_GEN)
+ types_to_gen.update(STRUCTURES_TO_GEN)
+
+ extension_types = {}
+ for key, ext_info in EXTENSION_CATEGORIES.items():
+ extension_types[key] = []
+
+ for extended_struct in self.vk.structs[ext_info.get('extends')].extendedBy:
+ if ext_info.get('exclude') is not None and extended_struct in ext_info.get('exclude'):
+ continue
+ elif ext_info.get('ignore_vendor_exclusion'):
+ extension_types[key].append(extended_struct)
+ continue
+ vendor_tags = []
+ for extension in self.vk.structs[extended_struct].extensions:
+ vendor_tags.append(extension.split('_')[1])
+ if len(vendor_tags) == 0 or 'KHR' in vendor_tags or 'EXT' in vendor_tags:
+ extension_types[key].append(extended_struct)
+ extension_types[key] = sorted(extension_types[key])
+ types_to_gen.update(extension_types[key])
+
+ # find all the types that need
+ types_to_gen.update(self.findAllTypesToGen(types_to_gen))
+
+ types_to_gen = sorted(types_to_gen)
+
+ comparison_types_to_gen = set()
+ comparison_types_to_gen.update(STRUCT_COMPARISONS_TO_GEN)
+ comparison_types_to_gen.update(self.findAllTypesToGen(comparison_types_to_gen))
+ comparison_types_to_gen = sorted(comparison_types_to_gen)
+
+
+ # print the types gathered
+ out = []
+ out.append(LICENSE_HEADER + '\n')
+ out.append('#include "vulkaninfo.h"\n')
+ out.append('#include "outputprinter.h"\n')
+ out.append(CUSTOM_FORMATTERS)
+
+ out.extend(self.genVideoEnums())
+
+ for enum in (e for e in types_to_gen if e in self.vk.enums):
+ out.extend(self.PrintEnumToString(self.vk.enums[enum]))
+ out.extend(self.PrintEnum(self.vk.enums[enum]))
+
+ # Need to go through all flags to find if they or their associated bitmask needs printing
+ # This is because both bitmask and flag types are generated in PrintBitMask
+ for name in (x for x in sorted(self.vk.flags.keys()) if x in types_to_gen or self.vk.flags[x].bitmaskName in types_to_gen):
+ bitmask = self.vk.bitmasks[self.vk.flags[name].bitmaskName]
+
+ out.extend(self.PrintBitMask(bitmask, bitmask.flagName))
+
+ if bitmask.flagName in FLAG_STRINGS_TO_GEN:
+ out.extend(self.PrintBitMaskToString(bitmask, bitmask.flagName))
+ # make sure dump functions for nested structures are declared before use
+ for s in (x for x in types_to_gen if x in self.vk.structs and x not in STRUCT_BLACKLIST):
+ out.extend(self.PrintStructure(self.vk.structs[s], True))
+ for s in (x for x in types_to_gen if x in self.vk.structs and x not in STRUCT_BLACKLIST):
+ out.extend(self.PrintStructure(self.vk.structs[s], False))
+
+ for key, value in EXTENSION_CATEGORIES.items():
+ out.extend(self.PrintChainStruct(key, extension_types[key], value))
+
+ for s in (x for x in comparison_types_to_gen if x in self.vk.structs):
+ out.extend(self.PrintStructComparisonForwardDecl(self.vk.structs[s]))
+ for s in (x for x in comparison_types_to_gen if x in self.vk.structs):
+ out.extend(self.PrintStructComparison(self.vk.structs[s]))
+ for s in (x for x in types_to_gen if x in self.vk.structs and x in STRUCT_SHORT_VERSIONS_TO_GEN):
+ out.extend(self.PrintStructShort(self.vk.structs[s]))
+
+ out.append('auto format_ranges = std::array{\n')
+ for f in self.format_ranges:
+ out.append(f' FormatRange{{{f.minimum_instance_version}, {self.vk.extensions[f.extensions[0]].nameString if len(f.extensions) > 0 else "nullptr"}, ')
+ out.append(f'static_cast<VkFormat>({f.first_format}), static_cast<VkFormat>({f.last_format})}},\n')
+ out.append('};\n')
+
+ out.extend(self.genVideoProfileUtils())
+
+ self.write(''.join(out))
+
+
+ def genVideoEnums(self):
+ out = []
+ for enum in self.vk.videoStd.enums.values():
+ out.append(f'std::string {enum.name}String({enum.name} value) {{\n')
+ out.append(' switch (value) {\n')
+ for field in enum.fields:
+ # Ignore aliases
+ if field.value is not None:
+ out.append(f' case {field.name}: return "{field.name}";\n')
+ out.append(f' default: return std::string("UNKNOWN_{enum.name}_value") + std::to_string(value);\n')
+ out.append(' }\n}\n')
+ out.append(f'void Dump{enum.name}(Printer &p, std::string name, {enum.name} value) {{\n')
+ out.append(f' p.PrintKeyString(name, {enum.name}String(value));\n}}\n')
+ return out
+
+
+ # Utility to get the extension / version precondition of a list of type names
+ def GetTypesPrecondition(self, typelist, indent):
+ indent = ' ' * indent
+ out = []
+ extEnables = []
+ for typename in typelist:
+ extEnables.extend(self.vk.structs[typename].extensions)
+
+ version = None
+ for typename in typelist:
+ for v in self.vk.versions.values():
+ if typename in v.name:
+ if version is not None and (v.major > version.major or (v.major == version.major and v.minor > version.minor)):
+ version = v
+
+
+ has_version = version is not None
+ has_extNameStr = len(extEnables) > 0
+ if has_version or has_extNameStr:
+ out.append(f'{indent}if (')
+ has_printed_condition = False
+ if has_extNameStr:
+ for ext in extEnables:
+ if has_printed_condition:
+ out.append(f'\n{indent} || ')
+ else:
+ has_printed_condition = True
+ if has_version:
+ out.append('(')
+ if self.vk.extensions[ext].device:
+ out.append(f'gpu.CheckPhysicalDeviceExtensionIncluded({self.vk.extensions[ext].nameString})')
+ else:
+ assert False, 'Should never get here'
+ if has_version:
+ if has_printed_condition:
+ out.append(f'\n{indent} || (gpu.api_version >= {version.nameApi})')
+ else:
+ out.append(f'gpu.api_version >= {version.nameApi}')
+ out.append(') {\n')
+ else:
+ out = f'{indent}{{\n'
+ return out
+
+ # Utility to construct a capability prerequisite condition evaluation expression
+ def GetRequiredCapsCondition(self, structName, memberName, memberRef, value):
+ condition = ''
+ requiredCapStructDef = self.vk.structs[structName]
+ for member in requiredCapStructDef.members:
+ if member.name == memberName:
+ if member.type in self.vk.flags:
+ # Check that the flags contain all the required values
+ def genExpressionFromValue(value):
+ return value if value == "" else f"({memberRef} & {value}) != 0"
+
+ for char in condition:
+ if char in ['(', ')', '+', ',']:
+ condition += genExpressionFromValue(value)
+ value = ""
+ if char == '+':
+ # '+' means AND
+ condition += ' && '
+ elif char == ',':
+ # ',' means OR
+ condition += ' || '
+ else:
+ condition += char
+ else:
+ value += char
+ condition += genExpressionFromValue(value)
+ else:
+ condition = f'{memberRef} == {value}'
+ if condition == '':
+ return 'true'
+ else:
+ return f'({condition})'
+
+ def genVideoProfileUtils(self):
+ out = []
+
+ # Generate video format properties comparator
+ out.append('''
+bool is_video_format_same(const VkVideoFormatPropertiesKHR &format_a, const VkVideoFormatPropertiesKHR &format_b) {
+ auto a = reinterpret_cast<const VkBaseInStructure*>(&format_a);
+ auto b = reinterpret_cast<const VkBaseInStructure*>(&format_b);
+ bool same = true;
+ while (same && a != nullptr && b != nullptr) {
+ if (a->sType != b->sType) {
+ // Structure type mismatch (extension structures are expected to be chained in the same order)
+ same = false;
+ } else {
+ switch (a->sType) {''')
+
+ if 'VkVideoFormatPropertiesKHR' in self.registry.validextensionstructs:
+ for extstruct in ['VkVideoFormatPropertiesKHR'] + self.registry.validextensionstructs['VkVideoFormatPropertiesKHR']:
+ extstructDef = self.vk.structs[extstruct]
+ out.append(f'''
+ case {extstructDef.sType}:
+ same = same && memcmp(reinterpret_cast<const char*>(a) + sizeof(VkBaseInStructure),
+ reinterpret_cast<const char*>(b) + sizeof(VkBaseInStructure),
+ sizeof({extstruct}) - sizeof(VkBaseInStructure)) == 0;
+ break;''')
+
+ out.append('''
+ default:
+ // Unexpected structure type
+ same = false;
+ break;
+ }
+ }
+ a = a->pNext;
+ b = b->pNext;
+ }
+ return same;
+}
+''')
+
+ # Generate video profile info capture utilities
+ out.append('''
+std::vector<std::unique_ptr<AppVideoProfile>> enumerate_supported_video_profiles(AppGpu &gpu) {
+ std::vector<std::unique_ptr<AppVideoProfile>> result{};
+
+ struct ChromaSubsamplingInfo {
+ VkVideoChromaSubsamplingFlagsKHR value;
+ const char* name;
+ };
+ const std::vector<ChromaSubsamplingInfo> chroma_subsampling_list = {
+ {VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR, "4:2:0"},
+ {VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR, "4:2:2"},
+ {VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR, "4:4:4"},
+ {VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR, "monochrome"}
+ };
+
+ struct BitDepthInfo {
+ VkVideoComponentBitDepthFlagsKHR value;
+ const char* name;
+ };
+ const std::vector<BitDepthInfo> bit_depth_list = {
+ {VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR, "8"},
+ {VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR, "10"},
+ {VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR, "12"}
+ };
+
+ auto find_caps_struct = [](const VkVideoCapabilitiesKHR &capabilities, VkStructureType stype) -> const VkBaseInStructure* {
+ auto p = reinterpret_cast<const VkBaseInStructure*>(&capabilities);
+ while (p != nullptr) {
+ if (p->sType == stype) {
+ return p;
+ }
+ p = p->pNext;
+ }
+ return nullptr;
+ };
+
+ auto base_format = []
+ (const ChromaSubsamplingInfo &chroma_subsampling, const BitDepthInfo &luma_bit_depth, const BitDepthInfo &chroma_bit_depth) {
+ std::string result{};
+ result += " (";
+ result += chroma_subsampling.name;
+ result += " ";
+ result += luma_bit_depth.name;
+ if (luma_bit_depth.value != chroma_bit_depth.value) {
+ result += ":";
+ result += chroma_bit_depth.name;
+ }
+ result += "-bit)";
+ return result;
+ };
+
+ auto add_profile = [&](
+ const std::string &name,
+ const VkVideoProfileInfoKHR &profile_info,
+ AppVideoProfile::CreateProfileInfoChainCb create_profile_info_chain,
+ AppVideoProfile::CreateCapabilitiesChainCb create_capabilities_chain,
+ const AppVideoProfile::CreateFormatPropertiesChainCbList &create_format_properties_chain_list,
+ AppVideoProfile::InitProfileCb init_profile) {
+ auto profile = std::make_unique<AppVideoProfile>(gpu, gpu.phys_device,
+ name, profile_info,
+ create_profile_info_chain,
+ create_capabilities_chain,
+ create_format_properties_chain_list,
+ init_profile);
+ if (profile->supported) {
+ result.push_back(std::move(profile));
+ }
+ };
+''')
+
+ # Generate individual video profiles from the video codec metadata
+ for videoCodec in self.vk.videoCodecs.values():
+ # Ignore video codec categories
+ if videoCodec.value is None:
+ continue
+
+ out.append('\n')
+ out.extend(self.GetTypesPrecondition(videoCodec.profiles.keys(), 4))
+ out.append(f'{" " * 8}const std::string codec_name = "{videoCodec.name}";\n')
+
+ out.append('''
+ for (auto chroma_subsampling : chroma_subsampling_list) {
+ for (auto luma_bit_depth : bit_depth_list) {
+ for (auto chroma_bit_depth : bit_depth_list) {
+ if (chroma_subsampling.value == VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR && luma_bit_depth.value != chroma_bit_depth.value) {
+ // Ignore the chroma bit depth dimension for monochrome
+ continue;
+ }
+
+ std::string profile_base_name = codec_name + base_format(chroma_subsampling, luma_bit_depth, chroma_bit_depth);
+''')
+
+ # Setup video profile info
+ out.append(f'{" " * 20}VkVideoProfileInfoKHR profile_info{{\n')
+ out.append(f'{" " * 20} VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR,\n')
+ out.append(f'{" " * 20} nullptr,\n')
+ out.append(f'{" " * 20} {videoCodec.value},\n')
+ out.append(f'{" " * 20} chroma_subsampling.value,\n')
+ out.append(f'{" " * 20} luma_bit_depth.value,\n')
+ out.append(f'{" " * 20} chroma_bit_depth.value\n')
+ out.append(f'{" " * 20}}};\n\n')
+
+ # Setup video profile info chain creation callback
+ out.append(f'{" " * 20}auto create_profile_info_chain = [&](const void **ppnext) -> std::unique_ptr<video_profile_info_chain> {{\n')
+ out.append(f'{" " * 20} auto profile_info_chain = std::make_unique<video_profile_info_chain>();\n')
+ for profileStruct in videoCodec.profiles:
+ structDef = self.vk.structs[profileStruct]
+ out.append(self.AddGuardHeader(structDef))
+ out.append(f'{" " * 24}if (profile_info_chain != nullptr) {{\n')
+ out.append(f'{" " * 28}profile_info_chain->{profileStruct[2:]}.sType = {structDef.sType};\n')
+ out.append(f'{" " * 28}profile_info_chain->{profileStruct[2:]}.pNext = nullptr;\n')
+ out.append(f'{" " * 28}*ppnext = &profile_info_chain->{profileStruct[2:]};\n')
+ out.append(f'{" " * 28}ppnext = &profile_info_chain->{profileStruct[2:]}.pNext;\n')
+ out.append(f'{" " * 24}}}\n')
+ if structDef.protect:
+ out.append(f'#else\n{" " * 20}profile_info_chain = nullptr;\n')
+ out.append(self.AddGuardFooter(structDef))
+ out.append(f'{" " * 20} return profile_info_chain;\n')
+ out.append(f'{" " * 20}}};\n\n')
+
+ # Setup video capabilities chain creation callback
+ out.append(f'{" " * 20}auto create_capabilities_chain = [&](void **ppnext) -> std::unique_ptr<video_capabilities_chain> {{\n')
+ out.append(f'{" " * 20} auto capabilities_chain = std::make_unique<video_capabilities_chain>();\n')
+ for capabilities in videoCodec.capabilities:
+ structDef = self.vk.structs[capabilities]
+ out.append(self.AddGuardHeader(structDef))
+ out.append(f'{" " * 24}if (capabilities_chain != nullptr) {{\n')
+ out.extend(self.GetTypesPrecondition([capabilities], 28))
+ out.append(f'{" " * 32}capabilities_chain->{capabilities[2:]}.sType = {structDef.sType};\n')
+ out.append(f'{" " * 32}capabilities_chain->{capabilities[2:]}.pNext = nullptr;\n')
+ out.append(f'{" " * 32}*ppnext = &capabilities_chain->{capabilities[2:]};\n')
+ out.append(f'{" " * 32}ppnext = &capabilities_chain->{capabilities[2:]}.pNext;\n')
+ out.append(f'{" " * 28}}}\n')
+ out.append(f'{" " * 24}}}\n')
+ out.append(self.AddGuardFooter(structDef))
+ out.append(f'{" " * 20} return capabilities_chain;\n')
+ out.append(f'{" " * 20}}};\n\n')
+
+ # Setup video format properties chain creation callbacks
+ out.append(f'{" " * 20}const AppVideoProfile::CreateFormatPropertiesChainCbList create_format_properties_chain_list = {{\n')
+ for format in videoCodec.formats.values():
+ out.append(f'{" " * 24}AppVideoProfile::CreateFormatPropertiesChainCb {{\n')
+ out.append(f'{" " * 28}"{format.name}",\n')
+ out.append(f'{" " * 28}{format.usage.replace("+", " | ")},\n')
+
+ # Callback to check required capabilities
+ out.append(f'{" " * 28}[&](const VkVideoCapabilitiesKHR &capabilities) -> bool {{\n')
+ out.append(f'{" " * 28} bool supported = true;\n')
+ for requiredCap in format.requiredCaps:
+ structDef = self.vk.structs[requiredCap.struct]
+ out.append(self.AddGuardHeader(structDef))
+ out.extend(self.GetTypesPrecondition([requiredCap.struct], 32))
+ out.append(f'{" " * 32} auto caps = reinterpret_cast<const {requiredCap.struct}*>(find_caps_struct(capabilities, {structDef.sType}));\n')
+ out.append(f'{" " * 32} if (caps != nullptr) {{\n')
+ out.append(f'{" " * 32} supported = supported && {self.GetRequiredCapsCondition(requiredCap.struct, requiredCap.member, f"caps->{requiredCap.member}", requiredCap.value)};\n')
+ out.append(f'{" " * 32} }} else {{\n')
+ out.append(f'{" " * 32} supported = false;\n')
+ out.append(f'{" " * 32} }}\n')
+ out.append(f'{" " * 32}}} else {{\n')
+ out.append(f'{" " * 32} supported = false;\n')
+ out.append(f'{" " * 32}}}\n')
+ if structDef.protect:
+ out.append(f'#else\n{" " * 32}supported = false;\n')
+ out.append(self.AddGuardFooter(structDef))
+ out.append(f'{" " * 28} return supported;\n')
+ out.append(f'{" " * 28}}},\n')
+
+ # Callback to create video format properties chain
+ out.append(f'{" " * 28}[&](void **ppnext) -> std::unique_ptr<video_format_properties_chain> {{\n')
+ out.append(f'{" " * 28} auto format_properties_chain = std::make_unique<video_format_properties_chain>();\n')
+ for formatProps in format.properties:
+ structDef = self.vk.structs[formatProps]
+ out.append(self.AddGuardHeader(structDef))
+ out.append(f'{" " * 32}if (format_properties_chain != nullptr) {{\n')
+ out.extend(self.GetTypesPrecondition([formatProps], 36))
+ out.append(f'{" " * 40}format_properties_chain->{formatProps[2:]}.sType = {structDef.sType};\n')
+ out.append(f'{" " * 40}format_properties_chain->{formatProps[2:]}.pNext = nullptr;\n')
+ out.append(f'{" " * 40}*ppnext = &format_properties_chain->{formatProps[2:]};\n')
+ out.append(f'{" " * 40}ppnext = &format_properties_chain->{formatProps[2:]}.pNext;\n')
+ out.append(f'{" " * 36}}}\n')
+ out.append(f'{" " * 32}}}\n')
+ out.append(self.AddGuardFooter(structDef))
+ out.append(f'{" " * 28} return format_properties_chain;\n')
+ out.append(f'{" " * 28}}},\n')
+
+ out.append(f'{" " * 24}}},\n')
+ out.append(f'{" " * 20}}};\n\n')
+
+ # Permute profiles for each profile struct member value
+ profiles = {'': []}
+ for profileStruct in videoCodec.profiles.values():
+ for profileStructMember in profileStruct.members.values():
+ newProfiles = {}
+ for profileStructMemberValue, profileStructMemberName in profileStructMember.values.items():
+ for profileName, profile in profiles.items():
+ # Only add video profile name suffix to the full descriptive name if not empty to avoid excess whitespace
+ newProfileName = profileName if profileStructMemberName == '' else f'{profileName} {profileStructMemberName}'
+ newProfiles[newProfileName] = profile + [{
+ "struct": profileStruct.name,
+ "member": profileStructMember.name,
+ "value": profileStructMemberValue
+ }]
+ profiles = newProfiles
+
+ for profileName, profile in profiles.items():
+ out.append(f'{" " * 20}add_profile(profile_base_name + "{profileName}", profile_info,\n')
+ out.append(f'{" " * 20} create_profile_info_chain, create_capabilities_chain,\n')
+ out.append(f'{" " * 20} create_format_properties_chain_list,\n')
+ out.append(f'{" " * 20} [](AppVideoProfile& profile) {{\n')
+ for profileStruct in videoCodec.profiles:
+ structDef = self.vk.structs[profileStruct]
+ out.append(self.AddGuardHeader(structDef))
+ for elem in profile:
+ if elem['struct'] == profileStruct:
+ out.append(f'{" " * 24}profile.profile_info_chain->{elem["struct"][2:]}.{elem["member"]} = {elem["value"]};\n')
+ out.append(self.AddGuardFooter(structDef))
+ out.append(f'{" " * 20}}});\n')
+
+ out.append(f'{" " * 16}}}\n')
+ out.append(f'{" " * 12}}}\n')
+ out.append(f'{" " * 8}}}\n')
+ out.append(f'{" " * 4}}}\n')
+
+ out.append(' return result;\n')
+ out.append('}\n\n')
+
+ return out
+
+
+ # finds all the ranges of formats from core (1.0), core versions (1.1+), and extensions
+ def findFormatRanges(self):
+ min_val = 2**32
+ prev_field = None
+ max_val = 0
+ for f in self.vk.enums['VkFormat'].fields:
+ if f.value is None:
+ continue
+ if prev_field is not None and f.value != prev_field.value + 1:
+ for ext in prev_field.extensions:
+ if self.vk.extensions[ext].promotedTo is not None:
+ self.format_ranges.append(VulkanFormatRange(self.vk.extensions[ext].promotedTo.replace("VK_", "VK_API_"), [], min_val, max_val))
+ break
+ # only bother with the first extension
+ self.format_ranges.append(VulkanFormatRange(0, prev_field.extensions, min_val, max_val))
+ min_val = 2**32
+ max_val = 0
+ min_val = min(min_val, f.value)
+ max_val = max(max_val, f.value)
+
+ prev_field = f
+
+ for ext in prev_field.extensions:
+ if self.vk.extensions[ext].promotedTo is not None:
+ self.format_ranges.append(VulkanFormatRange(self.vk.extensions[ext].promotedTo.replace("VK_", "VK_API_"), [], min_val, max_val))
+ break
+
+ self.format_ranges.append(VulkanFormatRange(0, prev_field.extensions, min_val, max_val))
+
+ def findAllTypesToGen(self, initial_type_set):
+ out_set = set()
+ current_set = initial_type_set
+ while len(current_set) > 0:
+ out_set.update(current_set)
+ next_set = set()
+
+ for current_item in current_set:
+ if current_item in self.vk.structs:
+ for member in self.vk.structs[current_item].members:
+ if member.type not in out_set and member.name not in NAMES_TO_IGNORE:
+ next_set.add(member.type)
+
+ current_set = next_set
+ return out_set
+
+ def AddGuardHeader(self,obj):
+ if obj is not None and obj.protect is not None:
+ return f'#ifdef {obj.protect}\n'
+ else:
+ return ''
+
+
+ def AddGuardFooter(self,obj):
+ if obj is not None and obj.protect is not None:
+ return f'#endif // {obj.protect}\n'
+ else:
+ return ''
+
+ def PrintEnumToString(self,enum):
+ out = []
+ out.append(self.AddGuardHeader(enum))
+ out.append(f'std::string {enum.name}String({enum.name} value) {{\n')
+ out.append(' switch (value) {\n')
+ for v in enum.fields:
+ out.append(f' case ({v.name}): return "{v.name[3:]}";\n')
+ out.append(f' default: return std::string("UNKNOWN_{enum.name}_value") + std::to_string(value);\n')
+ out.append(' }\n}\n')
+ out.append(self.AddGuardFooter(enum))
+ return out
+
+
+ def PrintEnum(self,enum):
+ out = []
+ out.append(self.AddGuardHeader(enum))
+ out.append(f'''void Dump{enum.name}(Printer &p, std::string name, {enum.name} value) {{
+ if (p.Type() == OutputType::json)
+ p.PrintKeyString(name, std::string("VK_") + {enum.name}String(value));
+ else
+ p.PrintKeyString(name, {enum.name}String(value));
+}}
+''')
+ out.append(self.AddGuardFooter(enum))
+ return out
+
+
+ def PrintGetFlagStrings(self,name, bitmask):
+ out = []
+ out.append(f'std::vector<const char *> {name}GetStrings({name} value) {{\n')
+ out.append(' std::vector<const char *> strings;\n')
+ # If a bitmask contains a field whose value is zero, we want to support printing the correct bitflag
+ # Otherwise, use "None" for when there are not bits set in the bitmask
+ if bitmask.flags[0].value != 0:
+ out.append(' if (value == 0) { strings.push_back("None"); return strings; }\n')
+ else:
+ out.append(f' if (value == 0) {{ strings.push_back("{bitmask.flags[0].name[3:]}"); return strings; }}\n')
+ for v in bitmask.flags:
+ # only check single-bit flags
+ if v.value != 0 and (v.value & (v.value - 1)) == 0:
+ out.append(f' if ({v.name} & value) strings.push_back("{v.name[3:]}");\n')
+ out.append(' return strings;\n}\n')
+ return out
+
+
+ def PrintFlags(self, bitmask, name):
+ out = []
+ out.append(f'void Dump{name}(Printer &p, std::string name, {name} value) {{\n')
+ out.append(f''' if (static_cast<{bitmask.name}>(value) == 0) {{
+ ArrayWrapper arr(p, name, 0);
+ if (p.Type() != OutputType::json && p.Type() != OutputType::vkconfig_output)
+ p.SetAsType().PrintString("None");
+ return;
+ }}
+ auto strings = {bitmask.name}GetStrings(static_cast<{bitmask.name}>(value));
+ ArrayWrapper arr(p, name, strings.size());
+ for(auto& str : strings){{
+ if (p.Type() == OutputType::json)
+ p.SetAsType().PrintString(std::string("VK_") + str);
+ else
+ p.SetAsType().PrintString(str);
+ }}
+}}
+''')
+ return out
+
+
+ def PrintFlagBits(self, bitmask):
+ return [f'''void Dump{bitmask.name}(Printer &p, std::string name, {bitmask.name} value) {{
+ auto strings = {bitmask.name}GetStrings(value);
+ if (strings.size() > 0) {{
+ if (p.Type() == OutputType::json)
+ p.PrintKeyString(name, std::string("VK_") + strings.at(0));
+ else
+ p.PrintKeyString(name, strings.at(0));
+ }}
+}}
+''']
+
+
+ def PrintBitMask(self,bitmask, name):
+ out = []
+ out.extend(self.PrintGetFlagStrings(bitmask.name, bitmask))
+ out.append(self.AddGuardHeader(bitmask))
+ out.extend(self.PrintFlags(bitmask, name))
+ out.extend(self.PrintFlagBits(bitmask))
+ out.append(self.AddGuardFooter(bitmask))
+ out.append('\n')
+ return out
+
+
+ def PrintBitMaskToString(self, bitmask, name):
+ out = []
+ out.append(self.AddGuardHeader(bitmask))
+ out.append(f'std::string {name}String({name} value) {{\n')
+ out.append(' std::string out;\n')
+ out.append(' bool is_first = true;\n')
+ for v in bitmask.flags:
+ out.append(f' if ({v.name} & value) {{\n')
+ out.append(' if (is_first) { is_first = false; } else { out += " | "; }\n')
+ out.append(f' out += "{str(v.name)[3:]}";\n')
+ out.append(' }\n')
+ out.append(' return out;\n')
+ out.append('}\n')
+ out.append(self.AddGuardFooter(bitmask))
+ return out
+
+
+ def PrintStructure(self,struct, declare_only):
+ # Emit a C++ "void Dump<StructName>(Printer &p, std::string name, const <StructName> &obj)"
+ # printing helper for vulkaninfo, wrapped in the struct's platform guard macros.
+ # When declare_only is True only the prototype is emitted; otherwise the full
+ # body that prints every member is generated.
+ if len(struct.members) == 0:
+ return []
+ out = []
+ out.append(self.AddGuardHeader(struct))
+ # Longest key among "simple" printable members; used below to align output columns.
+ max_key_len = 0
+ for v in struct.members:
+ if (v.type in PREDEFINED_TYPES or v.type in STRUCT_BLACKLIST) and (v.length is None or v.type in ['char'] or v.fixedSizeArray[0] in ['VK_UUID_SIZE', 'VK_LUID_SIZE']):
+ max_key_len = max(max_key_len, len(v.name))
+ out.append(f'void Dump{struct.name}(Printer &p, std::string name, const {struct.name} &obj)')
+ if declare_only:
+ out.append(';\n')
+ out.append(self.AddGuardFooter(struct))
+ return out
+ out.append(' {\n')
+ # Limits and sparse properties use fixed object names in json output; all other
+ # structs open an object named after the caller-provided name.
+ if struct.name == 'VkPhysicalDeviceLimits':
+ out.append(' if (p.Type() == OutputType::json)\n')
+ out.append(' p.ObjectStart("limits");\n')
+ out.append(' else\n')
+ out.append(' p.SetSubHeader().ObjectStart(name);\n')
+ elif struct.name == 'VkPhysicalDeviceSparseProperties':
+ out.append(' if (p.Type() == OutputType::json)\n')
+ out.append(' p.ObjectStart("sparseProperties");\n')
+ out.append(' else\n')
+ out.append(' p.SetSubHeader().ObjectStart(name);\n')
+ else:
+ out.append(' ObjectWrapper object{p, name};\n')
+ if max_key_len > 0:
+ out.append(f' p.SetMinKeyWidth({max_key_len});\n')
+ # One print statement per member, dispatched on the member's type.
+ for v in struct.members:
+ # strings
+ if v.type == 'char':
+ # Pointer strings may be null at runtime; fixed char arrays print directly.
+ if v.pointer == True:
+ out.append(f' if (obj.{v.name} == nullptr) {{')
+ out.append(f' p.PrintKeyString("{v.name}", "NULL");\n')
+ out.append(' } else {')
+ out.append(f' p.PrintKeyString("{v.name}", obj.{v.name});\n')
+ if v.pointer == True:
+ out.append(' }')
+ # arrays
+ elif v.length is not None:
+ # uuid's
+ if v.type == 'uint8_t' and (v.fixedSizeArray[0] == 'VK_LUID_SIZE' or v.fixedSizeArray[0] == 'VK_UUID_SIZE'): # VK_UUID_SIZE
+ # LUIDs are only printed when the driver reported deviceLUIDValid.
+ if v.fixedSizeArray[0] == 'VK_LUID_SIZE':
+ out.append(' if (obj.deviceLUIDValid) { // special case\n')
+ out.append(f' p.PrintKeyValue("{v.name}", obj.{v.name});\n')
+ if v.fixedSizeArray[0] == 'VK_LUID_SIZE':
+ out.append(' }\n')
+ elif struct.name == 'VkQueueFamilyGlobalPriorityProperties' and v.name == 'priorities':
+ out.append(f' ArrayWrapper arr(p,"{v.name}", obj.priorityCount);\n')
+ out.append(' for (uint32_t i = 0; i < obj.priorityCount; i++) {\n')
+ out.append(' if (p.Type() == OutputType::json)\n')
+ out.append(' p.PrintString(std::string("VK_") + VkQueueGlobalPriorityString(obj.priorities[i]));\n')
+ out.append(' else\n')
+ out.append(' p.PrintString(VkQueueGlobalPriorityString(obj.priorities[i]));\n')
+ out.append(' }\n')
+ elif len(v.fixedSizeArray) == 2:
+ out.append(f' {{\n ArrayWrapper arr(p,"{v.name}", ' + v.fixedSizeArray[0] + ');\n')
+ out.append(f' for (uint32_t i = 0; i < {v.fixedSizeArray[0]}; i++) {{\n')
+ out.append(f' for (uint32_t j = 0; j < {v.fixedSizeArray[1]}; j++) {{\n')
+ out.append(f' p.PrintElement(obj.{v.name}[i][j]); }} }}\n')
+ out.append(' }\n')
+ elif len(v.fixedSizeArray) == 1:
+ out.append(f' {{\n ArrayWrapper arr(p,"{v.name}", ' + v.fixedSizeArray[0] + ');\n')
+ out.append(f' for (uint32_t i = 0; i < {v.fixedSizeArray[0]}; i++) {{ p.PrintElement(obj.{v.name}[i]); }}\n')
+ out.append(' }\n')
+ else: # dynamic array length based on other member
+ out.append(f' if (obj.{v.length} == 0 || obj.{v.name} == nullptr) {{\n')
+ out.append(f' p.PrintKeyString("{v.name}", "NULL");\n')
+ out.append(' } else {\n')
+ out.append(f' ArrayWrapper arr(p,"{v.name}", obj.{v.length});\n')
+ out.append(f' for (uint32_t i = 0; i < obj.{v.length}; i++) {{\n')
+ out.append(f' Dump{v.type}(p, std::to_string(i), obj.{v.name}[i]);\n')
+ out.append(' }\n')
+ out.append(' }\n')
+ elif v.type == 'VkBool32':
+ out.append(f' p.PrintKeyBool("{v.name}", static_cast<bool>(obj.{v.name}));\n')
+ elif v.type == 'uint8_t':
+ # Widened so the printer shows a number (a bare uint8_t could stream as a char).
+ out.append(f' p.PrintKeyValue("{v.name}", static_cast<uint32_t>(obj.{v.name}));\n')
+ elif v.type == 'VkDeviceSize' or (v.type == 'uint32_t' and v.name in ['vendorID', 'deviceID']):
+ # Sizes and vendor/device IDs are printed in hexadecimal via to_hex_str.
+ out.append(f' p.PrintKeyValue("{v.name}", to_hex_str(p, obj.{v.name}));\n')
+ elif v.type in PREDEFINED_TYPES:
+ out.append(f' p.PrintKeyValue("{v.name}", obj.{v.name});\n')
+ elif v.name not in NAMES_TO_IGNORE:
+ # if it is an enum/flag/bitmask
+ if v.type in ['VkFormatFeatureFlags', 'VkFormatFeatureFlags2']:
+ out.append(' p.SetOpenDetails();\n') # special case so that feature flags are open in html output
+ out.append(f' Dump{v.type}(p, "{v.name}", obj.{v.name});\n')
+
+ if struct.name in ['VkPhysicalDeviceLimits', 'VkPhysicalDeviceSparseProperties']:
+ out.append(' p.ObjectEnd();\n')
+ out.append('}\n')
+
+ out.append(self.AddGuardFooter(struct))
+ return out
+
+
def PrintStructShort(self, struct):
    """Emit a compact C++ ``operator<<`` that streams ``struct`` as "(m1,m2,...)",
    wrapped in the struct's platform guard macros."""
    result = [
        self.AddGuardHeader(struct),
        f'std::ostream &operator<<(std::ostream &o, {struct.name} &obj) {{\n',
        ' return o << "(" << ',
    ]
    for index, member in enumerate(struct.members):
        # Members after the first are preceded by a literal ',' in the output.
        if index == 0:
            result.append(f'obj.{member.name} << ')
        else:
            result.append(f'\',\' << obj.{member.name} << ')
    result.append('")";\n')
    result.append('}\n')
    result.append(self.AddGuardFooter(struct))
    return result
+
+ def PrintChainStruct(self, listName, structs_to_print, chain_details):
+ # Emit the C++ support code for the pNext chain named <listName>:
+ # * a <listName>_chain struct that owns one instance of every chainable struct,
+ # * initialize_chain(), which sets each sType and picks which structs to link
+ # based on enabled extensions and the API version,
+ # * setup_<listName>_chain(), which allocates the chain and attaches it to the
+ # query struct named by chain_details['extends'],
+ # * optionally chain_iterator_<listName>() to print a returned chain, and
+ # * prepare_<listName>_twocall_chain_vectors() for two-call (count/data) members.
+ # chain_details keys read here: 'type', 'extends', 'print_iterator',
+ # 'can_show_promoted_structs'.
+ version_desc = ''
+ # Device-type chains gate structs on the GPU's api version; instance-type
+ # chains on the instance version.
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ version_desc = 'gpu.api_version'
+ else:
+ version_desc = 'inst.instance_version'
+
+ out = []
+
+ # use default constructor and delete copy & move operators
+ out.append(f'''struct {listName}_chain {{
+ {listName}_chain() = default;
+ {listName}_chain(const {listName}_chain &) = delete;
+ {listName}_chain& operator=(const {listName}_chain &) = delete;
+ {listName}_chain({listName}_chain &&) = delete;
+ {listName}_chain& operator=({listName}_chain &&) = delete;
+''')
+
+ # One member per chainable struct (named without the 'Vk' prefix), plus a
+ # std::vector backing store for each dynamically sized array member.
+ out.append(' void* start_of_chain = nullptr;\n')
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+ out.append(self.AddGuardHeader(struct))
+ if struct.sType is not None:
+ out.append(f' {struct.name} {struct.name[2:]}{{}};\n')
+ # Specific versions of drivers have an incorrect definition of the size of these structs.
+ # We need to artificially pad the structure it just so the driver doesn't write out of bounds and
+ # into other structures that are adjacent. This bug comes from the in-development version of
+ # the extension having a larger size than the final version, so older drivers try to write to
+ # members which don't exist.
+ if struct.name in ['VkPhysicalDeviceShaderIntegerDotProductFeatures', 'VkPhysicalDeviceHostImageCopyFeaturesEXT']:
+ out.append(f' char {struct.name}_padding[64];\n')
+ for member in struct.members:
+ if member.length is not None and len(member.fixedSizeArray) == 0:
+ out.append(f' std::vector<{member.type}> {struct.name}_{member.name};\n')
+ out.append(self.AddGuardFooter(struct))
+ # Build initialize_chain()'s parameter list from the chain type.
+ out.append(' void initialize_chain(')
+ args = []
+ if chain_details.get('type') in [EXTENSION_TYPE_INSTANCE, EXTENSION_TYPE_BOTH]:
+ args.append('AppInstance &inst')
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ args.append('AppGpu &gpu')
+ if chain_details.get('can_show_promoted_structs'):
+ args.append('bool show_promoted_structs')
+ out.append(f'{", ".join(args)}) noexcept {{\n')
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+
+ out.append(self.AddGuardHeader(struct))
+ out.append(f' {struct.name[2:]}.sType = {struct.sType};\n')
+ out.append(self.AddGuardFooter(struct))
+
+ # Collect the structs that apply to this instance/GPU into chain_members.
+ out.append(' std::vector<VkBaseOutStructure*> chain_members{};\n')
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+ out.append(self.AddGuardHeader(struct))
+
+ # A struct is gated on its providing extension(s) being enabled and/or on an
+ # API version check; structs with neither are pushed unconditionally.
+ has_version = struct.version is not None
+ has_extNameStr = len(struct.extensions) > 0 or len(struct.aliases) > 0
+ if has_version or has_extNameStr:
+ out.append(' if (')
+ has_printed_condition = False
+ if has_extNameStr:
+ for ext in struct.extensions:
+ if has_printed_condition:
+ out.append('\n || ')
+ else:
+ has_printed_condition = True
+ if has_version:
+ out.append('(')
+ if self.vk.extensions[ext].device:
+ out.append(f'gpu.CheckPhysicalDeviceExtensionIncluded({self.vk.extensions[ext].nameString})')
+ elif self.vk.extensions[ext].instance:
+ out.append(f'inst.CheckExtensionEnabled({self.vk.extensions[ext].nameString})')
+ else:
+ assert False, 'Should never get here'
+ if has_version:
+ str_show_promoted_structs = '|| show_promoted_structs' if chain_details.get('can_show_promoted_structs') else ''
+ # STRUCT_1_1_LIST structs are chained only when the version is exactly
+ # their promoted version ('=='); others use range checks.
+ if struct.name in STRUCT_1_1_LIST:
+ out.append(f'{version_desc} == {struct.version.nameApi} {str_show_promoted_structs}')
+ elif has_printed_condition:
+ out.append(f')\n && ({version_desc} < {struct.version.nameApi} {str_show_promoted_structs})')
+ else:
+ out.append(f'({version_desc} >= {struct.version.nameApi})')
+ out.append(')\n ')
+ else:
+ out.append(' ')
+ out.append(f'chain_members.push_back(reinterpret_cast<VkBaseOutStructure*>(&{struct.name[2:]}));\n')
+ out.append(self.AddGuardFooter(struct))
+ # Parameters/arguments forwarded from setup_<listName>_chain to initialize_chain.
+ chain_param_list = []
+ chain_arg_list = []
+ if chain_details.get('type') in [EXTENSION_TYPE_INSTANCE, EXTENSION_TYPE_BOTH]:
+ chain_param_list.append('AppInstance &inst')
+ chain_arg_list.append('inst')
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ chain_param_list.append('AppGpu &gpu')
+ chain_arg_list.append('gpu')
+ if chain_details.get('can_show_promoted_structs'):
+ chain_param_list.append('bool show_promoted_structs')
+ chain_arg_list.append('show_promoted_structs')
+
+ # Link the selected members into a pNext chain and emit the setup entry point.
+ out.append(f'''
+ if (!chain_members.empty()) {{
+ for(size_t i = 0; i < chain_members.size() - 1; i++){{
+ chain_members[i]->pNext = chain_members[i + 1];
+ }}
+ start_of_chain = chain_members[0];
+ }}
+ }}
+}};
+void setup_{listName}_chain({chain_details['extends']}& start, std::unique_ptr<{listName}_chain>& chain, {','.join(chain_param_list)}){{
+ chain = std::unique_ptr<{listName}_chain>(new {listName}_chain());
+ chain->initialize_chain({','.join(chain_arg_list)});
+ start.pNext = chain->start_of_chain;
+}};
+''')
+ if chain_details.get('print_iterator'):
+ # Emit chain_iterator_<listName>(): walks a returned pNext chain and prints
+ # every struct whose sType it recognizes.
+ out.append('\n')
+ out.append(f'void chain_iterator_{listName}(')
+ args = ['Printer &p']
+ if chain_details.get('type') in [EXTENSION_TYPE_INSTANCE, EXTENSION_TYPE_BOTH]:
+ args.append('AppInstance &inst')
+ if chain_details.get('type') in [EXTENSION_TYPE_DEVICE, EXTENSION_TYPE_BOTH]:
+ args.append('AppGpu &gpu')
+ if chain_details.get('can_show_promoted_structs'):
+ args.append('bool show_promoted_structs')
+ args.append('const void * place')
+ out.append(f'{", ".join(args)}) {{\n')
+ out.append(' while (place) {\n')
+ out.append(' const VkBaseOutStructure *structure = (const VkBaseOutStructure *)place;\n')
+ out.append(' p.SetSubHeader();\n')
+
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+
+ out.append(self.AddGuardHeader(struct))
+ out.append(f' if (structure->sType == {struct.sType}')
+ # Portability structs are excluded from json output.
+ if struct.name in PORTABILITY_STRUCTS:
+ out.append(' && p.Type() != OutputType::json')
+ out.append(') {\n')
+ out.append(f' const {struct.name}* props = (const {struct.name}*)structure;\n')
+ out.extend(self.PrintStructNameDecisionLogic(struct, version_desc, chain_details.get('can_show_promoted_structs')))
+ out.append(' p.AddNewline();\n')
+ out.append(' }\n')
+ out.append(self.AddGuardFooter(struct))
+ out.append(' place = structure->pNext;\n')
+ out.append(' }\n')
+ out.append('}\n')
+
+ # Emit prepare_<listName>_twocall_chain_vectors(): after the first (count) call,
+ # resize each backing vector and point the struct's array member at its data;
+ # returns whether this chain has any two-call members at all.
+ out.append('\n')
+ out.append(f'bool prepare_{listName}_twocall_chain_vectors(std::unique_ptr<{listName}_chain>& chain) {{\n')
+ out.append(' (void)chain;\n')
+ is_twocall = False
+ for s in structs_to_print:
+ if s in STRUCT_BLACKLIST:
+ continue
+ struct = self.vk.structs[s]
+ has_length = False
+ for member in struct.members:
+ if member.length is not None and len(member.fixedSizeArray) == 0:
+ has_length = True
+ if not has_length:
+ continue
+ out.append(self.AddGuardHeader(struct))
+ for member in struct.members:
+ if member.length is not None and len(member.fixedSizeArray) == 0:
+ out.append(f' chain->{struct.name}_{member.name}.resize(chain->{struct.name[2:]}.{member.length});\n')
+ out.append(f' chain->{struct.name[2:]}.{member.name} = chain->{struct.name}_{member.name}.data();\n')
+ out.append(self.AddGuardFooter(struct))
+ is_twocall = True
+ out.append(f' return {"true" if is_twocall else "false"};\n')
+ out.append('}\n')
+
+ return out
+
def GetStructCheckStringForMatchingExtension(self, struct, structName):
    """Return the C++ expression checking whether the extension that provides
    ``structName`` is enabled, or None when no extension in ``struct.extensions``
    has a vendor tag matching the struct name's suffix.

    Device extensions produce a gpu-side check, instance extensions an
    instance-side check.
    """
    for extension_name in struct.extensions:
        extension = self.vk.extensions[extension_name]
        # Vendor tag is the second '_'-separated token, e.g. 'EXT' in VK_EXT_foo.
        suffix = extension.name.split('_')[1]
        if not structName.endswith(suffix):
            continue
        if extension.device:
            return f'gpu.CheckPhysicalDeviceExtensionIncluded({extension.nameString})'
        if extension.instance:
            return f'inst.CheckExtensionEnabled({extension.nameString})'
    return None
+
def PrintStructNameDecisionLogic(self, struct, version_desc, can_show_promoted_structs):
    """Emit the C++ code that picks which name to print a chained struct under.

    The function is complex because it has to:
    * always print the struct with the most appropriate name given the gpu api
      version & enabled instance/device extensions,
    * print struct aliases when --show-promoted-structs is set,
    * not let alias printing duplicate the most appropriate name.

    Fix over the previous revision: GetStructCheckStringForMatchingExtension was
    called redundantly (its result was computed into ext_str and then recomputed
    inside the f-string); the cached value is now reused. Output is unchanged.
    """
    out = []
    out.append(f'{" " * 12}const char* name = ')
    # Each entry is [C++ condition, type name to print under].
    check_list = []
    if struct.version is not None:
        check_list.append([f'{version_desc} >= {struct.version.nameApi}', struct.name])
    else:
        # NOTE: if no extension matches, this deliberately stringifies to 'None',
        # matching the previous behavior.
        check = self.GetStructCheckStringForMatchingExtension(struct, struct.name)
        check_list.append([f'{check}', struct.name])

    for alias in struct.aliases:
        ext_str = self.GetStructCheckStringForMatchingExtension(struct, alias)
        if ext_str is not None:
            check_list.append([f'{ext_str}', alias])
    end_parens = ''
    # Turn the conditions into a nested ternary condition -
    for check in check_list:
        if check == check_list[-1]:
            # Last entry is the unconditional fallback name.
            out.append(f'"{check[1]}"')
        else:
            out.append(f'{check[0]} ? "{check[1]}" : (')
            end_parens += ')'
    out.append(f'{end_parens};\n')
    out.append(f'{" " * 12}Dump{struct.name}(p, name, *props);\n')
    if not can_show_promoted_structs:
        return out
    # Additionally dump each alias when --show-promoted-structs is active,
    # guarding against re-printing the name already chosen above.
    for alias in struct.aliases:
        ext_str = self.GetStructCheckStringForMatchingExtension(struct, alias)
        if ext_str is not None:
            out.append(f'{" " * 12}if (show_promoted_structs && strcmp(name, "{alias}") != 0 && {ext_str}) {{\n')
            out.append(f'{" " * 16}p.AddNewline();\n')
            out.append(f'{" " * 16}p.SetSubHeader();\n')
            out.append(f'{" " * 16}Dump{struct.name}(p, "{alias}", *props);\n')
            out.append(f'{" " * 12}}}\n')
    return out
+
def PrintStructComparisonForwardDecl(self, structure):
    """Emit the forward declaration of the generated equality operator for
    ``structure`` (definition emitted by PrintStructComparison)."""
    return [f'bool operator==(const {structure.name} & a, const {structure.name} b);\n']
+
+
def PrintStructComparison(self, structure):
    """Emit the definition of a member-wise C++ equality operator for
    ``structure``, skipping members listed in NAMES_TO_IGNORE (e.g. sType/pNext).
    """
    pieces = [
        f'bool operator==(const {structure.name} & a, const {structure.name} b) {{\n',
        ' return ',
    ]
    first_member = True
    for member in structure.members:
        if member.name in NAMES_TO_IGNORE:
            continue
        # Members after the first are joined with '&&' on a new line.
        if first_member:
            first_member = False
        else:
            pieces.append('\n && ')
        pieces.append(f'a.{member.name} == b.{member.name}')
    pieces.append(';\n')
    pieces.append('}\n')
    return pieces
+
+class VulkanFormatRange:
+ # Describes a contiguous range of VkFormat enum values together with what is
+ # needed to query them: a minimum instance version and/or a set of extensions.
+ # NOTE(review): field semantics inferred from parameter names; confirm at the
+ # call sites that construct these objects.
+ def __init__(self, min_inst_version, extensions, first, last):
+ self.minimum_instance_version = min_inst_version
+ self.extensions = extensions
+ self.first_format = first
+ self.last_format = last