aboutsummaryrefslogtreecommitdiff
path: root/layers/buffer_validation.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'layers/buffer_validation.cpp')
-rw-r--r--layers/buffer_validation.cpp364
1 files changed, 332 insertions, 32 deletions
diff --git a/layers/buffer_validation.cpp b/layers/buffer_validation.cpp
index e0c3b81b..a6903dfa 100644
--- a/layers/buffer_validation.cpp
+++ b/layers/buffer_validation.cpp
@@ -1144,7 +1144,7 @@ static inline bool IsExtentEqual(const VkExtent3D *extent, const VkExtent3D *oth
return result;
}
-// Returns the image extent of a specific subresource.
+// Returns the effective extent of an image subresource, adjusted for mip level and array depth.
static inline VkExtent3D GetImageSubresourceExtent(const IMAGE_STATE *img, const VkImageSubresourceLayers *subresource) {
const uint32_t mip = subresource->mipLevel;
@@ -1159,9 +1159,8 @@ static inline VkExtent3D GetImageSubresourceExtent(const IMAGE_STATE *img, const
extent.height = (0 == extent.height ? 0 : std::max(1U, extent.height >> mip));
extent.depth = (0 == extent.depth ? 0 : std::max(1U, extent.depth >> mip));
- // For 2D images, the number of layers present becomes the effective depth (for 2D <-> 3D copies)
- // In this case the depth extent is not diminished with mip level
- if (VK_IMAGE_TYPE_2D == img->createInfo.imageType) {
+ // Image arrays have an effective z extent that isn't diminished by mip level
+ if (VK_IMAGE_TYPE_3D != img->createInfo.imageType) {
extent.depth = img->createInfo.arrayLayers;
}
@@ -1263,7 +1262,15 @@ static inline bool CheckItgExtent(layer_data *device_data, const GLOBAL_CB_NODE
offset_extent_sum.width = static_cast<uint32_t>(abs(offset->x)) + extent->width;
offset_extent_sum.height = static_cast<uint32_t>(abs(offset->y)) + extent->height;
offset_extent_sum.depth = static_cast<uint32_t>(abs(offset->z)) + extent->depth;
- if ((IsExtentAligned(extent, granularity) == false) && (IsExtentEqual(&offset_extent_sum, subresource_extent) == false)) {
+
+ bool x_ok =
+ ((0 == SafeModulo(extent->width, granularity->width)) || (subresource_extent->width == offset_extent_sum.width));
+ bool y_ok =
+ ((0 == SafeModulo(extent->height, granularity->height)) || (subresource_extent->height == offset_extent_sum.height));
+ bool z_ok =
+ ((0 == SafeModulo(extent->depth, granularity->depth)) || (subresource_extent->depth == offset_extent_sum.depth));
+
+ if (!(x_ok && y_ok && z_ok)) {
skip |=
log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(cb_node->commandBuffer), __LINE__, DRAWSTATE_IMAGE_TRANSFER_GRANULARITY, "DS",
@@ -1343,26 +1350,311 @@ bool ValidateCopyBufferImageTransferGranularityRequirements(layer_data *device_d
// Check valid usage Image Tranfer Granularity requirements for elements of a VkImageCopy structure
bool ValidateCopyImageTransferGranularityRequirements(layer_data *device_data, const GLOBAL_CB_NODE *cb_node,
- const IMAGE_STATE *img, const VkImageCopy *region, const uint32_t i,
- const char *function) {
+ const IMAGE_STATE *src_img, const IMAGE_STATE *dst_img,
+ const VkImageCopy *region, const uint32_t i, const char *function) {
bool skip = false;
- VkExtent3D granularity = GetScaledItg(device_data, cb_node, img);
+ VkExtent3D granularity = GetScaledItg(device_data, cb_node, src_img);
skip |= CheckItgOffset(device_data, cb_node, &region->srcOffset, &granularity, i, function, "srcOffset");
+ VkExtent3D subresource_extent = GetImageSubresourceExtent(src_img, &region->srcSubresource);
+ skip |= CheckItgExtent(device_data, cb_node, &region->extent, &region->srcOffset, &granularity, &subresource_extent, i,
+ function, "extent");
+
+ granularity = GetScaledItg(device_data, cb_node, dst_img);
skip |= CheckItgOffset(device_data, cb_node, &region->dstOffset, &granularity, i, function, "dstOffset");
- VkExtent3D subresource_extent = GetImageSubresourceExtent(img, &region->dstSubresource);
+ subresource_extent = GetImageSubresourceExtent(dst_img, &region->dstSubresource);
skip |= CheckItgExtent(device_data, cb_node, &region->extent, &region->dstOffset, &granularity, &subresource_extent, i,
function, "extent");
return skip;
}
+// Validate contents of a VkImageCopy struct
+bool ValidateImageCopyData(const layer_data *device_data, const debug_report_data *report_data, const uint32_t regionCount,
+ const VkImageCopy *ic_regions, const IMAGE_STATE *src_state, const IMAGE_STATE *dst_state) {
+ bool skip = false;
+
+ for (uint32_t i = 0; i < regionCount; i++) {
+ VkImageCopy image_copy = ic_regions[i];
+ bool slice_override = false;
+ uint32_t depth_slices = 0;
+
+        // Special case for copying between a 1D/2D array and a 3D image: the 3D image's extent.depth corresponds to
+        // the array image's layerCount, so capture that slice count here and substitute it for depth in checks below.
+ if ((VK_IMAGE_TYPE_3D == src_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != dst_state->createInfo.imageType)) {
+ depth_slices = image_copy.dstSubresource.layerCount; // Slice count from 2D subresource
+ slice_override = (depth_slices != 1);
+ } else if ((VK_IMAGE_TYPE_3D == dst_state->createInfo.imageType) && (VK_IMAGE_TYPE_3D != src_state->createInfo.imageType)) {
+ depth_slices = image_copy.srcSubresource.layerCount; // Slice count from 2D subresource
+ slice_override = (depth_slices != 1);
+ }
+
+ // Do all checks on source image
+ //
+ if (src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
+ if ((0 != image_copy.srcOffset.y) || (1 != image_copy.extent.height)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01742, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] srcOffset.y is %d and extent.height is %d. For 1D images these must "
+ "be 0 and 1, respectively. %s",
+ i, image_copy.srcOffset.y, image_copy.extent.height, validation_error_map[VALIDATION_ERROR_01742]);
+ }
+ }
+
+ if ((src_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (src_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) {
+ if ((0 != image_copy.srcOffset.z) || (1 != image_copy.extent.depth)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01743, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] srcOffset.z is %d and extent.depth is %d. For 1D and 2D images "
+ "these must be 0 and 1, respectively. %s",
+ i, image_copy.srcOffset.z, image_copy.extent.depth, validation_error_map[VALIDATION_ERROR_01743]);
+ }
+ }
+
+ // VU01199 changed with mnt1
+ if (GetDeviceExtensions(device_data)->khr_maintenance1) {
+ if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+ if ((0 != image_copy.srcSubresource.baseArrayLayer) || (1 != image_copy.srcSubresource.layerCount)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01199, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and srcSubresource.layerCount "
+ "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. %s",
+ i, image_copy.srcSubresource.baseArrayLayer, image_copy.srcSubresource.layerCount,
+ validation_error_map[VALIDATION_ERROR_01199]);
+ }
+ }
+ } else { // Pre maint 1
+ if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+ if ((0 != image_copy.srcSubresource.baseArrayLayer) || (1 != image_copy.srcSubresource.layerCount)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01199, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer is %d and "
+ "srcSubresource.layerCount is %d. For copies with either source or dest of type "
+ "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively. %s",
+ i, image_copy.srcSubresource.baseArrayLayer, image_copy.srcSubresource.layerCount,
+ validation_error_map[VALIDATION_ERROR_01199]);
+ }
+ }
+ }
+
+ // TODO: this VU is redundant with VU01224. Gitlab issue 812 submitted to get it removed from the spec.
+ if ((image_copy.srcSubresource.baseArrayLayer >= src_state->createInfo.arrayLayers) ||
+ (image_copy.srcSubresource.baseArrayLayer + image_copy.srcSubresource.layerCount > src_state->createInfo.arrayLayers)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_02603, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] srcSubresource.baseArrayLayer (%d) must be less than the source image's "
+ "arrayLayers (%d), and the sum of baseArrayLayer and srcSubresource.layerCount (%d) must be less than "
+ "or equal to the source image's arrayLayers. %s",
+ i, image_copy.srcSubresource.baseArrayLayer, src_state->createInfo.arrayLayers,
+ image_copy.srcSubresource.layerCount, validation_error_map[VALIDATION_ERROR_02603]);
+ }
+
+ // Checks that apply only to compressed images
+ if (FormatIsCompressed(src_state->createInfo.format)) {
+ VkExtent3D block_size = FormatCompressedTexelBlockExtent(src_state->createInfo.format);
+
+ // image offsets must be multiples of block dimensions
+ if ((SafeModulo(image_copy.srcOffset.x, block_size.width) != 0) ||
+ (SafeModulo(image_copy.srcOffset.y, block_size.height) != 0) ||
+ (SafeModulo(image_copy.srcOffset.z, block_size.depth) != 0)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01209, "IMAGE",
+                            "vkCmdCopyImage(): pRegion[%d] srcOffset (%d, %d, %d) must be multiples of the compressed image's "
+                            "texel block extent (%d, %d, %d). %s.",
+                            i, image_copy.srcOffset.x, image_copy.srcOffset.y, image_copy.srcOffset.z, block_size.width,
+                            block_size.height, block_size.depth, validation_error_map[VALIDATION_ERROR_01209]);
+ }
+
+ // extent width must be a multiple of block width, or extent+offset width must equal subresource width
+ VkExtent3D mip_extent = GetImageSubresourceExtent(src_state, &(image_copy.srcSubresource));
+ if ((SafeModulo(image_copy.extent.width, block_size.width) != 0) &&
+ (image_copy.extent.width + image_copy.srcOffset.x != mip_extent.width)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01210, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block "
+ "width (%d), or when added to srcOffset.x (%d) must equal the image subresource width (%d). %s.",
+ i, image_copy.extent.width, block_size.width, image_copy.srcOffset.x, mip_extent.width,
+ validation_error_map[VALIDATION_ERROR_01210]);
+ }
+
+ // extent height must be a multiple of block height, or extent+offset height must equal subresource height
+ if ((SafeModulo(image_copy.extent.height, block_size.height) != 0) &&
+ (image_copy.extent.height + image_copy.srcOffset.y != mip_extent.height)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01211, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block "
+ "height (%d), or when added to srcOffset.y (%d) must equal the image subresource height (%d). %s.",
+ i, image_copy.extent.height, block_size.height, image_copy.srcOffset.y, mip_extent.height,
+ validation_error_map[VALIDATION_ERROR_01211]);
+ }
+
+ // extent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth
+ uint32_t copy_depth = (slice_override ? depth_slices : image_copy.extent.depth);
+ if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + image_copy.srcOffset.z != mip_extent.depth)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(src_state->image), __LINE__, VALIDATION_ERROR_01212, "IMAGE",
+                        "vkCmdCopyImage(): pRegion[%d] extent depth (%d) must be a multiple of the compressed texture block "
+                        "depth (%d), or when added to srcOffset.z (%d) must equal the image subresource depth (%d). %s.",
+                        i, copy_depth, block_size.depth, image_copy.srcOffset.z, mip_extent.depth,
+ validation_error_map[VALIDATION_ERROR_01212]);
+ }
+ } // Compressed
+
+ // Do all checks on dest image
+ //
+ if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) {
+ if ((0 != image_copy.dstOffset.y) || (1 != image_copy.extent.height)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01744, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] dstOffset.y is %d and extent.height is %d. For 1D images these must "
+ "be 0 and 1, respectively. %s",
+ i, image_copy.dstOffset.y, image_copy.extent.height, validation_error_map[VALIDATION_ERROR_01744]);
+ }
+ }
+
+ if ((dst_state->createInfo.imageType == VK_IMAGE_TYPE_1D) || (dst_state->createInfo.imageType == VK_IMAGE_TYPE_2D)) {
+ if ((0 != image_copy.dstOffset.z) || (1 != image_copy.extent.depth)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01745, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] dstOffset.z is %d and extent.depth is %d. For 1D and 2D images "
+ "these must be 0 and 1, respectively. %s",
+ i, image_copy.dstOffset.z, image_copy.extent.depth, validation_error_map[VALIDATION_ERROR_01745]);
+ }
+ }
+
+ if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+ if ((0 != image_copy.dstSubresource.baseArrayLayer) || (1 != image_copy.dstSubresource.layerCount)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01199, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
+ "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. %s",
+ i, image_copy.dstSubresource.baseArrayLayer, image_copy.dstSubresource.layerCount,
+ validation_error_map[VALIDATION_ERROR_01199]);
+ }
+ }
+ // VU01199 changed with mnt1
+ if (GetDeviceExtensions(device_data)->khr_maintenance1) {
+ if (dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+ if ((0 != image_copy.dstSubresource.baseArrayLayer) || (1 != image_copy.dstSubresource.layerCount)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01199, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and dstSubresource.layerCount "
+ "is %d. For VK_IMAGE_TYPE_3D images these must be 0 and 1, respectively. %s",
+ i, image_copy.dstSubresource.baseArrayLayer, image_copy.dstSubresource.layerCount,
+ validation_error_map[VALIDATION_ERROR_01199]);
+ }
+ }
+ } else { // Pre maint 1
+ if (src_state->createInfo.imageType == VK_IMAGE_TYPE_3D || dst_state->createInfo.imageType == VK_IMAGE_TYPE_3D) {
+ if ((0 != image_copy.dstSubresource.baseArrayLayer) || (1 != image_copy.dstSubresource.layerCount)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01199, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer is %d and "
+ "dstSubresource.layerCount is %d. For copies with either source or dest of type "
+ "VK_IMAGE_TYPE_3D, these must be 0 and 1, respectively. %s",
+ i, image_copy.dstSubresource.baseArrayLayer, image_copy.dstSubresource.layerCount,
+ validation_error_map[VALIDATION_ERROR_01199]);
+ }
+ }
+ }
+
+ // TODO: this VU is redundant with VU01224. Gitlab issue 812 submitted to get it removed from the spec.
+ if ((image_copy.dstSubresource.baseArrayLayer >= dst_state->createInfo.arrayLayers) ||
+ (image_copy.dstSubresource.baseArrayLayer + image_copy.dstSubresource.layerCount > dst_state->createInfo.arrayLayers)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_02604, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] dstSubresource.baseArrayLayer (%d) must be less than the dest image's "
+ "arrayLayers (%d), and the sum of baseArrayLayer and dstSubresource.layerCount (%d) must be less than "
+ "or equal to the dest image's arrayLayers. %s",
+ i, image_copy.dstSubresource.baseArrayLayer, dst_state->createInfo.arrayLayers,
+ image_copy.dstSubresource.layerCount, validation_error_map[VALIDATION_ERROR_02604]);
+ }
+
+ // Checks that apply only to compressed images
+ if (FormatIsCompressed(dst_state->createInfo.format)) {
+ VkExtent3D block_size = FormatCompressedTexelBlockExtent(dst_state->createInfo.format);
+
+ // image offsets must be multiples of block dimensions
+ if ((SafeModulo(image_copy.dstOffset.x, block_size.width) != 0) ||
+ (SafeModulo(image_copy.dstOffset.y, block_size.height) != 0) ||
+ (SafeModulo(image_copy.dstOffset.z, block_size.depth) != 0)) {
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01214, "IMAGE",
+                            "vkCmdCopyImage(): pRegion[%d] dstOffset (%d, %d, %d) must be multiples of the compressed image's "
+                            "texel block extent (%d, %d, %d). %s.",
+                            i, image_copy.dstOffset.x, image_copy.dstOffset.y, image_copy.dstOffset.z, block_size.width,
+                            block_size.height, block_size.depth, validation_error_map[VALIDATION_ERROR_01214]);
+ }
+
+ // extent width must be a multiple of block width, or extent+offset width must equal subresource width
+ VkExtent3D mip_extent = GetImageSubresourceExtent(dst_state, &(image_copy.dstSubresource));
+ if ((SafeModulo(image_copy.extent.width, block_size.width) != 0) &&
+ (image_copy.extent.width + image_copy.dstOffset.x != mip_extent.width)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01215, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] extent width (%d) must be a multiple of the compressed texture block "
+ "width (%d), or when added to dstOffset.x (%d) must equal the image subresource width (%d). %s.",
+ i, image_copy.extent.width, block_size.width, image_copy.dstOffset.x, mip_extent.width,
+ validation_error_map[VALIDATION_ERROR_01215]);
+ }
+
+ // extent height must be a multiple of block height, or extent+offset height must equal subresource height
+ if ((SafeModulo(image_copy.extent.height, block_size.height) != 0) &&
+ (image_copy.extent.height + image_copy.dstOffset.y != mip_extent.height)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01216, "IMAGE",
+ "vkCmdCopyImage(): pRegion[%d] extent height (%d) must be a multiple of the compressed texture block "
+ "height (%d), or when added to dstOffset.y (%d) must equal the image subresource height (%d). %s.",
+ i, image_copy.extent.height, block_size.height, image_copy.dstOffset.y, mip_extent.height,
+ validation_error_map[VALIDATION_ERROR_01216]);
+ }
+
+ // extent depth must be a multiple of block depth, or extent+offset depth must equal subresource depth
+ uint32_t copy_depth = (slice_override ? depth_slices : image_copy.extent.depth);
+ if ((SafeModulo(copy_depth, block_size.depth) != 0) && (copy_depth + image_copy.dstOffset.z != mip_extent.depth)) {
+ skip |=
+ log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+ reinterpret_cast<const uint64_t &>(dst_state->image), __LINE__, VALIDATION_ERROR_01217, "IMAGE",
+                        "vkCmdCopyImage(): pRegion[%d] extent depth (%d) must be a multiple of the compressed texture block "
+                        "depth (%d), or when added to dstOffset.z (%d) must equal the image subresource depth (%d). %s.",
+                        i, copy_depth, block_size.depth, image_copy.dstOffset.z, mip_extent.depth,
+ validation_error_map[VALIDATION_ERROR_01217]);
+ }
+ } // Compressed
+ }
+ return skip;
+}
+
bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_node, IMAGE_STATE *src_image_state,
IMAGE_STATE *dst_image_state, uint32_t region_count, const VkImageCopy *regions,
VkImageLayout src_image_layout, VkImageLayout dst_image_layout) {
bool skip = false;
const debug_report_data *report_data = core_validation::GetReportData(device_data);
+ skip = ValidateImageCopyData(device_data, report_data, region_count, regions, src_image_state, dst_image_state);
+
VkCommandBuffer command_buffer = cb_node->commandBuffer;
for (uint32_t i = 0; i < region_count; i++) {
+ bool slice_override = false;
+ uint32_t depth_slices = 0;
+
+        // Special case for copying between a 1D/2D array and a 3D image: the 3D image's extent.depth corresponds to
+        // the array image's layerCount, so capture that slice count here and substitute it for depth in checks below.
+ if ((VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType) &&
+ (VK_IMAGE_TYPE_3D != dst_image_state->createInfo.imageType)) {
+ depth_slices = regions[i].dstSubresource.layerCount; // Slice count from 2D subresource
+ slice_override = (depth_slices != 1);
+ } else if ((VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType) &&
+ (VK_IMAGE_TYPE_3D != src_image_state->createInfo.imageType)) {
+ depth_slices = regions[i].srcSubresource.layerCount; // Slice count from 2D subresource
+ slice_override = (depth_slices != 1);
+ }
+
if (regions[i].srcSubresource.layerCount == 0) {
std::stringstream ss;
ss << "vkCmdCopyImage: number of layers in pRegions[" << i << "] srcSubresource is zero";
@@ -1379,7 +1671,27 @@ bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_nod
HandleToUint64(command_buffer), __LINE__, DRAWSTATE_INVALID_IMAGE_ASPECT, "IMAGE", "%s", ss.str().c_str());
}
- if (!GetDeviceExtensions(device_data)->khr_maintenance1) {
+ if (GetDeviceExtensions(device_data)->khr_maintenance1) {
+ // No chance of mismatch if we're overriding depth slice count
+ if (!slice_override) {
+ // The number of depth slices in srcSubresource and dstSubresource must match
+ // Depth comes from layerCount for 1D,2D resources, from extent.depth for 3D
+ uint32_t src_slices =
+ (VK_IMAGE_TYPE_3D == src_image_state->createInfo.imageType ? regions[i].extent.depth
+ : regions[i].srcSubresource.layerCount);
+ uint32_t dst_slices =
+ (VK_IMAGE_TYPE_3D == dst_image_state->createInfo.imageType ? regions[i].extent.depth
+ : regions[i].dstSubresource.layerCount);
+ if (src_slices != dst_slices) {
+ std::stringstream ss;
+ ss << "vkCmdCopyImage: number of depth slices in source and destination subresources for pRegions[" << i
+ << "] do not match";
+ skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+ reinterpret_cast<uint64_t &>(command_buffer), __LINE__, VALIDATION_ERROR_01198, "IMAGE",
+ "%s. %s", ss.str().c_str(), validation_error_map[VALIDATION_ERROR_01198]);
+ }
+ }
+ } else {
// For each region the layerCount member of srcSubresource and dstSubresource must match
if (regions[i].srcSubresource.layerCount != regions[i].dstSubresource.layerCount) {
std::stringstream ss;
@@ -1438,22 +1750,6 @@ bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_nod
validation_error_map[VALIDATION_ERROR_01221]);
}
- if (!GetDeviceExtensions(device_data)->khr_maintenance1) {
- // If either of the calling command's src_image or dst_image parameters are of VkImageType VK_IMAGE_TYPE_3D,
- // the baseArrayLayer and layerCount members of both srcSubresource and dstSubresource must be 0 and 1, respectively
- if (((src_image_state->createInfo.imageType == VK_IMAGE_TYPE_3D) ||
- (dst_image_state->createInfo.imageType == VK_IMAGE_TYPE_3D)) &&
- ((regions[i].srcSubresource.baseArrayLayer != 0) || (regions[i].srcSubresource.layerCount != 1) ||
- (regions[i].dstSubresource.baseArrayLayer != 0) || (regions[i].dstSubresource.layerCount != 1))) {
- std::stringstream ss;
- ss << "vkCmdCopyImage: src or dstImage type was IMAGE_TYPE_3D, but in subRegion[" << i
- << "] baseArrayLayer was not zero or layerCount was not 1.";
- skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
- HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_01199, "IMAGE", "%s. %s",
- ss.str().c_str(), validation_error_map[VALIDATION_ERROR_01199]);
- }
- }
-
// MipLevel must be less than the mipLevels specified in VkImageCreateInfo when the image was created
if (regions[i].srcSubresource.mipLevel >= src_image_state->createInfo.mipLevels) {
std::stringstream ss;
@@ -1526,7 +1822,9 @@ bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_nod
// Each dimension offset + extent limits must fall with image subresource extent
VkExtent3D subresource_extent = GetImageSubresourceExtent(src_image_state, &(regions[i].srcSubresource));
- uint32_t extent_check = ExceedsBounds(&(regions[i].srcOffset), &regions[i].extent, &subresource_extent);
+ VkExtent3D copy_extent = regions[i].extent;
+ if (slice_override) copy_extent.depth = depth_slices;
+ uint32_t extent_check = ExceedsBounds(&(regions[i].srcOffset), &copy_extent, &subresource_extent);
if (extent_check & x_bit) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_01202, "IMAGE",
@@ -1549,12 +1847,14 @@ bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_nod
HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_01204, "IMAGE",
"vkCmdCopyImage: Source image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
"depth [%1d]. %s",
- i, regions[i].srcOffset.z, regions[i].extent.depth, subresource_extent.depth,
+ i, regions[i].srcOffset.z, copy_extent.depth, subresource_extent.depth,
validation_error_map[VALIDATION_ERROR_01204]);
}
subresource_extent = GetImageSubresourceExtent(dst_image_state, &(regions[i].dstSubresource));
- extent_check = ExceedsBounds(&(regions[i].dstOffset), &regions[i].extent, &subresource_extent);
+ copy_extent = regions[i].extent;
+ if (slice_override) copy_extent.depth = depth_slices;
+ extent_check = ExceedsBounds(&(regions[i].dstOffset), &copy_extent, &subresource_extent);
if (extent_check & x_bit) {
skip |= log_msg(report_data, VK_DEBUG_REPORT_ERROR_BIT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_01205, "IMAGE",
@@ -1576,7 +1876,7 @@ bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_nod
HandleToUint64(command_buffer), __LINE__, VALIDATION_ERROR_01207, "IMAGE",
"vkCmdCopyImage: Dest image pRegion %1d z-dimension offset [%1d] + extent [%1d] exceeds subResource "
"depth [%1d]. %s",
- i, regions[i].dstOffset.z, regions[i].extent.depth, subresource_extent.depth,
+ i, regions[i].dstOffset.z, copy_extent.depth, subresource_extent.depth,
validation_error_map[VALIDATION_ERROR_01207]);
}
@@ -1640,8 +1940,8 @@ bool PreCallValidateCmdCopyImage(layer_data *device_data, GLOBAL_CB_NODE *cb_nod
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, "vkCmdCopyImage()", VALIDATION_ERROR_01180, &hit_error);
skip |= VerifyImageLayout(device_data, cb_node, dst_image_state, regions[i].dstSubresource, dst_image_layout,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, "vkCmdCopyImage()", VALIDATION_ERROR_01183, &hit_error);
- skip |= ValidateCopyImageTransferGranularityRequirements(device_data, cb_node, dst_image_state, &regions[i], i,
- "vkCmdCopyImage()");
+ skip |= ValidateCopyImageTransferGranularityRequirements(device_data, cb_node, src_image_state, dst_image_state,
+ &regions[i], i, "vkCmdCopyImage()");
}
return skip;