anv: use updated tokens from vk.xml

Signed-off-by: Eric Engestrom <eric@igalia.com>
Reviewed-by: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
Acked-by: Jason Ekstrand <jason.ekstrand@collabora.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/17342>
This commit is contained in:
Eric Engestrom
2022-07-01 13:03:31 +01:00
committed by Marge Bot
parent 9554462f4d
commit 0225293a97
14 changed files with 181 additions and 181 deletions

View File

@@ -168,7 +168,7 @@ android_format_from_vk(unsigned vk_format)
 }
 static VkFormatFeatureFlags
-features2_to_features(VkFormatFeatureFlags2KHR features2)
+features2_to_features(VkFormatFeatureFlags2 features2)
 {
    return features2 & VK_ALL_FORMAT_FEATURE_FLAG_BITS;
 }
@@ -230,7 +230,7 @@ get_ahw_buffer_format_properties2(
     * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"
     */
    p->formatFeatures |=
-      VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR;
+      VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT;
    /* "Implementations may not always be able to determine the color model,
     * numerical range, or chroma offsets of the image contents, so the values

View File

@@ -196,7 +196,7 @@ anv_dynamic_state_copy(struct anv_dynamic_state *dest,
    ANV_CMP_COPY(depth_bounds_test_enable, ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE);
    ANV_CMP_COPY(stencil_test_enable, ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE);
-   if (copy_mask & VK_DYNAMIC_STATE_STENCIL_OP_EXT) {
+   if (copy_mask & VK_DYNAMIC_STATE_STENCIL_OP) {
       ANV_CMP_COPY(stencil_op.front.fail_op, ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP);
       ANV_CMP_COPY(stencil_op.front.pass_op, ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP);
       ANV_CMP_COPY(stencil_op.front.depth_fail_op, ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP);

View File

@@ -87,7 +87,7 @@ anv_descriptor_data_for_type(const struct anv_physical_device *device,
       data = ANV_DESCRIPTOR_SURFACE_STATE;
       break;
-   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+   case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
       data = ANV_DESCRIPTOR_INLINE_UNIFORM;
       break;
@@ -133,7 +133,7 @@ anv_descriptor_data_for_mutable_type(const struct anv_physical_device *device,
    for(VkDescriptorType i = 0; i <= VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT; i++) {
       if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
           i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
-          i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
+          i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
          continue;
       desc_data |= anv_descriptor_data_for_type(device, i);
@@ -182,7 +182,7 @@ static bool
 anv_needs_descriptor_buffer(VkDescriptorType desc_type,
                             enum anv_descriptor_data desc_data)
 {
-   if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT ||
+   if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK ||
       anv_descriptor_data_size(desc_data) > 0)
       return true;
    return false;
@@ -223,7 +223,7 @@ anv_descriptor_size_for_mutable_type(const struct anv_physical_device *device,
       if (i == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
          i == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
-         i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
+         i == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
          continue;
       enum anv_descriptor_data desc_data =
@@ -290,10 +290,10 @@ anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
    if (pdevice->always_use_bindless)
       return anv_descriptor_supports_bindless(pdevice, binding, sampler);
-   static const VkDescriptorBindingFlagBitsEXT flags_requiring_bindless =
-      VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT |
-      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
-      VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
+   static const VkDescriptorBindingFlagBits flags_requiring_bindless =
+      VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
+      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
+      VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT;
    return (binding->flags & flags_requiring_bindless) != 0;
 }
@@ -342,7 +342,7 @@ void anv_GetDescriptorSetLayoutSupport(
          /* There is no real limit on samplers */
          break;
-      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
          /* Inline uniforms don't use a binding */
         break;
@@ -382,7 +382,7 @@ void anv_GetDescriptorSetLayoutSupport(
       vk_find_struct(pSupport->pNext,
                      DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
    if (vdcls != NULL) {
-      if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+      if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
      } else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
         vdcls->maxVariableDescriptorCount = UINT16_MAX;
@@ -482,9 +482,9 @@ VkResult anv_CreateDescriptorSetLayout(
         set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
   }
-   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *binding_flags_info =
+   const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
      vk_find_struct_const(pCreateInfo->pNext,
-                          DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
+                          DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
   const VkMutableDescriptorTypeCreateInfoVALVE *mutable_info =
      vk_find_struct_const(pCreateInfo->pNext,
@@ -528,7 +528,7 @@ VkResult anv_CreateDescriptorSetLayout(
            assert(!(set_layout->binding[b].flags &
                     (VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
                      VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
-                     VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)));
+                     VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)));
         }
      }
@@ -593,7 +593,7 @@ VkResult anv_CreateDescriptorSetLayout(
         anv_descriptor_size(&set_layout->binding[b]);
      if (binding->descriptorType ==
-          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
@@ -653,7 +653,7 @@ set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
   assert(var_desc_count <= dynamic_binding->array_size);
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;
-   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
+   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK)
      return set_layout->descriptor_count;
   return set_layout->descriptor_count - shrink;
@@ -690,7 +690,7 @@ anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set
   uint32_t shrink = dynamic_binding->array_size - var_desc_count;
   uint32_t set_size;
-   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+   if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
      /* Inline uniform blocks are specified to use the descriptor array
       * size as the size in bytes of the block.
       */
@@ -902,7 +902,7 @@ VkResult anv_CreateDescriptorPool(
         desc_data_size *= 3;
      if (pCreateInfo->pPoolSizes[i].type ==
-          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
+          VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
         /* Inline uniform blocks are specified to use the descriptor array
          * size as the size in bytes of the block.
          */
@@ -1768,7 +1768,7 @@ void anv_UpdateDescriptorSets(
         }
         break;
-      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
+      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK: {
         const VkWriteDescriptorSetInlineUniformBlock *inline_write =
            vk_find_struct_const(write->pNext,
                                 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK);
@@ -1948,7 +1948,7 @@ anv_descriptor_set_write_template(struct anv_device *device,
         }
         break;
-      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
+      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      entry->binding,
                                                      data + entry->offset,

View File

@@ -1526,9 +1526,9 @@ void anv_GetPhysicalDeviceFeatures2(
         break;
      }
-      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT: {
-         VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT *features =
-            (VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT *)ext;
+      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR: {
+         VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR *features =
+            (VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR *)ext;
         features->globalPriorityQuery = true;
         break;
      }
@@ -2054,19 +2054,19 @@ anv_get_physical_device_properties_1_2(struct anv_physical_device *pdevice,
 {
   assert(p->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES);
-   p->driverID = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR;
+   p->driverID = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA;
   memset(p->driverName, 0, sizeof(p->driverName));
-   snprintf(p->driverName, VK_MAX_DRIVER_NAME_SIZE_KHR,
+   snprintf(p->driverName, VK_MAX_DRIVER_NAME_SIZE,
            "Intel open-source Mesa driver");
   memset(p->driverInfo, 0, sizeof(p->driverInfo));
-   snprintf(p->driverInfo, VK_MAX_DRIVER_INFO_SIZE_KHR,
+   snprintf(p->driverInfo, VK_MAX_DRIVER_INFO_SIZE,
            "Mesa " PACKAGE_VERSION MESA_GIT_SHA1);
   /* Don't advertise conformance with a particular version if the hardware's
    * support is incomplete/alpha.
    */
   if (pdevice->is_alpha) {
-      p->conformanceVersion = (VkConformanceVersionKHR) {
+      p->conformanceVersion = (VkConformanceVersion) {
         .major = 0,
         .minor = 0,
         .subminor = 0,
@@ -2074,7 +2074,7 @@ anv_get_physical_device_properties_1_2(struct anv_physical_device *pdevice,
      };
   }
   else {
-      p->conformanceVersion = (VkConformanceVersionKHR) {
+      p->conformanceVersion = (VkConformanceVersion) {
         .major = 1,
         .minor = 3,
         .subminor = 0,
@@ -2083,9 +2083,9 @@ anv_get_physical_device_properties_1_2(struct anv_physical_device *pdevice,
   }
   p->denormBehaviorIndependence =
-      VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR;
+      VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL;
   p->roundingModeIndependence =
-      VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR;
+      VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE;
   /* Broadwell does not support HF denorms and there are restrictions
    * other gens. According to Kabylake's PRM:
@@ -2154,18 +2154,18 @@ anv_get_physical_device_properties_1_2(struct anv_physical_device *pdevice,
   p->maxDescriptorSetUpdateAfterBindInputAttachments = MAX_DESCRIPTOR_SET_INPUT_ATTACHMENTS;
   /* We support all of the depth resolve modes */
-   p->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR |
-                                   VK_RESOLVE_MODE_AVERAGE_BIT_KHR |
-                                   VK_RESOLVE_MODE_MIN_BIT_KHR |
-                                   VK_RESOLVE_MODE_MAX_BIT_KHR;
+   p->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT |
+                                   VK_RESOLVE_MODE_AVERAGE_BIT |
+                                   VK_RESOLVE_MODE_MIN_BIT |
+                                   VK_RESOLVE_MODE_MAX_BIT;
   /* Average doesn't make sense for stencil so we don't support that */
-   p->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR;
+   p->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT;
   if (pdevice->info.ver >= 8) {
      /* The advanced stencil resolve modes currently require stencil
       * sampling be supported by the hardware.
       */
-      p->supportedStencilResolveModes |= VK_RESOLVE_MODE_MIN_BIT_KHR |
-                                         VK_RESOLVE_MODE_MAX_BIT_KHR;
+      p->supportedStencilResolveModes |= VK_RESOLVE_MODE_MIN_BIT |
+                                         VK_RESOLVE_MODE_MAX_BIT;
   }
   p->independentResolveNone = true;
   p->independentResolve = true;
@@ -2430,9 +2430,9 @@ void anv_GetPhysicalDeviceProperties2(
         break;
      }
-      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR: {
-         VkPhysicalDeviceMaintenance4PropertiesKHR *properties =
-            (VkPhysicalDeviceMaintenance4PropertiesKHR *)ext;
+      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: {
+         VkPhysicalDeviceMaintenance4Properties *properties =
+            (VkPhysicalDeviceMaintenance4Properties *)ext;
         properties->maxBufferSize = pdevice->isl_dev.max_buffer_size;
         break;
      }
@@ -2624,13 +2624,13 @@ static int
 vk_priority_to_gen(int priority)
 {
   switch (priority) {
-   case VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT:
+   case VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR:
      return INTEL_CONTEXT_LOW_PRIORITY;
-   case VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT:
+   case VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR:
      return INTEL_CONTEXT_MEDIUM_PRIORITY;
-   case VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT:
+   case VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR:
      return INTEL_CONTEXT_HIGH_PRIORITY;
-   case VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT:
+   case VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR:
      return INTEL_CONTEXT_REALTIME_PRIORITY;
   default:
      unreachable("Invalid priority");
@@ -2661,16 +2661,16 @@ void anv_GetPhysicalDeviceQueueFamilyProperties2(
      vk_foreach_struct(ext, p->pNext) {
         switch (ext->sType) {
-         case VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT: {
-            VkQueueFamilyGlobalPriorityPropertiesEXT *properties =
-               (VkQueueFamilyGlobalPriorityPropertiesEXT *)ext;
+         case VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR: {
+            VkQueueFamilyGlobalPriorityPropertiesKHR *properties =
+               (VkQueueFamilyGlobalPriorityPropertiesKHR *)ext;
            /* Deliberately sorted low to high */
-            VkQueueGlobalPriorityEXT all_priorities[] = {
-               VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
-               VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
-               VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
-               VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT,
+            VkQueueGlobalPriorityKHR all_priorities[] = {
+               VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR,
+               VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR,
+               VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR,
+               VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR,
            };
            uint32_t count = 0;
@@ -3071,13 +3071,13 @@ VkResult anv_CreateDevice(
   }
   /* Check if client specified queue priority. */
-   const VkDeviceQueueGlobalPriorityCreateInfoEXT *queue_priority =
+   const VkDeviceQueueGlobalPriorityCreateInfoKHR *queue_priority =
      vk_find_struct_const(pCreateInfo->pQueueCreateInfos[0].pNext,
-                          DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT);
+                          DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR);
-   VkQueueGlobalPriorityEXT priority =
+   VkQueueGlobalPriorityKHR priority =
      queue_priority ? queue_priority->globalPriority :
-                      VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT;
+                      VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR;
   device = vk_zalloc2(&physical_device->instance->vk.alloc, pAllocator,
                       sizeof(*device), 8,
@@ -3229,15 +3229,15 @@ VkResult anv_CreateDevice(
   /* As per spec, the driver implementation may deny requests to acquire
    * a priority above the default priority (MEDIUM) if the caller does not
-    * have sufficient privileges. In this scenario VK_ERROR_NOT_PERMITTED_EXT
+    * have sufficient privileges. In this scenario VK_ERROR_NOT_PERMITTED_KHR
    * is returned.
    */
   if (physical_device->max_context_priority >= INTEL_CONTEXT_MEDIUM_PRIORITY) {
      int err = anv_gem_set_context_param(device->fd, device->context_id,
                                          I915_CONTEXT_PARAM_PRIORITY,
                                          vk_priority_to_gen(priority));
-      if (err != 0 && priority > VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT) {
-         result = vk_error(device, VK_ERROR_NOT_PERMITTED_EXT);
+      if (err != 0 && priority > VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR) {
+         result = vk_error(device, VK_ERROR_NOT_PERMITTED_KHR);
         goto fail_vmas;
      }
   }
@@ -3811,9 +3811,9 @@ VkResult anv_AllocateMemory(
         dedicated_info = (void *)ext;
         break;
-      case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR: {
-         const VkMemoryOpaqueCaptureAddressAllocateInfoKHR *addr_info =
-            (const VkMemoryOpaqueCaptureAddressAllocateInfoKHR *)ext;
+      case VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO: {
+         const VkMemoryOpaqueCaptureAddressAllocateInfo *addr_info =
+            (const VkMemoryOpaqueCaptureAddressAllocateInfo *)ext;
         client_address = addr_info->opaqueCaptureAddress;
         break;
      }
@@ -3832,7 +3832,7 @@ VkResult anv_AllocateMemory(
   if (device->physical->has_implicit_ccs && device->info.has_aux_map)
      alloc_flags |= ANV_BO_ALLOC_IMPLICIT_CCS;
-   if (vk_flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR)
+   if (vk_flags & VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT)
      alloc_flags |= ANV_BO_ALLOC_CLIENT_VISIBLE_ADDRESS;
   if ((export_info && export_info->handleTypes) ||
@@ -4444,7 +4444,7 @@ void anv_GetBufferMemoryRequirements2(
 void anv_GetDeviceBufferMemoryRequirementsKHR(
    VkDevice                                    _device,
-    const VkDeviceBufferMemoryRequirementsKHR*  pInfo,
+    const VkDeviceBufferMemoryRequirements*     pInfo,
    VkMemoryRequirements2*                      pMemoryRequirements)
 {
   ANV_FROM_HANDLE(anv_device, device, _device);
@@ -4500,7 +4500,7 @@ void anv_DestroyBuffer(
 VkDeviceAddress anv_GetBufferDeviceAddress(
    VkDevice                                    device,
-    const VkBufferDeviceAddressInfoKHR*         pInfo)
+    const VkBufferDeviceAddressInfo*            pInfo)
 {
   ANV_FROM_HANDLE(anv_buffer, buffer, pInfo->buffer);
@@ -4512,14 +4512,14 @@ VkDeviceAddress anv_GetBufferDeviceAddress(
 uint64_t anv_GetBufferOpaqueCaptureAddress(
    VkDevice                                    device,
-    const VkBufferDeviceAddressInfoKHR*         pInfo)
+    const VkBufferDeviceAddressInfo*            pInfo)
 {
   return 0;
 }
 uint64_t anv_GetDeviceMemoryOpaqueCaptureAddress(
    VkDevice                                    device,
-    const VkDeviceMemoryOpaqueCaptureAddressInfoKHR* pInfo)
+    const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo)
 {
   ANV_FROM_HANDLE(anv_device_memory, memory, pInfo->memory);

View File

@@ -335,8 +335,8 @@ static const struct anv_format main_formats[] = {
 };
 static const struct anv_format _4444_formats[] = {
-   fmt1(VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT, ISL_FORMAT_B4G4R4A4_UNORM),
-   fmt_unsupported(VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT),
+   fmt1(VK_FORMAT_A4R4G4B4_UNORM_PACK16, ISL_FORMAT_B4G4R4A4_UNORM),
+   fmt_unsupported(VK_FORMAT_A4B4G4R4_UNORM_PACK16),
 };
 static const struct anv_format ycbcr_formats[] = {
@@ -544,14 +544,14 @@ anv_get_format_aspect(const struct intel_device_info *devinfo,
 // Format capabilities
-VkFormatFeatureFlags2KHR
+VkFormatFeatureFlags2
 anv_get_image_format_features2(const struct intel_device_info *devinfo,
                                VkFormat vk_format,
                                const struct anv_format *anv_format,
                                VkImageTiling vk_tiling,
                                const struct isl_drm_modifier_info *isl_mod_info)
 {
-   VkFormatFeatureFlags2KHR flags = 0;
+   VkFormatFeatureFlags2 flags = 0;
   if (anv_format == NULL)
      return 0;
@@ -566,21 +566,21 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
          vk_tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
         return 0;
-      flags |= VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR |
-               VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR |
-               VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR |
-               VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR |
-               VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR |
-               VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT |
+               VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT |
+               VK_FORMAT_FEATURE_2_BLIT_SRC_BIT |
+               VK_FORMAT_FEATURE_2_BLIT_DST_BIT |
+               VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT |
+               VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT;
      if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT)
-         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR;
+         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
      if ((aspects & VK_IMAGE_ASPECT_DEPTH_BIT) && devinfo->ver >= 9)
-         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR;
+         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT;
      if (aspects & VK_IMAGE_ASPECT_DEPTH_BIT)
-         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR;
+         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT;
      return flags;
   }
@@ -607,16 +607,16 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
       */
      if (vk_tiling != VK_IMAGE_TILING_OPTIMAL &&
          isl_format_get_layout(plane_format.isl_format)->txc == ISL_TXC_ASTC)
-         return VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR |
-                VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR;
+         return VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT |
+                VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT;
-      flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT;
      if (devinfo->ver >= 9)
-         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR;
+         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT;
      if (isl_format_supports_filtering(devinfo, plane_format.isl_format))
-         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR;
+         flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
   }
   /* We can render to swizzled formats. However, if the alpha channel is
@@ -625,7 +625,7 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
    */
   if (isl_format_supports_rendering(devinfo, plane_format.isl_format) &&
       plane_format.swizzle.a == ISL_CHANNEL_SELECT_ALPHA) {
-      flags |= VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT;
      /* While we can render to swizzled formats, they don't blend correctly
       * if there are blend constants involved. The swizzle just remaps the
@@ -635,40 +635,40 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
       */
      if (isl_format_supports_alpha_blending(devinfo, plane_format.isl_format) &&
          isl_swizzle_is_identity(plane_format.swizzle))
-         flags |= VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR;
+         flags |= VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT;
   }
   /* Load/store is determined based on base format. This prevents RGB
    * formats from showing up as load/store capable.
    */
   if (isl_format_supports_typed_reads(devinfo, base_isl_format))
-      flags |= VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT;
   if (isl_format_supports_typed_writes(devinfo, base_isl_format))
-      flags |= VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT;
-   /* Keep this old behavior on VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR.
+   /* Keep this old behavior on VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT.
    * When KHR_format_features2 is enabled, applications should only rely on
    * it for the list of shader storage extended formats [1]. Before that,
    * this applies to all VkFormats.
    *
    * [1] : https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#features-shaderStorageImageExtendedFormats
    */
-   if (flags & VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR)
-      flags |= VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR;
+   if (flags & VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT)
+      flags |= VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT;
   if (base_isl_format == ISL_FORMAT_R32_SINT ||
       base_isl_format == ISL_FORMAT_R32_UINT ||
       base_isl_format == ISL_FORMAT_R32_FLOAT)
-      flags |= VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT;
   if (flags) {
-      flags |= VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR |
-               VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR |
-               VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_BLIT_SRC_BIT |
+               VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT |
+               VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT;
      /* Blit destination requires rendering support. */
      if (isl_format_supports_rendering(devinfo, plane_format.isl_format))
-         flags |= VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR;
+         flags |= VK_FORMAT_FEATURE_2_BLIT_DST_BIT;
   }
   /* XXX: We handle 3-channel formats by switching them out for RGBX or
@@ -682,8 +682,8 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
       base_isl_format != ISL_FORMAT_UNSUPPORTED &&
       !util_is_power_of_two_or_zero(isl_format_layouts[base_isl_format].bpb) &&
       isl_format_rgb_to_rgbx(base_isl_format) == ISL_FORMAT_UNSUPPORTED) {
-      flags &= ~VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR;
-      flags &= ~VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR;
+      flags &= ~VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT;
+      flags &= ~VK_FORMAT_FEATURE_2_BLIT_DST_BIT;
   }
   if (anv_format->can_ycbcr) {
@@ -695,11 +695,11 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
       * sampler. The failures show a slightly out of range values on the
       * bottom left of the sampled image.
       */
-      flags |= VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT;
     } else {
-      flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR |
-               VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR |
-               VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR;
+      flags |= VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT |
+               VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT |
+               VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT;
     }
   /* We can support cosited chroma locations when handle planes with our
@@ -708,20 +708,20 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
     for (unsigned p = 0; p < anv_format->n_planes; p++) {
        if (anv_format->planes[p].denominator_scales[0] > 1 ||
            anv_format->planes[p].denominator_scales[1] > 1) {
-          flags |= VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR;
+          flags |= VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT;
           break;
        }
     }
     if (anv_format->n_planes > 1)
-       flags |= VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR;
+       flags |= VK_FORMAT_FEATURE_2_DISJOINT_BIT;
-    const VkFormatFeatureFlags2KHR disallowed_ycbcr_image_features =
-       VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR |
-       VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR |
-       VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR |
-       VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR |
-       VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR;
+    const VkFormatFeatureFlags2 disallowed_ycbcr_image_features =
+       VK_FORMAT_FEATURE_2_BLIT_SRC_BIT |
+       VK_FORMAT_FEATURE_2_BLIT_DST_BIT |
+       VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT |
+       VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT |
+       VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT;
     flags &= ~disallowed_ycbcr_image_features;
   }
@@ -772,7 +772,7 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
   if (anv_format->n_planes > 1) {
      /* For simplicity, keep DISJOINT disabled for multi-planar format. */
-      flags &= ~VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR;
+      flags &= ~VK_FORMAT_FEATURE_2_DISJOINT_BIT;
     /* VK_ANDROID_external_memory_android_hardware_buffer in Virtio-GPU
      * Venus driver layers on top of VK_EXT_image_drm_format_modifier of
@@ -803,14 +803,14 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
* eglCreateImage, we require that the dma_buf for the primary surface * eglCreateImage, we require that the dma_buf for the primary surface
* and the dma_buf for its aux surface refer to the same bo. * and the dma_buf for its aux surface refer to the same bo.
*/ */
flags &= ~VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR; flags &= ~VK_FORMAT_FEATURE_2_DISJOINT_BIT;
/* When the hardware accesses a storage image, it bypasses the aux /* When the hardware accesses a storage image, it bypasses the aux
* surface. We could support storage access on images with aux * surface. We could support storage access on images with aux
* modifiers by resolving the aux surface prior to the storage access. * modifiers by resolving the aux surface prior to the storage access.
*/ */
flags &= ~VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR; flags &= ~VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT;
flags &= ~VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR; flags &= ~VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT;
} }
} }
@@ -822,12 +822,12 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
return flags; return flags;
} }
static VkFormatFeatureFlags2KHR static VkFormatFeatureFlags2
get_buffer_format_features2(const struct intel_device_info *devinfo, get_buffer_format_features2(const struct intel_device_info *devinfo,
VkFormat vk_format, VkFormat vk_format,
const struct anv_format *anv_format) const struct anv_format *anv_format)
{ {
VkFormatFeatureFlags2KHR flags = 0; VkFormatFeatureFlags2 flags = 0;
if (anv_format == NULL) if (anv_format == NULL)
return 0; return 0;
@@ -848,22 +848,22 @@ get_buffer_format_features2(const struct intel_device_info *devinfo,
if (isl_format_supports_sampling(devinfo, isl_format) && if (isl_format_supports_sampling(devinfo, isl_format) &&
!isl_format_is_compressed(isl_format)) !isl_format_is_compressed(isl_format))
flags |= VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR; flags |= VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT;
if (isl_format_supports_vertex_fetch(devinfo, isl_format)) if (isl_format_supports_vertex_fetch(devinfo, isl_format))
flags |= VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR; flags |= VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT;
if (isl_is_storage_image_format(isl_format)) if (isl_is_storage_image_format(isl_format))
flags |= VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR; flags |= VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT;
if (isl_format == ISL_FORMAT_R32_SINT || isl_format == ISL_FORMAT_R32_UINT) if (isl_format == ISL_FORMAT_R32_SINT || isl_format == ISL_FORMAT_R32_UINT)
flags |= VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR; flags |= VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT;
return flags; return flags;
} }
static VkFormatFeatureFlags static VkFormatFeatureFlags
features2_to_features(VkFormatFeatureFlags2KHR features2) features2_to_features(VkFormatFeatureFlags2 features2)
{ {
return features2 & VK_ALL_FORMAT_FEATURE_FLAG_BITS; return features2 & VK_ALL_FORMAT_FEATURE_FLAG_BITS;
} }
@@ -881,7 +881,7 @@ get_drm_format_modifier_properties_list(const struct anv_physical_device *physic
&list->drmFormatModifierCount); &list->drmFormatModifierCount);
isl_drm_modifier_info_for_each(isl_mod_info) { isl_drm_modifier_info_for_each(isl_mod_info) {
VkFormatFeatureFlags2KHR features2 = VkFormatFeatureFlags2 features2 =
anv_get_image_format_features2(devinfo, vk_format, anv_format, anv_get_image_format_features2(devinfo, vk_format, anv_format,
VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT,
isl_mod_info); isl_mod_info);
@@ -916,7 +916,7 @@ get_drm_format_modifier_properties_list_2(const struct anv_physical_device *phys
&list->drmFormatModifierCount); &list->drmFormatModifierCount);
isl_drm_modifier_info_for_each(isl_mod_info) { isl_drm_modifier_info_for_each(isl_mod_info) {
VkFormatFeatureFlags2KHR features2 = VkFormatFeatureFlags2 features2 =
anv_get_image_format_features2(devinfo, vk_format, anv_format, anv_get_image_format_features2(devinfo, vk_format, anv_format,
VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT, VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT,
isl_mod_info); isl_mod_info);
@@ -948,7 +948,7 @@ void anv_GetPhysicalDeviceFormatProperties2(
assert(pFormatProperties->sType == VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2); assert(pFormatProperties->sType == VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2);
VkFormatFeatureFlags2KHR linear2, optimal2, buffer2; VkFormatFeatureFlags2 linear2, optimal2, buffer2;
linear2 = anv_get_image_format_features2(devinfo, vk_format, anv_format, linear2 = anv_get_image_format_features2(devinfo, vk_format, anv_format,
VK_IMAGE_TILING_LINEAR, NULL); VK_IMAGE_TILING_LINEAR, NULL);
optimal2 = anv_get_image_format_features2(devinfo, vk_format, anv_format, optimal2 = anv_get_image_format_features2(devinfo, vk_format, anv_format,
@@ -974,8 +974,8 @@ void anv_GetPhysicalDeviceFormatProperties2(
(void *)ext); (void *)ext);
break; break;
case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR: { case VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3: {
VkFormatProperties3KHR *props = (VkFormatProperties3KHR *)ext; VkFormatProperties3 *props = (VkFormatProperties3 *)ext;
props->linearTilingFeatures = linear2; props->linearTilingFeatures = linear2;
props->optimalTilingFeatures = optimal2; props->optimalTilingFeatures = optimal2;
props->bufferFeatures = buffer2; props->bufferFeatures = buffer2;
@@ -995,7 +995,7 @@ anv_get_image_format_properties(
VkImageFormatProperties *pImageFormatProperties, VkImageFormatProperties *pImageFormatProperties,
VkSamplerYcbcrConversionImageFormatProperties *pYcbcrImageFormatProperties) VkSamplerYcbcrConversionImageFormatProperties *pYcbcrImageFormatProperties)
{ {
VkFormatFeatureFlags2KHR format_feature_flags; VkFormatFeatureFlags2 format_feature_flags;
VkExtent3D maxExtent; VkExtent3D maxExtent;
uint32_t maxMipLevels; uint32_t maxMipLevels;
uint32_t maxArraySize; uint32_t maxArraySize;
@@ -1031,7 +1031,7 @@ anv_get_image_format_properties(
for (uint32_t i = 0; i < format_list_info->viewFormatCount; ++i) { for (uint32_t i = 0; i < format_list_info->viewFormatCount; ++i) {
VkFormat vk_view_format = format_list_info->pViewFormats[i]; VkFormat vk_view_format = format_list_info->pViewFormats[i];
const struct anv_format *anv_view_format = anv_get_format(vk_view_format); const struct anv_format *anv_view_format = anv_get_format(vk_view_format);
VkFormatFeatureFlags2KHR view_format_features = VkFormatFeatureFlags2 view_format_features =
anv_get_image_format_features2(devinfo, vk_view_format, anv_get_image_format_features2(devinfo, vk_view_format,
anv_view_format, anv_view_format,
info->tiling, info->tiling,
@@ -1135,8 +1135,8 @@ anv_get_image_format_properties(
if (info->tiling == VK_IMAGE_TILING_OPTIMAL && if (info->tiling == VK_IMAGE_TILING_OPTIMAL &&
info->type == VK_IMAGE_TYPE_2D && info->type == VK_IMAGE_TYPE_2D &&
(format_feature_flags & (VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR | (format_feature_flags & (VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT |
VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR)) && VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT)) &&
!(info->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) && !(info->flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT) &&
!(image_usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(image_usage & VK_IMAGE_USAGE_STORAGE_BIT) &&
isl_format_supports_multisampling(devinfo, format->planes[0].isl_format)) { isl_format_supports_multisampling(devinfo, format->planes[0].isl_format)) {
@@ -1144,21 +1144,21 @@ anv_get_image_format_properties(
} }
if (view_usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) { if (view_usage & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) {
if (!(format_feature_flags & (VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR | if (!(format_feature_flags & (VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT |
VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR))) { VK_FORMAT_FEATURE_2_BLIT_SRC_BIT))) {
goto unsupported; goto unsupported;
} }
} }
if (view_usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) { if (view_usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT) {
if (!(format_feature_flags & (VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR | if (!(format_feature_flags & (VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT |
VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR))) { VK_FORMAT_FEATURE_2_BLIT_DST_BIT))) {
goto unsupported; goto unsupported;
} }
} }
if (view_usage & VK_IMAGE_USAGE_SAMPLED_BIT) { if (view_usage & VK_IMAGE_USAGE_SAMPLED_BIT) {
if (!(format_feature_flags & VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR)) { if (!(format_feature_flags & VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT)) {
goto unsupported; goto unsupported;
} }
} }
@@ -1175,19 +1175,19 @@ anv_get_image_format_properties(
} }
if (view_usage & VK_IMAGE_USAGE_STORAGE_BIT) { if (view_usage & VK_IMAGE_USAGE_STORAGE_BIT) {
if (!(format_feature_flags & VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR)) { if (!(format_feature_flags & VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT)) {
goto unsupported; goto unsupported;
} }
} }
if (view_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) { if (view_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) {
if (!(format_feature_flags & VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR)) { if (!(format_feature_flags & VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT)) {
goto unsupported; goto unsupported;
} }
} }
if (view_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) { if (view_usage & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) {
if (!(format_feature_flags & VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR)) { if (!(format_feature_flags & VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT)) {
goto unsupported; goto unsupported;
} }
} }
@@ -1197,11 +1197,11 @@ anv_get_image_format_properties(
* *
* If format is a multi-planar format, and if imageCreateFormatFeatures * If format is a multi-planar format, and if imageCreateFormatFeatures
* (as defined in Image Creation Limits) does not contain * (as defined in Image Creation Limits) does not contain
* VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR, then flags must not contain * VK_FORMAT_FEATURE_2_DISJOINT_BIT, then flags must not contain
* VK_IMAGE_CREATE_DISJOINT_BIT. * VK_IMAGE_CREATE_DISJOINT_BIT.
*/ */
if (format->n_planes > 1 && if (format->n_planes > 1 &&
!(format_feature_flags & VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR)) { !(format_feature_flags & VK_FORMAT_FEATURE_2_DISJOINT_BIT)) {
goto unsupported; goto unsupported;
} }
@@ -1393,10 +1393,10 @@ VkResult anv_GetPhysicalDeviceImageFormatProperties2(
external_info = (const void *) s; external_info = (const void *) s;
break; break;
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT:
case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR: case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
/* anv_get_image_format_properties will handle these */ /* anv_get_image_format_properties will handle these */
break; break;
case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT: case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
/* Ignore but don't warn */ /* Ignore but don't warn */
break; break;
default: default:
@@ -1481,12 +1481,12 @@ VkResult anv_GetPhysicalDeviceImageFormatProperties2(
* method exists, then we reject image creation here. * method exists, then we reject image creation here.
* *
* If the memory handle requires matching * If the memory handle requires matching
* VkPhysicalDeviceIDPropertiesKHR::driverUUID and ::deviceUUID, then the * VkPhysicalDeviceIDProperties::driverUUID and ::deviceUUID, then the
* match-requirement guarantees that all users of the image agree on the * match-requirement guarantees that all users of the image agree on the
* image's memory layout. * image's memory layout.
* *
* If the memory handle does not require matching * If the memory handle does not require matching
* VkPhysicalDeviceIDPropertiesKHR::driverUUID nor ::deviceUUID, then we * VkPhysicalDeviceIDProperties::driverUUID nor ::deviceUUID, then we
* require that the app and driver be able to explicitly communicate to * require that the app and driver be able to explicitly communicate to
* each other the image's memory layout. * each other the image's memory layout.
* *

View File

@@ -54,7 +54,7 @@ memory_range_end(struct anv_image_memory_range memory_range)
/** /**
* Get binding for VkImagePlaneMemoryRequirementsInfo, * Get binding for VkImagePlaneMemoryRequirementsInfo,
* VkBindImagePlaneMemoryInfo and VkDeviceImageMemoryRequirementsKHR. * VkBindImagePlaneMemoryInfo and VkDeviceImageMemoryRequirements.
*/ */
static struct anv_image_binding * static struct anv_image_binding *
image_aspect_to_binding(struct anv_image *image, VkImageAspectFlags aspect) image_aspect_to_binding(struct anv_image *image, VkImageAspectFlags aspect)
@@ -329,7 +329,7 @@ add_surface(struct anv_device *device,
* parameter @a inout_primary_tiling_flags. * parameter @a inout_primary_tiling_flags.
* *
* If the image plane is a separate stencil plane and if the user provided * If the image plane is a separate stencil plane and if the user provided
* VkImageStencilUsageCreateInfoEXT, then @a usage must be stencilUsage. * VkImageStencilUsageCreateInfo, then @a usage must be stencilUsage.
* *
* @see anv_image::planes[]::shadow_surface * @see anv_image::planes[]::shadow_surface
*/ */
@@ -373,7 +373,7 @@ static bool
can_fast_clear_with_non_zero_color(const struct intel_device_info *devinfo, can_fast_clear_with_non_zero_color(const struct intel_device_info *devinfo,
const struct anv_image *image, const struct anv_image *image,
uint32_t plane, uint32_t plane,
const VkImageFormatListCreateInfoKHR *fmt_list) const VkImageFormatListCreateInfo *fmt_list)
{ {
/* If we don't have an AUX surface where fast clears apply, we can return /* If we don't have an AUX surface where fast clears apply, we can return
* early. * early.
@@ -453,7 +453,7 @@ storage_image_format_supports_atomic(const struct intel_device_info *devinfo,
VkImageCreateFlags create_flags, VkImageCreateFlags create_flags,
enum isl_format format, enum isl_format format,
VkImageTiling vk_tiling, VkImageTiling vk_tiling,
const VkImageFormatListCreateInfoKHR *fmt_list) const VkImageFormatListCreateInfo *fmt_list)
{ {
if (isl_format_supports_typed_atomics(devinfo, format)) if (isl_format_supports_typed_atomics(devinfo, format))
return true; return true;
@@ -502,7 +502,7 @@ formats_ccs_e_compatible(const struct intel_device_info *devinfo,
VkImageCreateFlags create_flags, VkImageCreateFlags create_flags,
enum isl_format format, VkImageTiling vk_tiling, enum isl_format format, VkImageTiling vk_tiling,
VkImageUsageFlags vk_usage, VkImageUsageFlags vk_usage,
const VkImageFormatListCreateInfoKHR *fmt_list) const VkImageFormatListCreateInfo *fmt_list)
{ {
if (!isl_format_supports_ccs_e(devinfo, format)) if (!isl_format_supports_ccs_e(devinfo, format))
return false; return false;
@@ -535,7 +535,7 @@ anv_formats_ccs_e_compatible(const struct intel_device_info *devinfo,
VkImageCreateFlags create_flags, VkImageCreateFlags create_flags,
VkFormat vk_format, VkImageTiling vk_tiling, VkFormat vk_format, VkImageTiling vk_tiling,
VkImageUsageFlags vk_usage, VkImageUsageFlags vk_usage,
const VkImageFormatListCreateInfoKHR *fmt_list) const VkImageFormatListCreateInfo *fmt_list)
{ {
enum isl_format format = enum isl_format format =
anv_get_isl_format_with_usage(devinfo, vk_format, anv_get_isl_format_with_usage(devinfo, vk_format,
@@ -689,7 +689,7 @@ add_aux_surface_if_supported(struct anv_device *device,
struct anv_image *image, struct anv_image *image,
uint32_t plane, uint32_t plane,
struct anv_format_plane plane_format, struct anv_format_plane plane_format,
const VkImageFormatListCreateInfoKHR *fmt_list, const VkImageFormatListCreateInfo *fmt_list,
uint64_t offset, uint64_t offset,
uint32_t stride, uint32_t stride,
isl_surf_usage_flags_t isl_extra_usage_flags) isl_surf_usage_flags_t isl_extra_usage_flags)
@@ -1472,9 +1472,9 @@ anv_image_init(struct anv_device *device, struct anv_image *image,
choose_isl_tiling_flags(&device->info, create_info, isl_mod_info, choose_isl_tiling_flags(&device->info, create_info, isl_mod_info,
image->vk.wsi_legacy_scanout); image->vk.wsi_legacy_scanout);
const VkImageFormatListCreateInfoKHR *fmt_list = const VkImageFormatListCreateInfo *fmt_list =
vk_find_struct_const(pCreateInfo->pNext, vk_find_struct_const(pCreateInfo->pNext,
IMAGE_FORMAT_LIST_CREATE_INFO_KHR); IMAGE_FORMAT_LIST_CREATE_INFO);
if (mod_explicit_info) { if (mod_explicit_info) {
r = add_all_surfaces_explicit_layout(device, image, fmt_list, r = add_all_surfaces_explicit_layout(device, image, fmt_list,
@@ -1789,7 +1789,7 @@ void anv_GetImageMemoryRequirements2(
void anv_GetDeviceImageMemoryRequirementsKHR( void anv_GetDeviceImageMemoryRequirementsKHR(
VkDevice _device, VkDevice _device,
const VkDeviceImageMemoryRequirementsKHR* pInfo, const VkDeviceImageMemoryRequirements* pInfo,
VkMemoryRequirements2* pMemoryRequirements) VkMemoryRequirements2* pMemoryRequirements)
{ {
ANV_FROM_HANDLE(anv_device, device, _device); ANV_FROM_HANDLE(anv_device, device, _device);
@@ -1826,7 +1826,7 @@ void anv_GetImageSparseMemoryRequirements2(
void anv_GetDeviceImageSparseMemoryRequirementsKHR( void anv_GetDeviceImageSparseMemoryRequirementsKHR(
VkDevice device, VkDevice device,
const VkDeviceImageMemoryRequirementsKHR* pInfo, const VkDeviceImageMemoryRequirements* pInfo,
uint32_t* pSparseMemoryRequirementCount, uint32_t* pSparseMemoryRequirementCount,
VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) VkSparseImageMemoryRequirements2* pSparseMemoryRequirements)
{ {

View File

@@ -74,7 +74,7 @@ addr_format_for_desc_type(VkDescriptorType desc_type,
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
return state->ubo_addr_format; return state->ubo_addr_format;
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK:
return state->desc_addr_format; return state->desc_addr_format;
default: default:
@@ -348,7 +348,7 @@ build_res_index(nir_builder *b, uint32_t set, uint32_t binding,
case nir_address_format_32bit_index_offset: { case nir_address_format_32bit_index_offset: {
assert(state->desc_addr_format == nir_address_format_32bit_index_offset); assert(state->desc_addr_format == nir_address_format_32bit_index_offset);
if (bind_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { if (bind_layout->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
uint32_t surface_index = state->set[set].desc_offset; uint32_t surface_index = state->set[set].desc_offset;
return nir_imm_ivec2(b, surface_index, return nir_imm_ivec2(b, surface_index,
bind_layout->descriptor_offset); bind_layout->descriptor_offset);
@@ -443,7 +443,7 @@ build_desc_addr(nir_builder *b,
struct res_index_defs res = unpack_res_index(b, index); struct res_index_defs res = unpack_res_index(b, index);
nir_ssa_def *desc_offset = res.desc_offset_base; nir_ssa_def *desc_offset = res.desc_offset_base;
if (desc_type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { if (desc_type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
/* Compute the actual descriptor offset. For inline uniform blocks, /* Compute the actual descriptor offset. For inline uniform blocks,
* the array index is ignored as they are only allowed to be a single * the array index is ignored as they are only allowed to be a single
* descriptor (not an array) and there is no concept of a "stride". * descriptor (not an array) and there is no concept of a "stride".
@@ -472,7 +472,7 @@ build_desc_addr(nir_builder *b,
} }
case nir_address_format_32bit_index_offset: case nir_address_format_32bit_index_offset:
assert(desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT); assert(desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK);
assert(state->desc_addr_format == nir_address_format_32bit_index_offset); assert(state->desc_addr_format == nir_address_format_32bit_index_offset);
return index; return index;
@@ -495,7 +495,7 @@ build_buffer_addr_for_res_index(nir_builder *b,
nir_address_format addr_format, nir_address_format addr_format,
struct apply_pipeline_layout_state *state) struct apply_pipeline_layout_state *state)
{ {
if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) { if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK) {
assert(addr_format == state->desc_addr_format); assert(addr_format == state->desc_addr_format);
return build_desc_addr(b, NULL, desc_type, res_index, addr_format, state); return build_desc_addr(b, NULL, desc_type, res_index, addr_format, state);
} else if (addr_format == nir_address_format_32bit_index_offset) { } else if (addr_format == nir_address_format_32bit_index_offset) {

View File

@@ -371,7 +371,7 @@ VkResult anv_EnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
vk_outarray_append_typed(VkPerformanceCounterKHR, &out, counter) { vk_outarray_append_typed(VkPerformanceCounterKHR, &out, counter) {
counter->unit = intel_perf_counter_unit_to_vk_unit[intel_counter->units]; counter->unit = intel_perf_counter_unit_to_vk_unit[intel_counter->units];
counter->scope = VK_QUERY_SCOPE_COMMAND_KHR; counter->scope = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR;
counter->storage = intel_perf_counter_data_type_to_vk_storage[intel_counter->data_type]; counter->storage = intel_perf_counter_data_type_to_vk_storage[intel_counter->data_type];
unsigned char sha1_result[20]; unsigned char sha1_result[20];

View File

@@ -551,7 +551,7 @@ struct anv_pipeline_stage {
struct brw_compile_stats stats[3]; struct brw_compile_stats stats[3];
char *disasm[3]; char *disasm[3];
VkPipelineCreationFeedbackEXT feedback; VkPipelineCreationFeedback feedback;
const unsigned *code; const unsigned *code;
@@ -1336,7 +1336,7 @@ anv_pipeline_compile_graphics(struct anv_graphics_pipeline *pipeline,
const VkGraphicsPipelineCreateInfo *info, const VkGraphicsPipelineCreateInfo *info,
const VkPipelineRenderingCreateInfo *rendering_info) const VkPipelineRenderingCreateInfo *rendering_info)
{ {
VkPipelineCreationFeedbackEXT pipeline_feedback = { VkPipelineCreationFeedback pipeline_feedback = {
.flags = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, .flags = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT,
}; };
int64_t pipeline_start = os_time_get_nano(); int64_t pipeline_start = os_time_get_nano();
@@ -1788,7 +1788,7 @@ anv_pipeline_compile_cs(struct anv_compute_pipeline *pipeline,
const VkPipelineShaderStageCreateInfo *sinfo = &info->stage; const VkPipelineShaderStageCreateInfo *sinfo = &info->stage;
assert(sinfo->stage == VK_SHADER_STAGE_COMPUTE_BIT); assert(sinfo->stage == VK_SHADER_STAGE_COMPUTE_BIT);
VkPipelineCreationFeedbackEXT pipeline_feedback = { VkPipelineCreationFeedback pipeline_feedback = {
.flags = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, .flags = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT,
}; };
int64_t pipeline_start = os_time_get_nano(); int64_t pipeline_start = os_time_get_nano();

View File

@@ -2213,7 +2213,7 @@ enum anv_cmd_dirty_bits {
ANV_CMD_DIRTY_RENDER_TARGETS = 1 << 11, ANV_CMD_DIRTY_RENDER_TARGETS = 1 << 11,
ANV_CMD_DIRTY_XFB_ENABLE = 1 << 12, ANV_CMD_DIRTY_XFB_ENABLE = 1 << 12,
ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE = 1 << 13, /* VK_DYNAMIC_STATE_LINE_STIPPLE_EXT */ ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE = 1 << 13, /* VK_DYNAMIC_STATE_LINE_STIPPLE_EXT */
ANV_CMD_DIRTY_DYNAMIC_CULL_MODE = 1 << 14, /* VK_DYNAMIC_STATE_CULL_MODE_EXT */ ANV_CMD_DIRTY_DYNAMIC_CULL_MODE = 1 << 14, /* VK_DYNAMIC_STATE_CULL_MODE */
ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE = 1 << 15, /* VK_DYNAMIC_STATE_FRONT_FACE */ ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE = 1 << 15, /* VK_DYNAMIC_STATE_FRONT_FACE */
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY = 1 << 16, /* VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY */ ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY = 1 << 16, /* VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY */
ANV_CMD_DIRTY_DYNAMIC_VERTEX_INPUT_BINDING_STRIDE = 1 << 17, /* VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE */ ANV_CMD_DIRTY_DYNAMIC_VERTEX_INPUT_BINDING_STRIDE = 1 << 17, /* VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE */

View File

@@ -3781,7 +3781,7 @@ genX(cmd_buffer_flush_state)(struct anv_cmd_buffer *cmd_buffer)
if (buffer) { if (buffer) {
uint32_t stride = dynamic_stride ? uint32_t stride = dynamic_stride ?
cmd_buffer->state.vertex_bindings[vb].stride : pipeline->vb[vb].stride; cmd_buffer->state.vertex_bindings[vb].stride : pipeline->vb[vb].stride;
/* From the Vulkan spec (vkCmdBindVertexBuffers2EXT): /* From the Vulkan spec (vkCmdBindVertexBuffers2):
* *
* "If pname:pSizes is not NULL then pname:pSizes[i] specifies * "If pname:pSizes is not NULL then pname:pSizes[i] specifies
* the bound size of the vertex buffer starting from the corresponding * the bound size of the vertex buffer starting from the corresponding

View File

@@ -2708,7 +2708,7 @@ genX(graphics_pipeline_create)(
pAllocator); pAllocator);
if (result != VK_SUCCESS) { if (result != VK_SUCCESS) {
vk_free2(&device->vk.alloc, pAllocator, pipeline); vk_free2(&device->vk.alloc, pAllocator, pipeline);
if (result == VK_PIPELINE_COMPILE_REQUIRED_EXT) if (result == VK_PIPELINE_COMPILE_REQUIRED)
*pPipeline = VK_NULL_HANDLE; *pPipeline = VK_NULL_HANDLE;
return result; return result;
} }
@@ -2990,7 +2990,7 @@ compute_pipeline_create(
if (result != VK_SUCCESS) { if (result != VK_SUCCESS) {
anv_pipeline_finish(&pipeline->base, device, pAllocator); anv_pipeline_finish(&pipeline->base, device, pAllocator);
vk_free2(&device->vk.alloc, pAllocator, pipeline); vk_free2(&device->vk.alloc, pAllocator, pipeline);
if (result == VK_PIPELINE_COMPILE_REQUIRED_EXT) if (result == VK_PIPELINE_COMPILE_REQUIRED)
*pPipeline = VK_NULL_HANDLE; *pPipeline = VK_NULL_HANDLE;
return result; return result;
} }
@@ -3028,10 +3028,10 @@ VkResult genX(CreateGraphicsPipelines)(
* is not obvious what error should be report upon 2 different failures. * is not obvious what error should be report upon 2 different failures.
* */ * */
result = res; result = res;
if (res != VK_PIPELINE_COMPILE_REQUIRED_EXT) if (res != VK_PIPELINE_COMPILE_REQUIRED)
break; break;
if (pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT) if (pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT)
break; break;
} }
@@ -3066,10 +3066,10 @@ VkResult genX(CreateComputePipelines)(
* is not obvious what error should be report upon 2 different failures. * is not obvious what error should be report upon 2 different failures.
* */ * */
result = res; result = res;
if (res != VK_PIPELINE_COMPILE_REQUIRED_EXT) if (res != VK_PIPELINE_COMPILE_REQUIRED)
break; break;
if (pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT) if (pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT)
break; break;
} }
@@ -3245,10 +3245,10 @@ genX(CreateRayTracingPipelinesKHR)(
/* Bail out on the first error as it is not obvious what error should be /* Bail out on the first error as it is not obvious what error should be
* report upon 2 different failures. */ * report upon 2 different failures. */
result = res; result = res;
if (result != VK_PIPELINE_COMPILE_REQUIRED_EXT) if (result != VK_PIPELINE_COMPILE_REQUIRED)
break; break;
if (pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT) if (pCreateInfos[i].flags & VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT)
break; break;
} }

View File

@@ -1293,7 +1293,7 @@ void genX(CmdWriteTimestamp2)(
struct mi_builder b; struct mi_builder b;
mi_builder_init(&b, &cmd_buffer->device->info, &cmd_buffer->batch); mi_builder_init(&b, &cmd_buffer->device->info, &cmd_buffer->batch);
if (stage == VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR) { if (stage == VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT) {
mi_store(&b, mi_mem64(anv_address_add(query_addr, 8)), mi_store(&b, mi_mem64(anv_address_add(query_addr, 8)),
mi_reg64(TIMESTAMP)); mi_reg64(TIMESTAMP));
emit_query_mi_availability(&b, query_addr, true); emit_query_mi_availability(&b, query_addr, true);

View File

@@ -869,9 +869,9 @@ static const uint32_t vk_to_intel_shadow_compare_op[] = {
#if GFX_VER >= 9 #if GFX_VER >= 9
static const uint32_t vk_to_intel_sampler_reduction_mode[] = { static const uint32_t vk_to_intel_sampler_reduction_mode[] = {
[VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT] = STD_FILTER, [VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE] = STD_FILTER,
[VK_SAMPLER_REDUCTION_MODE_MIN_EXT] = MINIMUM, [VK_SAMPLER_REDUCTION_MODE_MIN] = MINIMUM,
[VK_SAMPLER_REDUCTION_MODE_MAX_EXT] = MAXIMUM, [VK_SAMPLER_REDUCTION_MODE_MAX] = MAXIMUM,
}; };
#endif #endif