anv: prepare image/buffer views for non indirect descriptors

When in direct descriptor mode, the descriptor pool buffers will hold
surface states directly. We won't allocate surface states in image &
buffer views.

Instead, views will hold a packed RENDER_SURFACE_STATE ready to be copied
into the descriptor buffers.

Signed-off-by: Lionel Landwerlin <lionel.g.landwerlin@intel.com>
Reviewed-by: Kenneth Graunke <kenneth@whitecape.org>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/21645>
This commit is contained in:
Lionel Landwerlin
2023-02-23 15:19:11 +02:00
committed by Marge Bot
parent 1c45cd217e
commit 64f20cec28
5 changed files with 187 additions and 95 deletions

View File

@@ -1244,7 +1244,7 @@ anv_descriptor_set_create(struct anv_device *device,
if (!pool->host_only) { if (!pool->host_only) {
set->desc_surface_state = anv_descriptor_pool_alloc_state(pool); set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
anv_fill_buffer_surface_state(device, set->desc_surface_state, anv_fill_buffer_surface_state(device, set->desc_surface_state.map,
format, ISL_SWIZZLE_IDENTITY, format, ISL_SWIZZLE_IDENTITY,
ISL_SURF_USAGE_CONSTANT_BUFFER_BIT, ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
set->desc_addr, set->desc_addr,
@@ -1296,24 +1296,27 @@ anv_descriptor_set_create(struct anv_device *device,
} }
} }
/* Allocate surface states for real descriptor sets. For host only sets, we /* Allocate surface states for real descriptor sets if we're using indirect
* just store the surface state data in malloc memory. * descriptors. For host only sets, we just store the surface state data in
* malloc memory.
*/ */
if (!pool->host_only) { if (device->physical->indirect_descriptors) {
for (uint32_t b = 0; b < set->buffer_view_count; b++) { if (!pool->host_only) {
set->buffer_views[b].surface_state = for (uint32_t b = 0; b < set->buffer_view_count; b++) {
anv_descriptor_pool_alloc_state(pool); set->buffer_views[b].general.state =
} anv_descriptor_pool_alloc_state(pool);
} else { }
void *host_surface_states = } else {
set->buffer_views + set->buffer_view_count; void *host_surface_states =
memset(host_surface_states, 0, set->buffer_views + set->buffer_view_count;
set->buffer_view_count * ANV_SURFACE_STATE_SIZE); memset(host_surface_states, 0,
for (uint32_t b = 0; b < set->buffer_view_count; b++) { set->buffer_view_count * ANV_SURFACE_STATE_SIZE);
set->buffer_views[b].surface_state = (struct anv_state) { for (uint32_t b = 0; b < set->buffer_view_count; b++) {
.alloc_size = ANV_SURFACE_STATE_SIZE, set->buffer_views[b].general.state = (struct anv_state) {
.map = host_surface_states + b * ANV_SURFACE_STATE_SIZE, .alloc_size = ANV_SURFACE_STATE_SIZE,
}; .map = host_surface_states + b * ANV_SURFACE_STATE_SIZE,
};
}
} }
} }
@@ -1339,10 +1342,14 @@ anv_descriptor_set_destroy(struct anv_device *device,
anv_descriptor_pool_free_state(pool, set->desc_surface_state); anv_descriptor_pool_free_state(pool, set->desc_surface_state);
} }
if (!pool->host_only) { if (device->physical->indirect_descriptors) {
for (uint32_t b = 0; b < set->buffer_view_count; b++) { if (!pool->host_only) {
if (set->buffer_views[b].surface_state.alloc_size) for (uint32_t b = 0; b < set->buffer_view_count; b++) {
anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state); if (set->buffer_views[b].general.state.alloc_size) {
anv_descriptor_pool_free_state(
pool, set->buffer_views[b].general.state);
}
}
} }
} }
@@ -1517,8 +1524,8 @@ anv_descriptor_set_write_image_view(struct anv_device *device,
for (unsigned p = 0; p < image_view->n_planes; p++) { for (unsigned p = 0; p < image_view->n_planes; p++) {
struct anv_surface_state sstate = struct anv_surface_state sstate =
(desc->layout == VK_IMAGE_LAYOUT_GENERAL) ? (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
image_view->planes[p].general_sampler_surface_state : image_view->planes[p].general_sampler :
image_view->planes[p].optimal_sampler_surface_state; image_view->planes[p].optimal_sampler;
desc_data[p].image = desc_data[p].image =
anv_surface_state_to_handle(device->physical, sstate.state); anv_surface_state_to_handle(device->physical, sstate.state);
} }
@@ -1545,7 +1552,7 @@ anv_descriptor_set_write_image_view(struct anv_device *device,
struct anv_storage_image_descriptor desc_data = { struct anv_storage_image_descriptor desc_data = {
.vanilla = anv_surface_state_to_handle( .vanilla = anv_surface_state_to_handle(
device->physical, device->physical,
image_view->planes[0].storage_surface_state.state), image_view->planes[0].storage.state),
}; };
memcpy(desc_map, &desc_data, sizeof(desc_data)); memcpy(desc_map, &desc_data, sizeof(desc_data));
} }
@@ -1588,8 +1595,7 @@ anv_descriptor_set_write_buffer_view(struct anv_device *device,
if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) { if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
struct anv_sampled_image_descriptor desc_data = { struct anv_sampled_image_descriptor desc_data = {
.image = anv_surface_state_to_handle( .image = anv_surface_state_to_handle(
device->physical, device->physical, buffer_view->general.state),
buffer_view->surface_state),
}; };
memcpy(desc_map, &desc_data, sizeof(desc_data)); memcpy(desc_map, &desc_data, sizeof(desc_data));
} }
@@ -1597,8 +1603,7 @@ anv_descriptor_set_write_buffer_view(struct anv_device *device,
if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) { if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
struct anv_storage_image_descriptor desc_data = { struct anv_storage_image_descriptor desc_data = {
.vanilla = anv_surface_state_to_handle( .vanilla = anv_surface_state_to_handle(
device->physical, device->physical, buffer_view->storage.state),
buffer_view->storage_surface_state),
}; };
memcpy(desc_map, &desc_data, sizeof(desc_data)); memcpy(desc_map, &desc_data, sizeof(desc_data));
} }
@@ -1609,11 +1614,11 @@ anv_descriptor_write_surface_state(struct anv_device *device,
struct anv_descriptor *desc, struct anv_descriptor *desc,
struct anv_state surface_state) struct anv_state surface_state)
{ {
assert(surface_state.alloc_size);
struct anv_buffer_view *bview = desc->buffer_view; struct anv_buffer_view *bview = desc->buffer_view;
bview->surface_state = surface_state; bview->general.state = surface_state;
assert(bview->surface_state.alloc_size);
isl_surf_usage_flags_t usage = isl_surf_usage_flags_t usage =
(desc->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || (desc->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
@@ -1623,7 +1628,7 @@ anv_descriptor_write_surface_state(struct anv_device *device,
enum isl_format format = enum isl_format format =
anv_isl_format_for_descriptor_type(device, desc->type); anv_isl_format_for_descriptor_type(device, desc->type);
anv_fill_buffer_surface_state(device, bview->surface_state, anv_fill_buffer_surface_state(device, bview->general.state.map,
format, ISL_SWIZZLE_IDENTITY, format, ISL_SWIZZLE_IDENTITY,
usage, bview->address, bview->range, 1); usage, bview->address, bview->range, 1);
} }
@@ -1699,7 +1704,7 @@ anv_descriptor_set_write_buffer(struct anv_device *device,
if (set->is_push) if (set->is_push)
set->generate_surface_states |= BITFIELD_BIT(descriptor_index); set->generate_surface_states |= BITFIELD_BIT(descriptor_index);
else else
anv_descriptor_write_surface_state(device, desc, bview->surface_state); anv_descriptor_write_surface_state(device, desc, bview->general.state);
} }
void void
@@ -1908,8 +1913,8 @@ void anv_UpdateDescriptorSets(
dst_bview->range = src_bview->range; dst_bview->range = src_bview->range;
dst_bview->address = src_bview->address; dst_bview->address = src_bview->address;
memcpy(dst_bview->surface_state.map, memcpy(dst_bview->general.state.map,
src_bview->surface_state.map, src_bview->general.state.map,
ANV_SURFACE_STATE_SIZE); ANV_SURFACE_STATE_SIZE);
} }
} }

View File

@@ -4442,14 +4442,15 @@ uint64_t anv_GetDeviceMemoryOpaqueCaptureAddress(
} }
void void
anv_fill_buffer_surface_state(struct anv_device *device, struct anv_state state, anv_fill_buffer_surface_state(struct anv_device *device,
void *surface_state_ptr,
enum isl_format format, enum isl_format format,
struct isl_swizzle swizzle, struct isl_swizzle swizzle,
isl_surf_usage_flags_t usage, isl_surf_usage_flags_t usage,
struct anv_address address, struct anv_address address,
uint32_t range, uint32_t stride) uint32_t range, uint32_t stride)
{ {
isl_buffer_fill_state(&device->isl_dev, state.map, isl_buffer_fill_state(&device->isl_dev, surface_state_ptr,
.address = anv_address_physical(address), .address = anv_address_physical(address),
.mocs = isl_mocs(&device->isl_dev, usage, .mocs = isl_mocs(&device->isl_dev, usage,
address.bo && address.bo->is_external), address.bo && address.bo->is_external),

View File

@@ -2433,9 +2433,13 @@ anv_layout_has_untracked_aux_writes(const struct intel_device_info * const devin
} }
static struct anv_state static struct anv_state
alloc_bindless_surface_state(struct anv_device *device) maybe_alloc_surface_state(struct anv_device *device)
{ {
return anv_state_pool_alloc(&device->bindless_surface_state_pool, 64, 64); if (device->physical->indirect_descriptors) {
return anv_state_pool_alloc(&device->bindless_surface_state_pool, 64, 64);
} else {
return ANV_STATE_NULL;
}
} }
static enum isl_channel_select static enum isl_channel_select
@@ -2454,6 +2458,23 @@ remap_swizzle(VkComponentSwizzle swizzle,
} }
} }
static void *
anv_surface_get_surface_state_ptr(struct anv_device *device,
struct anv_surface_state *state)
{
/* Check whether a surface state was allocated and use it. In the indirect
* descriptor case, we always have a surface state. In the direct
* descriptor case, only attachments have surface states (see
* anv_cmd_buffer_init_attachments())
*/
if (state->state.map) {
return state->state.map;
} else {
assert(!device->physical->indirect_descriptors);
return state->state_data.data;
}
}
void void
anv_image_fill_surface_state(struct anv_device *device, anv_image_fill_surface_state(struct anv_device *device,
const struct anv_image *image, const struct anv_image *image,
@@ -2488,6 +2509,8 @@ anv_image_fill_surface_state(struct anv_device *device,
const struct anv_address address = const struct anv_address address =
anv_image_address(image, &surface->memory_range); anv_image_address(image, &surface->memory_range);
void *surface_state_map = state_inout->state_data.data;
const struct isl_surf *isl_surf = &surface->isl; const struct isl_surf *isl_surf = &surface->isl;
struct isl_surf tmp_surf; struct isl_surf tmp_surf;
@@ -2523,7 +2546,7 @@ anv_image_fill_surface_state(struct anv_device *device,
} }
state_inout->clear_address = clear_address; state_inout->clear_address = clear_address;
isl_surf_fill_state(&device->isl_dev, state_inout->state.map, isl_surf_fill_state(&device->isl_dev, surface_state_map,
.surf = isl_surf, .surf = isl_surf,
.view = &view, .view = &view,
.address = anv_address_physical(state_inout->address), .address = anv_address_physical(state_inout->address),
@@ -2546,18 +2569,21 @@ anv_image_fill_surface_state(struct anv_device *device,
* the surface buffer addresses are always 4K page aligned. * the surface buffer addresses are always 4K page aligned.
*/ */
if (!anv_address_is_null(aux_address)) { if (!anv_address_is_null(aux_address)) {
uint32_t *aux_addr_dw = state_inout->state.map + uint32_t *aux_addr_dw = surface_state_map +
device->isl_dev.ss.aux_addr_offset; device->isl_dev.ss.aux_addr_offset;
assert((aux_address.offset & 0xfff) == 0); assert((aux_address.offset & 0xfff) == 0);
state_inout->aux_address.offset |= *aux_addr_dw & 0xfff; state_inout->aux_address.offset |= *aux_addr_dw & 0xfff;
} }
if (device->info->ver >= 10 && clear_address.bo) { if (device->info->ver >= 10 && clear_address.bo) {
uint32_t *clear_addr_dw = state_inout->state.map + uint32_t *clear_addr_dw = surface_state_map +
device->isl_dev.ss.clear_color_state_offset; device->isl_dev.ss.clear_color_state_offset;
assert((clear_address.offset & 0x3f) == 0); assert((clear_address.offset & 0x3f) == 0);
state_inout->clear_address.offset |= *clear_addr_dw & 0x3f; state_inout->clear_address.offset |= *clear_addr_dw & 0x3f;
} }
if (state_inout->state.map)
memcpy(state_inout->state.map, surface_state_map, ANV_SURFACE_STATE_SIZE);
} }
static uint32_t static uint32_t
@@ -2769,10 +2795,10 @@ anv_CreateImageView(VkDevice _device,
if (iview->vk.usage & (VK_IMAGE_USAGE_SAMPLED_BIT | if (iview->vk.usage & (VK_IMAGE_USAGE_SAMPLED_BIT |
VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) { VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)) {
iview->planes[vplane].optimal_sampler_surface_state.state = iview->planes[vplane].optimal_sampler.state =
alloc_bindless_surface_state(device); maybe_alloc_surface_state(device);
iview->planes[vplane].general_sampler_surface_state.state = iview->planes[vplane].general_sampler.state =
alloc_bindless_surface_state(device); maybe_alloc_surface_state(device);
enum isl_aux_usage general_aux_usage = enum isl_aux_usage general_aux_usage =
anv_layout_to_aux_usage(device->info, image, 1UL << iaspect_bit, anv_layout_to_aux_usage(device->info, image, 1UL << iaspect_bit,
@@ -2788,14 +2814,14 @@ anv_CreateImageView(VkDevice _device,
ISL_SURF_USAGE_TEXTURE_BIT, ISL_SURF_USAGE_TEXTURE_BIT,
optimal_aux_usage, NULL, optimal_aux_usage, NULL,
ANV_IMAGE_VIEW_STATE_TEXTURE_OPTIMAL, ANV_IMAGE_VIEW_STATE_TEXTURE_OPTIMAL,
&iview->planes[vplane].optimal_sampler_surface_state); &iview->planes[vplane].optimal_sampler);
anv_image_fill_surface_state(device, image, 1ULL << iaspect_bit, anv_image_fill_surface_state(device, image, 1ULL << iaspect_bit,
&iview->planes[vplane].isl, &iview->planes[vplane].isl,
ISL_SURF_USAGE_TEXTURE_BIT, ISL_SURF_USAGE_TEXTURE_BIT,
general_aux_usage, NULL, general_aux_usage, NULL,
0, 0,
&iview->planes[vplane].general_sampler_surface_state); &iview->planes[vplane].general_sampler);
} }
/* NOTE: This one needs to go last since it may stomp isl_view.format */ /* NOTE: This one needs to go last since it may stomp isl_view.format */
@@ -2810,15 +2836,15 @@ anv_CreateImageView(VkDevice _device,
anv_layout_to_aux_usage(device->info, image, 1UL << iaspect_bit, anv_layout_to_aux_usage(device->info, image, 1UL << iaspect_bit,
VK_IMAGE_USAGE_STORAGE_BIT, VK_IMAGE_USAGE_STORAGE_BIT,
VK_IMAGE_LAYOUT_GENERAL); VK_IMAGE_LAYOUT_GENERAL);
iview->planes[vplane].storage_surface_state.state = iview->planes[vplane].storage.state =
alloc_bindless_surface_state(device); maybe_alloc_surface_state(device);
anv_image_fill_surface_state(device, image, 1ULL << iaspect_bit, anv_image_fill_surface_state(device, image, 1ULL << iaspect_bit,
&storage_view, &storage_view,
ISL_SURF_USAGE_STORAGE_BIT, ISL_SURF_USAGE_STORAGE_BIT,
general_aux_usage, NULL, general_aux_usage, NULL,
0, 0,
&iview->planes[vplane].storage_surface_state); &iview->planes[vplane].storage);
} }
} }
@@ -2838,25 +2864,53 @@ anv_DestroyImageView(VkDevice _device, VkImageView _iview,
return; return;
for (uint32_t plane = 0; plane < iview->n_planes; plane++) { for (uint32_t plane = 0; plane < iview->n_planes; plane++) {
if (iview->planes[plane].optimal_sampler_surface_state.state.alloc_size) { if (iview->planes[plane].optimal_sampler.state.alloc_size) {
anv_state_pool_free(&device->bindless_surface_state_pool, anv_state_pool_free(&device->bindless_surface_state_pool,
iview->planes[plane].optimal_sampler_surface_state.state); iview->planes[plane].optimal_sampler.state);
} }
if (iview->planes[plane].general_sampler_surface_state.state.alloc_size) { if (iview->planes[plane].general_sampler.state.alloc_size) {
anv_state_pool_free(&device->bindless_surface_state_pool, anv_state_pool_free(&device->bindless_surface_state_pool,
iview->planes[plane].general_sampler_surface_state.state); iview->planes[plane].general_sampler.state);
} }
if (iview->planes[plane].storage_surface_state.state.alloc_size) { if (iview->planes[plane].storage.state.alloc_size) {
anv_state_pool_free(&device->bindless_surface_state_pool, anv_state_pool_free(&device->bindless_surface_state_pool,
iview->planes[plane].storage_surface_state.state); iview->planes[plane].storage.state);
} }
} }
vk_image_view_destroy(&device->vk, pAllocator, &iview->vk); vk_image_view_destroy(&device->vk, pAllocator, &iview->vk);
} }
static void *
anv_buffer_state_get_ptr(struct anv_device *device,
struct anv_buffer_state *state)
{
if (device->physical->indirect_descriptors)
return state->state.map;
else
return state->state_data.data;
}
static void
anv_fill_buffer_view_surface_state(struct anv_device *device,
struct anv_buffer_state *state,
enum isl_format format,
struct isl_swizzle swizzle,
isl_surf_usage_flags_t usage,
struct anv_address address,
uint32_t range, uint32_t stride)
{
anv_fill_buffer_surface_state(device,
state->state_data.data,
format, swizzle,
ISL_SURF_USAGE_TEXTURE_BIT,
address, range, stride);
if (state->state.map)
memcpy(state->state.map, state->state_data.data, ANV_SURFACE_STATE_SIZE);
}
VkResult VkResult
anv_CreateBufferView(VkDevice _device, anv_CreateBufferView(VkDevice _device,
@@ -2885,25 +2939,28 @@ anv_CreateBufferView(VkDevice _device,
view->address = anv_address_add(buffer->address, pCreateInfo->offset); view->address = anv_address_add(buffer->address, pCreateInfo->offset);
if (buffer->vk.usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) { if (buffer->vk.usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) {
view->surface_state = alloc_bindless_surface_state(device); view->general.state = maybe_alloc_surface_state(device);
anv_fill_buffer_surface_state(device, view->surface_state, anv_fill_buffer_view_surface_state(device,
format.isl_format, format.swizzle, &view->general,
ISL_SURF_USAGE_TEXTURE_BIT, format.isl_format,
view->address, view->range, format_bs); format.swizzle,
ISL_SURF_USAGE_TEXTURE_BIT,
view->address, view->range, format_bs);
} else { } else {
view->surface_state = (struct anv_state){ 0 }; view->general.state = ANV_STATE_NULL;
} }
if (buffer->vk.usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) { if (buffer->vk.usage & VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT) {
view->storage_surface_state = alloc_bindless_surface_state(device); view->storage.state = maybe_alloc_surface_state(device);
anv_fill_buffer_surface_state(device, view->storage_surface_state, anv_fill_buffer_view_surface_state(device,
format.isl_format, format.swizzle, &view->storage,
ISL_SURF_USAGE_STORAGE_BIT, format.isl_format, format.swizzle,
view->address, view->range, format_bs); ISL_SURF_USAGE_STORAGE_BIT,
view->address, view->range, format_bs);
} else { } else {
view->storage_surface_state = (struct anv_state){ 0 }; view->storage.state = ANV_STATE_NULL;
} }
*pView = anv_buffer_view_to_handle(view); *pView = anv_buffer_view_to_handle(view);
@@ -2921,13 +2978,15 @@ anv_DestroyBufferView(VkDevice _device, VkBufferView bufferView,
if (!view) if (!view)
return; return;
if (view->surface_state.alloc_size > 0) if (view->general.state.alloc_size > 0) {
anv_state_pool_free(&device->bindless_surface_state_pool, anv_state_pool_free(&device->bindless_surface_state_pool,
view->surface_state); view->general.state);
}
if (view->storage_surface_state.alloc_size > 0) if (view->storage.state.alloc_size > 0) {
anv_state_pool_free(&device->bindless_surface_state_pool, anv_state_pool_free(&device->bindless_surface_state_pool,
view->storage_surface_state); view->storage.state);
}
vk_object_free(&device->vk, pAllocator, view); vk_object_free(&device->vk, pAllocator, view);
} }

View File

@@ -1605,10 +1605,6 @@ struct anv_vue_header {
float PointWidth; float PointWidth;
}; };
struct anv_surface_state_data {
uint8_t data[ANV_SURFACE_STATE_SIZE];
};
/** Struct representing a sampled image descriptor /** Struct representing a sampled image descriptor
* *
* This descriptor layout is used for sampled images, bare sampler, and * This descriptor layout is used for sampled images, bare sampler, and
@@ -1851,6 +1847,24 @@ anv_descriptor_set_is_push(struct anv_descriptor_set *set)
return set->pool == NULL; return set->pool == NULL;
} }
struct anv_surface_state_data {
uint8_t data[ANV_SURFACE_STATE_SIZE];
};
struct anv_buffer_state {
/** Surface state allocated from the bindless heap
*
* Only valid if anv_physical_device::indirect_descriptors is true
*/
struct anv_state state;
/** Surface state after genxml packing
*
* Only valid if anv_physical_device::indirect_descriptors is false
*/
struct anv_surface_state_data state_data;
};
struct anv_buffer_view { struct anv_buffer_view {
struct vk_object_base base; struct vk_object_base base;
@@ -1858,8 +1872,8 @@ struct anv_buffer_view {
struct anv_address address; struct anv_address address;
struct anv_state surface_state; struct anv_buffer_state general;
struct anv_state storage_surface_state; struct anv_buffer_state storage;
}; };
struct anv_push_descriptor_set { struct anv_push_descriptor_set {
@@ -2493,7 +2507,18 @@ struct anv_push_constants {
}; };
struct anv_surface_state { struct anv_surface_state {
/** Surface state allocated from the bindless heap
*
* Can be NULL if unused.
*/
struct anv_state state; struct anv_state state;
/** Surface state after genxml packing
*
* Same data as in state.
*/
struct anv_surface_state_data state_data;
/** Address of the surface referred to by this state /** Address of the surface referred to by this state
* *
* This address is relative to the start of the BO. * This address is relative to the start of the BO.
@@ -4211,18 +4236,18 @@ struct anv_image_view {
* image layout of SHADER_READ_ONLY_OPTIMAL or * image layout of SHADER_READ_ONLY_OPTIMAL or
* DEPTH_STENCIL_READ_ONLY_OPTIMAL. * DEPTH_STENCIL_READ_ONLY_OPTIMAL.
*/ */
struct anv_surface_state optimal_sampler_surface_state; struct anv_surface_state optimal_sampler;
/** /**
* RENDER_SURFACE_STATE when using image as a sampler surface with an * RENDER_SURFACE_STATE when using image as a sampler surface with an
* image layout of GENERAL. * image layout of GENERAL.
*/ */
struct anv_surface_state general_sampler_surface_state; struct anv_surface_state general_sampler;
/** /**
* RENDER_SURFACE_STATE when using image as a storage image. * RENDER_SURFACE_STATE when using image as a storage image.
*/ */
struct anv_surface_state storage_surface_state; struct anv_surface_state storage;
} planes[3]; } planes[3];
}; };
@@ -4334,7 +4359,7 @@ anv_get_image_format_features2(const struct intel_device_info *devinfo,
const struct isl_drm_modifier_info *isl_mod_info); const struct isl_drm_modifier_info *isl_mod_info);
void anv_fill_buffer_surface_state(struct anv_device *device, void anv_fill_buffer_surface_state(struct anv_device *device,
struct anv_state state, void *surface_state_ptr,
enum isl_format format, enum isl_format format,
struct isl_swizzle swizzle, struct isl_swizzle swizzle,
isl_surf_usage_flags_t usage, isl_surf_usage_flags_t usage,

View File

@@ -1965,7 +1965,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
const enum isl_format format = const enum isl_format format =
anv_isl_format_for_descriptor_type(cmd_buffer->device, anv_isl_format_for_descriptor_type(cmd_buffer->device,
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER); VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
anv_fill_buffer_surface_state(cmd_buffer->device, surface_state, anv_fill_buffer_surface_state(cmd_buffer->device, surface_state.map,
format, ISL_SWIZZLE_IDENTITY, format, ISL_SWIZZLE_IDENTITY,
ISL_SURF_USAGE_CONSTANT_BUFFER_BIT, ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
cmd_buffer->state.compute.num_workgroups, cmd_buffer->state.compute.num_workgroups,
@@ -2055,8 +2055,8 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
if (desc->image_view) { if (desc->image_view) {
struct anv_surface_state sstate = struct anv_surface_state sstate =
(desc->layout == VK_IMAGE_LAYOUT_GENERAL) ? (desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
desc->image_view->planes[binding->plane].general_sampler_surface_state : desc->image_view->planes[binding->plane].general_sampler :
desc->image_view->planes[binding->plane].optimal_sampler_surface_state; desc->image_view->planes[binding->plane].optimal_sampler;
surface_state = surface_state =
anv_bindless_state_for_binding_table(cmd_buffer->device, sstate.state); anv_bindless_state_for_binding_table(cmd_buffer->device, sstate.state);
assert(surface_state.alloc_size); assert(surface_state.alloc_size);
@@ -2072,7 +2072,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: { case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
if (desc->image_view) { if (desc->image_view) {
struct anv_surface_state sstate = struct anv_surface_state sstate =
desc->image_view->planes[binding->plane].storage_surface_state; desc->image_view->planes[binding->plane].storage;
surface_state = anv_bindless_state_for_binding_table( surface_state = anv_bindless_state_for_binding_table(
cmd_buffer->device, sstate.state); cmd_buffer->device, sstate.state);
assert(surface_state.alloc_size); assert(surface_state.alloc_size);
@@ -2087,7 +2087,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER: case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
if (desc->set_buffer_view) { if (desc->set_buffer_view) {
surface_state = desc->set_buffer_view->surface_state; surface_state = desc->set_buffer_view->general.state;
assert(surface_state.alloc_size); assert(surface_state.alloc_size);
} else { } else {
surface_state = anv_bindless_state_for_binding_table( surface_state = anv_bindless_state_for_binding_table(
@@ -2100,7 +2100,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
if (desc->buffer_view) { if (desc->buffer_view) {
surface_state = anv_bindless_state_for_binding_table( surface_state = anv_bindless_state_for_binding_table(
cmd_buffer->device, cmd_buffer->device,
desc->buffer_view->surface_state); desc->buffer_view->general.state);
assert(surface_state.alloc_size); assert(surface_state.alloc_size);
} else { } else {
surface_state = anv_bindless_state_for_binding_table( surface_state = anv_bindless_state_for_binding_table(
@@ -2139,7 +2139,8 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
ISL_SURF_USAGE_CONSTANT_BUFFER_BIT : ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
ISL_SURF_USAGE_STORAGE_BIT; ISL_SURF_USAGE_STORAGE_BIT;
anv_fill_buffer_surface_state(cmd_buffer->device, surface_state, anv_fill_buffer_surface_state(cmd_buffer->device,
surface_state.map,
format, ISL_SWIZZLE_IDENTITY, format, ISL_SWIZZLE_IDENTITY,
usage, address, range, 1); usage, address, range, 1);
} else { } else {
@@ -2155,7 +2156,7 @@ emit_binding_table(struct anv_cmd_buffer *cmd_buffer,
if (desc->buffer_view) { if (desc->buffer_view) {
surface_state = anv_bindless_state_for_binding_table( surface_state = anv_bindless_state_for_binding_table(
cmd_buffer->device, cmd_buffer->device,
desc->buffer_view->storage_surface_state); desc->buffer_view->storage.state);
assert(surface_state.alloc_size); assert(surface_state.alloc_size);
} else { } else {
surface_state = anv_bindless_state_for_binding_table( surface_state = anv_bindless_state_for_binding_table(
@@ -2316,9 +2317,10 @@ flush_push_descriptor_set(struct anv_cmd_buffer *cmd_buffer,
struct anv_buffer_view *bview = desc->set_buffer_view; struct anv_buffer_view *bview = desc->set_buffer_view;
if (bview != NULL) { if (bview != NULL) {
bview->surface_state = anv_cmd_buffer_alloc_surface_state(cmd_buffer); bview->general.state =
anv_cmd_buffer_alloc_surface_state(cmd_buffer);
anv_descriptor_write_surface_state(cmd_buffer->device, desc, anv_descriptor_write_surface_state(cmd_buffer->device, desc,
bview->surface_state); bview->general.state);
} }
} }
} }
@@ -2330,7 +2332,7 @@ flush_push_descriptor_set(struct anv_cmd_buffer *cmd_buffer,
set->desc_surface_state = anv_cmd_buffer_alloc_surface_state(cmd_buffer); set->desc_surface_state = anv_cmd_buffer_alloc_surface_state(cmd_buffer);
anv_fill_buffer_surface_state(cmd_buffer->device, anv_fill_buffer_surface_state(cmd_buffer->device,
set->desc_surface_state, set->desc_surface_state.map,
format, ISL_SWIZZLE_IDENTITY, format, ISL_SWIZZLE_IDENTITY,
ISL_SURF_USAGE_CONSTANT_BUFFER_BIT, ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
set->desc_addr, set->desc_addr,