anv: Implement descriptor pools

Descriptor pools are an optimization that lets applications allocate
descriptor sets through an externally synchronized object (that is,
unlocked).  In our case it also plugs a memory leak: we didn't track
the allocated sets, so vkResetDescriptorPool() and
vkDestroyDescriptorPool() failed to free them.
Author: Kristian Høgsberg Kristensen
Date:   2016-02-11 22:46:28 -08:00
parent 353d5bf286
commit 2570a58bcd
5 changed files with 200 additions and 45 deletions
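
For context on how the entry points implemented below are driven, here is a minimal, hypothetical usage sketch from the application's side. The `device` and `set_layout` handles and the descriptor counts are illustrative assumptions, not taken from this commit, and error handling is omitted:

#include <vulkan/vulkan.h>

/* Hypothetical helper (not part of the commit): exercises the descriptor
 * pool API.  Assumes device and set_layout were created elsewhere. */
static void
example_descriptor_pool_usage(VkDevice device, VkDescriptorSetLayout set_layout)
{
   /* One pool sized for up to 16 sets of one combined image/sampler each. */
   const VkDescriptorPoolSize pool_size = {
      .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
      .descriptorCount = 16,
   };
   const VkDescriptorPoolCreateInfo pool_info = {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
      .maxSets = 16,
      .poolSizeCount = 1,
      .pPoolSizes = &pool_size,
   };
   VkDescriptorPool pool;
   vkCreateDescriptorPool(device, &pool_info, NULL, &pool);

   /* Allocation goes through the pool, so no device-level locking is
    * needed as long as the pool itself is externally synchronized. */
   const VkDescriptorSetAllocateInfo alloc_info = {
      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
      .descriptorPool = pool,
      .descriptorSetCount = 1,
      .pSetLayouts = &set_layout,
   };
   VkDescriptorSet set;
   vkAllocateDescriptorSets(device, &alloc_info, &set);

   /* ... update and bind the set ... */

   /* Reset releases every set allocated from the pool in one call; this
    * is the path that previously leaked because sets were not tracked. */
   vkResetDescriptorPool(device, pool, 0);
   vkDestroyDescriptorPool(device, pool, NULL);
}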

View File

@@ -244,17 +244,67 @@ void anv_DestroyPipelineLayout(
 }
 
 /*
- * Descriptor pools.  These are a no-op for now.
+ * Descriptor pools.
+ *
+ * These are implemented using a big pool of memory and a free-list for the
+ * host memory allocations and a state_stream and a free list for the buffer
+ * view surface state. The spec allows us to fail to allocate due to
+ * fragmentation in all cases but two: 1) after pool reset, allocating up
+ * until the pool size with no freeing must succeed and 2) allocating and
+ * freeing only descriptor sets with the same layout. Case 1) is easy enough,
+ * and the free list lets us recycle blocks for case 2).
  */
+
+#define EMPTY 1
+
 VkResult anv_CreateDescriptorPool(
-    VkDevice                                    device,
+    VkDevice                                    _device,
     const VkDescriptorPoolCreateInfo*           pCreateInfo,
     const VkAllocationCallbacks*                pAllocator,
     VkDescriptorPool*                           pDescriptorPool)
 {
-   anv_finishme("VkDescriptorPool is a stub");
-   *pDescriptorPool = (VkDescriptorPool)1;
+   ANV_FROM_HANDLE(anv_device, device, _device);
+   struct anv_descriptor_pool *pool;
+
+   uint32_t descriptor_count = 0;
+   uint32_t buffer_count = 0;
+   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
+      switch (pCreateInfo->pPoolSizes[i].type) {
+      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
+      default:
+         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
+         break;
+      }
+   }
+
+   const size_t set_size =
+      sizeof(struct anv_descriptor_set) +
+      descriptor_count * sizeof(struct anv_descriptor) +
+      buffer_count * sizeof(struct anv_buffer_view);
+
+   const size_t size =
+      sizeof(*pool) +
+      pCreateInfo->maxSets * set_size;
+
+   pool = anv_alloc2(&device->alloc, pAllocator, size, 8,
+                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+   if (!pool)
+      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
+
+   pool->size = size;
+   pool->next = 0;
+   pool->free_list = EMPTY;
+
+   anv_state_stream_init(&pool->surface_state_stream,
+                         &device->surface_state_block_pool);
+   pool->surface_state_free_list = NULL;
+
+   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);
 
    return VK_SUCCESS;
 }
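
To make the sizing above concrete, a purely illustrative example (these numbers are not from the commit): a pool created with maxSets = 8 and pool sizes of 4 VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER plus 4 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER descriptors ends up with descriptor_count = 8 and buffer_count = 4. Each worst-case set therefore reserves set_size = sizeof(struct anv_descriptor_set) + 8 * sizeof(struct anv_descriptor) + 4 * sizeof(struct anv_buffer_view) bytes, and the single anv_alloc2() block is sizeof(*pool) + 8 * set_size bytes, i.e. room for eight worst-case sets.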
@@ -263,37 +313,85 @@ void anv_DestroyDescriptorPool(
     VkDescriptorPool                            _pool,
     const VkAllocationCallbacks*                pAllocator)
 {
-   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
+   ANV_FROM_HANDLE(anv_device, device, _device);
+   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);
+
+   anv_state_stream_finish(&pool->surface_state_stream);
+   anv_free2(&device->alloc, pAllocator, pool);
 }
 
 VkResult anv_ResetDescriptorPool(
-    VkDevice                                    device,
+    VkDevice                                    _device,
     VkDescriptorPool                            descriptorPool,
     VkDescriptorPoolResetFlags                  flags)
 {
-   anv_finishme("VkDescriptorPool is a stub: free the pool's descriptor sets");
+   ANV_FROM_HANDLE(anv_device, device, _device);
+   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
+
+   pool->next = 0;
+   pool->free_list = EMPTY;
+   anv_state_stream_finish(&pool->surface_state_stream);
+   anv_state_stream_init(&pool->surface_state_stream,
+                         &device->surface_state_block_pool);
+   pool->surface_state_free_list = NULL;
 
    return VK_SUCCESS;
 }
+
+struct pool_free_list_entry {
+   uint32_t next;
+   uint32_t size;
+};
+
+static size_t
+layout_size(const struct anv_descriptor_set_layout *layout)
+{
+   return
+      sizeof(struct anv_descriptor_set) +
+      layout->size * sizeof(struct anv_descriptor) +
+      layout->buffer_count * sizeof(struct anv_buffer_view);
+}
+
+struct surface_state_free_list_entry {
+   void *next;
+   uint32_t offset;
+};
+
 VkResult
 anv_descriptor_set_create(struct anv_device *device,
+                          struct anv_descriptor_pool *pool,
                           const struct anv_descriptor_set_layout *layout,
                           struct anv_descriptor_set **out_set)
 {
    struct anv_descriptor_set *set;
-   size_t size = sizeof(*set) + layout->size * sizeof(set->descriptors[0]);
+   const size_t size = layout_size(layout);
 
-   set = anv_alloc(&device->alloc /* XXX: Use the pool */, size, 8,
-                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
-   if (!set)
+   set = NULL;
+   if (size <= pool->size - pool->next) {
+      set = (struct anv_descriptor_set *) (pool->data + pool->next);
+      pool->next += size;
+   } else {
+      struct pool_free_list_entry *entry;
+      uint32_t *link = &pool->free_list;
+      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
+         entry = (struct pool_free_list_entry *) (pool->data + f);
+         if (size <= entry->size) {
+            *link = entry->next;
+            set = (struct anv_descriptor_set *) entry;
+            break;
+         }
+         link = &entry->next;
+      }
+   }
+
+   if (set == NULL)
       return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
 
-   /* A descriptor set may not be 100% filled. Clear the set so we can
-    * later detect holes in it.
-    */
-   memset(set, 0, size);
-
+   set->size = size;
    set->layout = layout;
+   set->buffer_views =
+      (struct anv_buffer_view *) &set->descriptors[layout->size];
+   set->buffer_count = layout->buffer_count;
 
    /* Go through and fill out immutable samplers if we have any */
    struct anv_descriptor *desc = set->descriptors;
@@ -305,21 +403,24 @@ anv_descriptor_set_create(struct anv_device *device,
       desc += layout->binding[b].array_size;
    }
 
-   /* XXX: Use the pool */
-   set->buffer_views =
-      anv_alloc(&device->alloc,
-                sizeof(set->buffer_views[0]) * layout->buffer_count, 8,
-                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
-   if (!set->buffer_views) {
-      anv_free(&device->alloc, set);
-      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
-   }
-
-   for (uint32_t b = 0; b < layout->buffer_count; b++) {
-      set->buffer_views[b].surface_state =
-         anv_state_pool_alloc(&device->surface_state_pool, 64, 64);
-   }
-   set->buffer_count = layout->buffer_count;
+   /* Allocate surface state for the buffer views. */
+   for (uint32_t b = 0; b < layout->buffer_count; b++) {
+      struct surface_state_free_list_entry *entry =
+         pool->surface_state_free_list;
+      struct anv_state state;
+
+      if (entry) {
+         state.map = entry;
+         state.offset = entry->offset;
+         state.alloc_size = 64;
+         pool->surface_state_free_list = entry->next;
+      } else {
+         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
+      }
+
+      set->buffer_views[b].surface_state = state;
+   }
 
    *out_set = set;
 
    return VK_SUCCESS;
@@ -327,15 +428,27 @@ anv_descriptor_set_create(struct anv_device *device,
 void
 anv_descriptor_set_destroy(struct anv_device *device,
+                           struct anv_descriptor_pool *pool,
                            struct anv_descriptor_set *set)
 {
-   /* XXX: Use the pool */
-   for (uint32_t b = 0; b < set->buffer_count; b++)
-      anv_state_pool_free(&device->surface_state_pool,
-                          set->buffer_views[b].surface_state);
+   /* Put the buffer view surface state back on the free list. */
+   for (uint32_t b = 0; b < set->buffer_count; b++) {
+      struct surface_state_free_list_entry *entry =
+         set->buffer_views[b].surface_state.map;
+      entry->next = pool->surface_state_free_list;
+      pool->surface_state_free_list = entry;
+   }
 
-   anv_free(&device->alloc, set->buffer_views);
-   anv_free(&device->alloc, set);
+   /* Put the descriptor set allocation back on the free list. */
+   const uint32_t index = (char *) set - pool->data;
+   if (index + set->size == pool->next) {
+      pool->next = index;
+   } else {
+      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
+      entry->next = pool->free_list;
+      entry->size = set->size;
+      pool->free_list = (char *) entry - pool->data;
+   }
 }
 
 VkResult anv_AllocateDescriptorSets(
@@ -344,6 +457,7 @@ VkResult anv_AllocateDescriptorSets(
     VkDescriptorSet*                            pDescriptorSets)
 {
    ANV_FROM_HANDLE(anv_device, device, _device);
+   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
 
    VkResult result = VK_SUCCESS;
    struct anv_descriptor_set *set;
@@ -353,7 +467,7 @@ VkResult anv_AllocateDescriptorSets(
       ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                       pAllocateInfo->pSetLayouts[i]);
 
-      result = anv_descriptor_set_create(device, layout, &set);
+      result = anv_descriptor_set_create(device, pool, layout, &set);
       if (result != VK_SUCCESS)
          break;
@@ -374,11 +488,12 @@ VkResult anv_FreeDescriptorSets(
     const VkDescriptorSet*                      pDescriptorSets)
 {
    ANV_FROM_HANDLE(anv_device, device, _device);
+   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
 
    for (uint32_t i = 0; i < count; i++) {
       ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
 
-      anv_descriptor_set_destroy(device, set);
+      anv_descriptor_set_destroy(device, pool, set);
    }
 
    return VK_SUCCESS;
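
A note on the free-list scheme in anv_descriptor_set_destroy() above (this reading is inferred from the code, not stated in the commit): pool->free_list stores a byte offset into pool->data rather than a pointer, and each freed block is reused in place as a struct pool_free_list_entry. EMPTY (1) works as the end-of-list sentinel because offset 0 is a valid block start while offset 1 never is, since no set is smaller than sizeof(struct anv_descriptor_set). Freeing the most recently bump-allocated set simply rewinds pool->next; any other block goes on the free list, where a later allocation of the same size (case 2 in the comment at the top of the file) can pick it up.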

View File

@@ -138,6 +138,27 @@ anv_device_init_meta(struct anv_device *device)
       .pfnFree = meta_free,
    };
 
+   const VkDescriptorPoolCreateInfo create_info = {
+      .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+      .pNext = NULL,
+      .flags = 0,
+      .maxSets = 1,
+      .poolSizeCount = 1,
+      .pPoolSizes = (VkDescriptorPoolSize[]) {
+         {
+            .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+            .descriptorCount = 1
+         },
+      }
+   };
+
+   result = anv_CreateDescriptorPool(anv_device_to_handle(device),
+                                     &create_info,
+                                     &device->meta_state.alloc,
+                                     &device->meta_state.desc_pool);
+   if (result != VK_SUCCESS)
+      goto fail_desc_pool;
+
    result = anv_device_init_meta_clear_state(device);
    if (result != VK_SUCCESS)
       goto fail_clear;
@@ -157,6 +178,10 @@ fail_blit:
 fail_resolve:
    anv_device_finish_meta_clear_state(device);
 fail_clear:
+   anv_DestroyDescriptorPool(anv_device_to_handle(device),
+                             device->meta_state.desc_pool,
+                             &device->meta_state.alloc);
+fail_desc_pool:
    return result;
 }

View File

@@ -165,7 +165,6 @@ meta_emit_blit(struct anv_cmd_buffer *cmd_buffer,
                VkFilter blit_filter)
 {
    struct anv_device *device = cmd_buffer->device;
-   VkDescriptorPool dummy_desc_pool = (VkDescriptorPool)1;
 
    struct blit_vb_data {
       float pos[2];
@@ -248,7 +247,7 @@ meta_emit_blit(struct anv_cmd_buffer *cmd_buffer,
    anv_AllocateDescriptorSets(anv_device_to_handle(device),
       &(VkDescriptorSetAllocateInfo) {
          .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
-         .descriptorPool = dummy_desc_pool,
+         .descriptorPool = device->meta_state.desc_pool,
          .descriptorSetCount = 1,
          .pSetLayouts = &device->meta_state.blit.ds_layout
       }, &set);
@@ -341,7 +340,8 @@ meta_emit_blit(struct anv_cmd_buffer *cmd_buffer,
    /* At the point where we emit the draw call, all data from the
     * descriptor sets, etc. has been used.  We are free to delete it.
    */
-   anv_descriptor_set_destroy(device, anv_descriptor_set_from_handle(set));
+   anv_ResetDescriptorPool(anv_device_to_handle(device),
+                           device->meta_state.desc_pool, 0);
    anv_DestroySampler(anv_device_to_handle(device), sampler,
                       &cmd_buffer->pool->alloc);
    anv_DestroyFramebuffer(anv_device_to_handle(device), fb,

View File

@@ -483,7 +483,6 @@ emit_resolve(struct anv_cmd_buffer *cmd_buffer,
    VkCommandBuffer cmd_buffer_h = anv_cmd_buffer_to_handle(cmd_buffer);
    const struct anv_framebuffer *fb = cmd_buffer->state.framebuffer;
    const struct anv_image *src_image = src_iview->image;
-   VkDescriptorPool dummy_desc_pool_h = (VkDescriptorPool) 1;
 
    const struct vertex_attrs vertex_data[3] = {
       {
@@ -564,7 +563,7 @@ emit_resolve(struct anv_cmd_buffer *cmd_buffer,
    anv_AllocateDescriptorSets(device_h,
       &(VkDescriptorSetAllocateInfo) {
          .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
-         .descriptorPool = dummy_desc_pool_h,
+         .descriptorPool = device->meta_state.desc_pool,
          .descriptorSetCount = 1,
          .pSetLayouts = (VkDescriptorSetLayout[]) {
            device->meta_state.resolve.ds_layout,
@@ -572,8 +571,6 @@ emit_resolve(struct anv_cmd_buffer *cmd_buffer,
       },
       &desc_set_h);
 
-   ANV_FROM_HANDLE(anv_descriptor_set, desc_set, desc_set_h);
-
    anv_UpdateDescriptorSets(device_h,
       /*writeCount*/ 1,
       (VkWriteDescriptorSet[]) {
@@ -644,7 +641,8 @@ emit_resolve(struct anv_cmd_buffer *cmd_buffer,
    /* All objects below are consumed by the draw call. We may safely destroy
     * them.
    */
-   anv_descriptor_set_destroy(device, desc_set);
+   anv_ResetDescriptorPool(anv_device_to_handle(device),
+                           device->meta_state.desc_pool, 0);
    anv_DestroySampler(device_h, sampler_h,
                       &cmd_buffer->pool->alloc);
 }

View File

@@ -571,6 +571,8 @@ void anv_finish_wsi(struct anv_instance *instance);
 struct anv_meta_state {
    VkAllocationCallbacks alloc;
 
+   VkDescriptorPool desc_pool;
+
    /**
     * Use array element `i` for images with `2^i` samples.
     */
@@ -959,18 +961,32 @@ struct anv_descriptor {
 struct anv_descriptor_set {
    const struct anv_descriptor_set_layout *layout;
+   uint32_t size;
    uint32_t buffer_count;
    struct anv_buffer_view *buffer_views;
    struct anv_descriptor descriptors[0];
 };
 
+struct anv_descriptor_pool {
+   uint32_t size;
+   uint32_t next;
+   uint32_t free_list;
+
+   struct anv_state_stream surface_state_stream;
+   void *surface_state_free_list;
+
+   char data[0];
+};
+
 VkResult
 anv_descriptor_set_create(struct anv_device *device,
+                          struct anv_descriptor_pool *pool,
                           const struct anv_descriptor_set_layout *layout,
                           struct anv_descriptor_set **out_set);
 
 void
 anv_descriptor_set_destroy(struct anv_device *device,
+                           struct anv_descriptor_pool *pool,
                            struct anv_descriptor_set *set);
 
 struct anv_pipeline_binding {
@@ -1839,6 +1855,7 @@ ANV_DEFINE_HANDLE_CASTS(anv_queue, VkQueue)
 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_cmd_pool, VkCommandPool)
 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_buffer, VkBuffer)
 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_buffer_view, VkBufferView)
+ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_pool, VkDescriptorPool)
 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set, VkDescriptorSet)
 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_descriptor_set_layout, VkDescriptorSetLayout)
 ANV_DEFINE_NONDISP_HANDLE_CASTS(anv_device_memory, VkDeviceMemory)