vulkan/pipeline_cache: remove vk_device from vk_pipeline_cache_object

It is not necessary to store the extra device pointer in every cache
object: all call sites already have the device at hand, so the
destroy/unref entry points now take an explicit `struct vk_device *`
parameter instead, and the `device` member is dropped from
`struct vk_pipeline_cache_object`.

Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/21967>
This commit is contained in:
Daniel Schürmann
2023-03-16 15:35:48 +01:00
committed by Marge Bot
parent 4ac56e3e5a
commit 5daff41e27
8 changed files with 67 additions and 54 deletions

View File

@@ -2874,7 +2874,8 @@ tu_shaders_deserialize(struct vk_device *device,
struct blob_reader *blob); struct blob_reader *blob);
static void static void
tu_shaders_destroy(struct vk_pipeline_cache_object *object) tu_shaders_destroy(struct vk_device *device,
struct vk_pipeline_cache_object *object)
{ {
struct tu_compiled_shaders *shaders = struct tu_compiled_shaders *shaders =
container_of(object, struct tu_compiled_shaders, base); container_of(object, struct tu_compiled_shaders, base);
@@ -2886,7 +2887,7 @@ tu_shaders_destroy(struct vk_pipeline_cache_object *object)
ralloc_free(shaders->safe_const_variants[i]); ralloc_free(shaders->safe_const_variants[i]);
vk_pipeline_cache_object_finish(&shaders->base); vk_pipeline_cache_object_finish(&shaders->base);
vk_free(&object->device->alloc, shaders); vk_free(&device->alloc, shaders);
} }
const struct vk_pipeline_cache_object_ops tu_shaders_ops = { const struct vk_pipeline_cache_object_ops tu_shaders_ops = {
@@ -3003,7 +3004,8 @@ tu_nir_shaders_deserialize(struct vk_device *device,
struct blob_reader *blob); struct blob_reader *blob);
static void static void
tu_nir_shaders_destroy(struct vk_pipeline_cache_object *object) tu_nir_shaders_destroy(struct vk_device *device,
struct vk_pipeline_cache_object *object)
{ {
struct tu_nir_shaders *shaders = struct tu_nir_shaders *shaders =
container_of(object, struct tu_nir_shaders, base); container_of(object, struct tu_nir_shaders, base);
@@ -3012,7 +3014,7 @@ tu_nir_shaders_destroy(struct vk_pipeline_cache_object *object)
ralloc_free(shaders->nir[i]); ralloc_free(shaders->nir[i]);
vk_pipeline_cache_object_finish(&shaders->base); vk_pipeline_cache_object_finish(&shaders->base);
vk_free(&object->device->alloc, shaders); vk_free(&device->alloc, shaders);
} }
const struct vk_pipeline_cache_object_ops tu_nir_shaders_ops = { const struct vk_pipeline_cache_object_ops tu_nir_shaders_ops = {
@@ -3540,7 +3542,8 @@ done:;
* when compiling all stages, but make sure we don't leak. * when compiling all stages, but make sure we don't leak.
*/ */
if (nir_shaders) if (nir_shaders)
vk_pipeline_cache_object_unref(&nir_shaders->base); vk_pipeline_cache_object_unref(&builder->device->vk,
&nir_shaders->base);
} else { } else {
pipeline->compiled_shaders = compiled_shaders; pipeline->compiled_shaders = compiled_shaders;
pipeline->nir_shaders = nir_shaders; pipeline->nir_shaders = nir_shaders;
@@ -3579,10 +3582,12 @@ fail:
} }
if (compiled_shaders) if (compiled_shaders)
vk_pipeline_cache_object_unref(&compiled_shaders->base); vk_pipeline_cache_object_unref(&builder->device->vk,
&compiled_shaders->base);
if (nir_shaders) if (nir_shaders)
vk_pipeline_cache_object_unref(&nir_shaders->base); vk_pipeline_cache_object_unref(&builder->device->vk,
&nir_shaders->base);
return result; return result;
} }
@@ -4738,10 +4743,11 @@ tu_pipeline_finish(struct tu_pipeline *pipeline,
tu_bo_finish(dev, pipeline->pvtmem_bo); tu_bo_finish(dev, pipeline->pvtmem_bo);
if (pipeline->compiled_shaders) if (pipeline->compiled_shaders)
vk_pipeline_cache_object_unref(&pipeline->compiled_shaders->base); vk_pipeline_cache_object_unref(&dev->vk,
&pipeline->compiled_shaders->base);
if (pipeline->nir_shaders) if (pipeline->nir_shaders)
vk_pipeline_cache_object_unref(&pipeline->nir_shaders->base); vk_pipeline_cache_object_unref(&dev->vk, &pipeline->nir_shaders->base);
for (unsigned i = 0; i < pipeline->num_sets; i++) { for (unsigned i = 0; i < pipeline->num_sets; i++) {
if (pipeline->layouts[i]) if (pipeline->layouts[i])
@@ -4893,7 +4899,8 @@ static void
tu_pipeline_builder_finish(struct tu_pipeline_builder *builder) tu_pipeline_builder_finish(struct tu_pipeline_builder *builder)
{ {
if (builder->compiled_shaders) if (builder->compiled_shaders)
vk_pipeline_cache_object_unref(&builder->compiled_shaders->base); vk_pipeline_cache_object_unref(&builder->device->vk,
&builder->compiled_shaders->base);
ralloc_free(builder->mem_ctx); ralloc_free(builder->mem_ctx);
} }
@@ -5305,7 +5312,7 @@ tu_compute_pipeline_create(VkDevice device,
pipeline->program.cs_instrlen = v->instrlen; pipeline->program.cs_instrlen = v->instrlen;
vk_pipeline_cache_object_unref(&compiled->base); vk_pipeline_cache_object_unref(&dev->vk, &compiled->base);
ralloc_free(pipeline_mem_ctx); ralloc_free(pipeline_mem_ctx);
*pPipeline = tu_pipeline_to_handle(pipeline); *pPipeline = tu_pipeline_to_handle(pipeline);
@@ -5314,7 +5321,7 @@ tu_compute_pipeline_create(VkDevice device,
fail: fail:
if (compiled) if (compiled)
vk_pipeline_cache_object_unref(&compiled->base); vk_pipeline_cache_object_unref(&dev->vk, &compiled->base);
ralloc_free(pipeline_mem_ctx); ralloc_free(pipeline_mem_ctx);

View File

@@ -43,10 +43,12 @@ anv_shader_bin_deserialize(struct vk_device *device,
struct blob_reader *blob); struct blob_reader *blob);
static void static void
anv_shader_bin_destroy(struct vk_pipeline_cache_object *object) anv_shader_bin_destroy(struct vk_device *_device,
struct vk_pipeline_cache_object *object)
{ {
struct anv_device *device = struct anv_device *device =
container_of(object->device, struct anv_device, vk); container_of(_device, struct anv_device, vk);
struct anv_shader_bin *shader = struct anv_shader_bin *shader =
container_of(object, struct anv_shader_bin, base); container_of(object, struct anv_shader_bin, base);

View File

@@ -3063,7 +3063,7 @@ anv_shader_bin_ref(struct anv_shader_bin *shader)
static inline void static inline void
anv_shader_bin_unref(struct anv_device *device, struct anv_shader_bin *shader) anv_shader_bin_unref(struct anv_device *device, struct anv_shader_bin *shader)
{ {
vk_pipeline_cache_object_unref(&shader->base); vk_pipeline_cache_object_unref(&device->vk, &shader->base);
} }
struct anv_pipeline_executable { struct anv_pipeline_executable {

View File

@@ -41,10 +41,11 @@ anv_shader_bin_deserialize(struct vk_device *device,
struct blob_reader *blob); struct blob_reader *blob);
static void static void
anv_shader_bin_destroy(struct vk_pipeline_cache_object *object) anv_shader_bin_destroy(struct vk_device *_device,
struct vk_pipeline_cache_object *object)
{ {
struct anv_device *device = struct anv_device *device =
container_of(object->device, struct anv_device, vk); container_of(_device, struct anv_device, vk);
struct anv_shader_bin *shader = struct anv_shader_bin *shader =
container_of(object, struct anv_shader_bin, base); container_of(object, struct anv_shader_bin, base);

View File

@@ -2817,7 +2817,7 @@ anv_shader_bin_ref(struct anv_shader_bin *shader)
static inline void static inline void
anv_shader_bin_unref(struct anv_device *device, struct anv_shader_bin *shader) anv_shader_bin_unref(struct anv_device *device, struct anv_shader_bin *shader)
{ {
vk_pipeline_cache_object_unref(&shader->base); vk_pipeline_cache_object_unref(&device->vk, &shader->base);
} }
struct anv_pipeline_executable { struct anv_pipeline_executable {

View File

@@ -93,12 +93,13 @@ dzn_cached_blob_serialize(struct vk_pipeline_cache_object *object,
} }
static void static void
dzn_cached_blob_destroy(struct vk_pipeline_cache_object *object) dzn_cached_blob_destroy(struct vk_device *device,
struct vk_pipeline_cache_object *object)
{ {
struct dzn_cached_blob *shader = struct dzn_cached_blob *shader =
container_of(object, struct dzn_cached_blob, base); container_of(object, struct dzn_cached_blob, base);
vk_free(&shader->base.device->alloc, shader); vk_free(&device->alloc, shader);
} }
static struct vk_pipeline_cache_object * static struct vk_pipeline_cache_object *
@@ -544,7 +545,7 @@ dzn_pipeline_cache_lookup_dxil_shader(struct vk_pipeline_cache *cache,
*stage = info->stage; *stage = info->stage;
out: out:
vk_pipeline_cache_object_unref(cache_obj); vk_pipeline_cache_object_unref(cache->base.device, cache_obj);
return ret; return ret;
} }
@@ -571,7 +572,7 @@ dzn_pipeline_cache_add_dxil_shader(struct vk_pipeline_cache *cache,
memcpy(info->data, bc->pShaderBytecode, bc->BytecodeLength); memcpy(info->data, bc->pShaderBytecode, bc->BytecodeLength);
cache_obj = vk_pipeline_cache_add_object(cache, cache_obj); cache_obj = vk_pipeline_cache_add_object(cache, cache_obj);
vk_pipeline_cache_object_unref(cache_obj); vk_pipeline_cache_object_unref(cache->base.device, cache_obj);
} }
struct dzn_cached_gfx_pipeline_header { struct dzn_cached_gfx_pipeline_header {
@@ -647,7 +648,7 @@ dzn_pipeline_cache_lookup_gfx_pipeline(struct dzn_graphics_pipeline *pipeline,
*cache_hit = true; *cache_hit = true;
vk_pipeline_cache_object_unref(cache_obj); vk_pipeline_cache_object_unref(cache->base.device, cache_obj);
return VK_SUCCESS; return VK_SUCCESS;
} }
@@ -702,7 +703,7 @@ dzn_pipeline_cache_add_gfx_pipeline(struct dzn_graphics_pipeline *pipeline,
} }
cache_obj = vk_pipeline_cache_add_object(cache, cache_obj); cache_obj = vk_pipeline_cache_add_object(cache, cache_obj);
vk_pipeline_cache_object_unref(cache_obj); vk_pipeline_cache_object_unref(cache->base.device, cache_obj);
} }
static void static void
@@ -2333,7 +2334,7 @@ dzn_pipeline_cache_lookup_compute_pipeline(struct vk_pipeline_cache *cache,
*cache_hit = true; *cache_hit = true;
out: out:
vk_pipeline_cache_object_unref(cache_obj); vk_pipeline_cache_object_unref(cache->base.device, cache_obj);
return ret; return ret;
} }
@@ -2353,7 +2354,7 @@ dzn_pipeline_cache_add_compute_pipeline(struct vk_pipeline_cache *cache,
memcpy((void *)cached_blob->data, dxil_hash, SHA1_DIGEST_LENGTH); memcpy((void *)cached_blob->data, dxil_hash, SHA1_DIGEST_LENGTH);
cache_obj = vk_pipeline_cache_add_object(cache, cache_obj); cache_obj = vk_pipeline_cache_add_object(cache, cache_obj);
vk_pipeline_cache_object_unref(cache_obj); vk_pipeline_cache_object_unref(cache->base.device, cache_obj);
} }
static VkResult static VkResult

View File

@@ -83,12 +83,13 @@ raw_data_object_deserialize(struct vk_device *device,
} }
static void static void
raw_data_object_destroy(struct vk_pipeline_cache_object *object) raw_data_object_destroy(struct vk_device *device,
struct vk_pipeline_cache_object *object)
{ {
struct raw_data_object *data_obj = struct raw_data_object *data_obj =
container_of(object, struct raw_data_object, base); container_of(object, struct raw_data_object, base);
vk_free(&data_obj->base.device->alloc, data_obj); vk_free(&device->alloc, data_obj);
} }
static const struct vk_pipeline_cache_object_ops raw_data_object_ops = { static const struct vk_pipeline_cache_object_ops raw_data_object_ops = {
@@ -165,14 +166,14 @@ vk_pipeline_cache_remove_object(struct vk_pipeline_cache *cache,
_mesa_set_search_pre_hashed(cache->object_cache, hash, object); _mesa_set_search_pre_hashed(cache->object_cache, hash, object);
if (entry && entry->key == (const void *)object) { if (entry && entry->key == (const void *)object) {
/* Drop the reference owned by the cache */ /* Drop the reference owned by the cache */
vk_pipeline_cache_object_unref(object); vk_pipeline_cache_object_unref(cache->base.device, object);
_mesa_set_remove(cache->object_cache, entry); _mesa_set_remove(cache->object_cache, entry);
} }
vk_pipeline_cache_unlock(cache); vk_pipeline_cache_unlock(cache);
/* Drop our reference */ /* Drop our reference */
vk_pipeline_cache_object_unref(object); vk_pipeline_cache_object_unref(cache->base.device, object);
} }
/* Consumes references to both search and replace and produces a reference */ /* Consumes references to both search and replace and produces a reference */
@@ -192,7 +193,7 @@ vk_pipeline_cache_replace_object(struct vk_pipeline_cache *cache,
if (entry) { if (entry) {
if (entry->key == (const void *)search) { if (entry->key == (const void *)search) {
/* Drop the reference owned by the cache */ /* Drop the reference owned by the cache */
vk_pipeline_cache_object_unref(search); vk_pipeline_cache_object_unref(cache->base.device, search);
entry->key = vk_pipeline_cache_object_ref(replace); entry->key = vk_pipeline_cache_object_ref(replace);
} else { } else {
@@ -205,10 +206,10 @@ vk_pipeline_cache_replace_object(struct vk_pipeline_cache *cache,
} }
vk_pipeline_cache_unlock(cache); vk_pipeline_cache_unlock(cache);
vk_pipeline_cache_object_unref(search); vk_pipeline_cache_object_unref(cache->base.device, search);
if (found) { if (found) {
vk_pipeline_cache_object_unref(replace); vk_pipeline_cache_object_unref(cache->base.device, replace);
return found; return found;
} else { } else {
return replace; return replace;
@@ -293,7 +294,6 @@ vk_pipeline_cache_object_deserialize(struct vk_pipeline_cache *cache,
} }
assert(reader.current == reader.end && !reader.overrun); assert(reader.current == reader.end && !reader.overrun);
assert(object->device == cache->base.device);
assert(object->ops == ops); assert(object->ops == ops);
assert(object->ref_cnt == 1); assert(object->ref_cnt == 1);
assert(object->key_size == key_size); assert(object->key_size == key_size);
@@ -413,7 +413,7 @@ vk_pipeline_cache_add_object(struct vk_pipeline_cache *cache,
vk_pipeline_cache_unlock(cache); vk_pipeline_cache_unlock(cache);
if (found) { if (found) {
vk_pipeline_cache_object_unref(object); vk_pipeline_cache_object_unref(cache->base.device, object);
return found_object; return found_object;
} else { } else {
/* If it wasn't in the object cache, it might not be in the disk cache /* If it wasn't in the object cache, it might not be in the disk cache
@@ -461,7 +461,7 @@ vk_pipeline_cache_lookup_nir(struct vk_pipeline_cache *cache,
blob_reader_init(&blob, data_obj->data, data_obj->data_size); blob_reader_init(&blob, data_obj->data, data_obj->data_size);
nir_shader *nir = nir_deserialize(mem_ctx, nir_options, &blob); nir_shader *nir = nir_deserialize(mem_ctx, nir_options, &blob);
vk_pipeline_cache_object_unref(object); vk_pipeline_cache_object_unref(cache->base.device, object);
if (blob.overrun) { if (blob.overrun) {
ralloc_free(nir); ralloc_free(nir);
@@ -494,7 +494,7 @@ vk_pipeline_cache_add_nir(struct vk_pipeline_cache *cache,
struct vk_pipeline_cache_object *cached = struct vk_pipeline_cache_object *cached =
vk_pipeline_cache_add_object(cache, &data_obj->base); vk_pipeline_cache_add_object(cache, &data_obj->base);
vk_pipeline_cache_object_unref(cached); vk_pipeline_cache_object_unref(cache->base.device, cached);
} }
static int32_t static int32_t
@@ -565,7 +565,7 @@ vk_pipeline_cache_load(struct vk_pipeline_cache *cache,
continue; continue;
object = vk_pipeline_cache_add_object(cache, object); object = vk_pipeline_cache_add_object(cache, object);
vk_pipeline_cache_object_unref(object); vk_pipeline_cache_object_unref(cache->base.device, object);
} }
} }
@@ -619,18 +619,17 @@ vk_pipeline_cache_create(struct vk_device *device,
return cache; return cache;
} }
static void
object_unref_cb(struct set_entry *entry)
{
vk_pipeline_cache_object_unref((void *)entry->key);
}
void void
vk_pipeline_cache_destroy(struct vk_pipeline_cache *cache, vk_pipeline_cache_destroy(struct vk_pipeline_cache *cache,
const VkAllocationCallbacks *pAllocator) const VkAllocationCallbacks *pAllocator)
{ {
if (cache->object_cache) if (cache->object_cache) {
_mesa_set_destroy(cache->object_cache, object_unref_cb); set_foreach_remove(cache->object_cache, entry) {
vk_pipeline_cache_object_unref(cache->base.device,
(void *)entry->key);
}
_mesa_set_destroy(cache->object_cache, NULL);
}
simple_mtx_destroy(&cache->lock); simple_mtx_destroy(&cache->lock);
vk_object_free(cache->base.device, pAllocator, cache); vk_object_free(cache->base.device, pAllocator, cache);
} }
@@ -749,12 +748,14 @@ vk_common_GetPipelineCacheData(VkDevice _device,
} }
VKAPI_ATTR VkResult VKAPI_CALL VKAPI_ATTR VkResult VKAPI_CALL
vk_common_MergePipelineCaches(VkDevice device, vk_common_MergePipelineCaches(VkDevice _device,
VkPipelineCache dstCache, VkPipelineCache dstCache,
uint32_t srcCacheCount, uint32_t srcCacheCount,
const VkPipelineCache *pSrcCaches) const VkPipelineCache *pSrcCaches)
{ {
VK_FROM_HANDLE(vk_pipeline_cache, dst, dstCache); VK_FROM_HANDLE(vk_pipeline_cache, dst, dstCache);
VK_FROM_HANDLE(vk_device, device, _device);
assert(dst->base.device == device);
if (!dst->object_cache) if (!dst->object_cache)
return VK_SUCCESS; return VK_SUCCESS;
@@ -763,6 +764,7 @@ vk_common_MergePipelineCaches(VkDevice device,
for (uint32_t i = 0; i < srcCacheCount; i++) { for (uint32_t i = 0; i < srcCacheCount; i++) {
VK_FROM_HANDLE(vk_pipeline_cache, src, pSrcCaches[i]); VK_FROM_HANDLE(vk_pipeline_cache, src, pSrcCaches[i]);
assert(src->base.device == device);
if (!src->object_cache) if (!src->object_cache)
continue; continue;
@@ -788,7 +790,7 @@ vk_common_MergePipelineCaches(VkDevice device,
/* Even though dst has the object, it only has the blob version /* Even though dst has the object, it only has the blob version
* which isn't as useful. Replace it with the real object. * which isn't as useful. Replace it with the real object.
*/ */
vk_pipeline_cache_object_unref(dst_object); vk_pipeline_cache_object_unref(device, dst_object);
dst_entry->key = vk_pipeline_cache_object_ref(src_object); dst_entry->key = vk_pipeline_cache_object_ref(src_object);
} }
} else { } else {

View File

@@ -86,7 +86,8 @@ struct vk_pipeline_cache_object_ops {
* *
* Called when vk_pipeline_cache_object.ref_cnt hits 0. * Called when vk_pipeline_cache_object.ref_cnt hits 0.
*/ */
void (*destroy)(struct vk_pipeline_cache_object *object); void (*destroy)(struct vk_device *device,
struct vk_pipeline_cache_object *object);
}; };
/** Base struct for cached objects /** Base struct for cached objects
@@ -105,7 +106,6 @@ struct vk_pipeline_cache_object_ops {
* it never has two objects of different types with the same key. * it never has two objects of different types with the same key.
*/ */
struct vk_pipeline_cache_object { struct vk_pipeline_cache_object {
struct vk_device *device;
const struct vk_pipeline_cache_object_ops *ops; const struct vk_pipeline_cache_object_ops *ops;
uint32_t ref_cnt; uint32_t ref_cnt;
@@ -121,7 +121,6 @@ vk_pipeline_cache_object_init(struct vk_device *device,
const void *key_data, uint32_t key_size) const void *key_data, uint32_t key_size)
{ {
memset(object, 0, sizeof(*object)); memset(object, 0, sizeof(*object));
object->device = device;
object->ops = ops; object->ops = ops;
p_atomic_set(&object->ref_cnt, 1); p_atomic_set(&object->ref_cnt, 1);
object->data_size = 0; /* Unknown */ object->data_size = 0; /* Unknown */
@@ -144,11 +143,12 @@ vk_pipeline_cache_object_ref(struct vk_pipeline_cache_object *object)
} }
static inline void static inline void
vk_pipeline_cache_object_unref(struct vk_pipeline_cache_object *object) vk_pipeline_cache_object_unref(struct vk_device *device,
struct vk_pipeline_cache_object *object)
{ {
assert(object && p_atomic_read(&object->ref_cnt) >= 1); assert(object && p_atomic_read(&object->ref_cnt) >= 1);
if (p_atomic_dec_zero(&object->ref_cnt)) if (p_atomic_dec_zero(&object->ref_cnt))
object->ops->destroy(object); object->ops->destroy(device, object);
} }
/** A generic implementation of VkPipelineCache */ /** A generic implementation of VkPipelineCache */