diff --git a/src/intel/vulkan/anv_batch_chain.c b/src/intel/vulkan/anv_batch_chain.c
index 234da63fa9a..fa00bf9d031 100644
--- a/src/intel/vulkan/anv_batch_chain.c
+++ b/src/intel/vulkan/anv_batch_chain.c
@@ -55,20 +55,21 @@ VkResult
 anv_reloc_list_init(struct anv_reloc_list *list,
                     const VkAllocationCallbacks *alloc)
 {
+   assert(alloc != NULL);
    memset(list, 0, sizeof(*list));
+   list->alloc = alloc;
    return VK_SUCCESS;
 }
 
 static VkResult
 anv_reloc_list_init_clone(struct anv_reloc_list *list,
-                          const VkAllocationCallbacks *alloc,
                           const struct anv_reloc_list *other_list)
 {
    list->dep_words = other_list->dep_words;
 
    if (list->dep_words > 0) {
       list->deps =
-         vk_alloc(alloc, list->dep_words * sizeof(BITSET_WORD), 8,
+         vk_alloc(list->alloc, list->dep_words * sizeof(BITSET_WORD), 8,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
       memcpy(list->deps, other_list->deps,
              list->dep_words * sizeof(BITSET_WORD));
@@ -80,15 +81,13 @@ anv_reloc_list_init_clone(struct anv_reloc_list *list,
 }
 
 void
-anv_reloc_list_finish(struct anv_reloc_list *list,
-                      const VkAllocationCallbacks *alloc)
+anv_reloc_list_finish(struct anv_reloc_list *list)
 {
-   vk_free(alloc, list->deps);
+   vk_free(list->alloc, list->deps);
 }
 
 static VkResult
 anv_reloc_list_grow_deps(struct anv_reloc_list *list,
-                         const VkAllocationCallbacks *alloc,
                          uint32_t min_num_words)
 {
    if (min_num_words <= list->dep_words)
@@ -99,7 +98,7 @@ anv_reloc_list_grow_deps(struct anv_reloc_list *list,
       new_length *= 2;
 
    BITSET_WORD *new_deps =
-      vk_realloc(alloc, list->deps, new_length * sizeof(BITSET_WORD), 8,
+      vk_realloc(list->alloc, list->deps, new_length * sizeof(BITSET_WORD), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (new_deps == NULL)
       return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
@@ -115,11 +114,10 @@ anv_reloc_list_grow_deps(struct anv_reloc_list *list,
 
 VkResult
 anv_reloc_list_add_bo(struct anv_reloc_list *list,
-                      const VkAllocationCallbacks *alloc,
                       struct anv_bo *target_bo)
 {
    uint32_t idx = target_bo->gem_handle;
-   VkResult result = anv_reloc_list_grow_deps(list, alloc,
+   VkResult result = anv_reloc_list_grow_deps(list,
                                               (idx / BITSET_WORDBITS) + 1);
    if (unlikely(result != VK_SUCCESS))
       return result;
@@ -138,10 +136,9 @@ anv_reloc_list_clear(struct anv_reloc_list *list)
 
 static VkResult
 anv_reloc_list_append(struct anv_reloc_list *list,
-                      const VkAllocationCallbacks *alloc,
                       struct anv_reloc_list *other)
 {
-   anv_reloc_list_grow_deps(list, alloc, other->dep_words);
+   anv_reloc_list_grow_deps(list, other->dep_words);
    for (uint32_t w = 0; w < other->dep_words; w++)
       list->deps[w] |= other->deps[w];
 
@@ -227,8 +224,7 @@ anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other)
    VG(VALGRIND_CHECK_MEM_IS_DEFINED(other->start, size));
    memcpy(batch->next, other->start, size);
 
-   VkResult result = anv_reloc_list_append(batch->relocs, batch->alloc,
-                                           other->relocs);
+   VkResult result = anv_reloc_list_append(batch->relocs, other->relocs);
    if (result != VK_SUCCESS) {
       anv_batch_set_error(batch, result);
       return;
@@ -291,8 +287,7 @@ anv_batch_bo_clone(struct anv_cmd_buffer *cmd_buffer,
    if (result != VK_SUCCESS)
      goto fail_alloc;
 
-   result = anv_reloc_list_init_clone(&bbo->relocs, &cmd_buffer->vk.pool->alloc,
-                                      &other_bbo->relocs);
+   result = anv_reloc_list_init_clone(&bbo->relocs, &other_bbo->relocs);
    if (result != VK_SUCCESS)
      goto fail_bo_alloc;
 
@@ -366,7 +361,7 @@ static void
 anv_batch_bo_destroy(struct anv_batch_bo *bbo,
                      struct anv_cmd_buffer *cmd_buffer)
 {
-   anv_reloc_list_finish(&bbo->relocs, &cmd_buffer->vk.pool->alloc);
+   anv_reloc_list_finish(&bbo->relocs);
    anv_bo_pool_free(&cmd_buffer->device->batch_bo_pool, bbo->bo);
    vk_free(&cmd_buffer->vk.pool->alloc, bbo);
 }
@@ -879,7 +874,7 @@ anv_cmd_buffer_fini_batch_bo_chain(struct anv_cmd_buffer *cmd_buffer)
       anv_binding_table_pool_free(cmd_buffer->device, *bt_block);
    u_vector_finish(&cmd_buffer->bt_block_states);
 
-   anv_reloc_list_finish(&cmd_buffer->surface_relocs, &cmd_buffer->vk.pool->alloc);
+   anv_reloc_list_finish(&cmd_buffer->surface_relocs);
 
    u_vector_finish(&cmd_buffer->seen_bbos);
 
@@ -1145,8 +1140,7 @@ anv_cmd_buffer_add_secondary(struct anv_cmd_buffer *primary,
       assert(!"Invalid execution mode");
    }
 
-   anv_reloc_list_append(&primary->surface_relocs, &primary->vk.pool->alloc,
-                         &secondary->surface_relocs);
+   anv_reloc_list_append(&primary->surface_relocs, &secondary->surface_relocs);
 }
 
 void
diff --git a/src/intel/vulkan/anv_cmd_buffer.c b/src/intel/vulkan/anv_cmd_buffer.c
index cd4ec972271..c27a23afcb3 100644
--- a/src/intel/vulkan/anv_cmd_buffer.c
+++ b/src/intel/vulkan/anv_cmd_buffer.c
@@ -403,13 +403,11 @@ anv_cmd_buffer_set_ray_query_buffer(struct anv_cmd_buffer *cmd_buffer,
 
       /* Add the ray query buffers to the batch list. */
       anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                            cmd_buffer->batch.alloc,
                             cmd_buffer->state.ray_query_shadow_bo);
    }
 
    /* Add the HW buffer to the list of BO used. */
    anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                         cmd_buffer->batch.alloc,
                          device->ray_query_bo);
 
    /* Fill the push constants & mark them dirty. */
@@ -641,7 +639,6 @@ anv_cmd_buffer_bind_descriptor_set(struct anv_cmd_buffer *cmd_buffer,
 
       if (set_addr.bo) {
          anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                               cmd_buffer->batch.alloc,
                                set_addr.bo);
       }
    }
diff --git a/src/intel/vulkan/anv_pipeline.c b/src/intel/vulkan/anv_pipeline.c
index db85fd7c15e..208bd9c2426 100644
--- a/src/intel/vulkan/anv_pipeline.c
+++ b/src/intel/vulkan/anv_pipeline.c
@@ -306,12 +306,10 @@ anv_pipeline_init_layout(struct anv_pipeline *pipeline,
 
 void
 anv_pipeline_finish(struct anv_pipeline *pipeline,
-                    struct anv_device *device,
-                    const VkAllocationCallbacks *pAllocator)
+                    struct anv_device *device)
 {
    anv_pipeline_sets_layout_fini(&pipeline->layout);
-   anv_reloc_list_finish(&pipeline->batch_relocs,
-                         pAllocator ? pAllocator : &device->vk.alloc);
+   anv_reloc_list_finish(&pipeline->batch_relocs);
    ralloc_free(pipeline->mem_ctx);
    vk_object_base_finish(&pipeline->base);
 }
@@ -375,7 +373,7 @@ void anv_DestroyPipeline(
       unreachable("invalid pipeline type");
    }
 
-   anv_pipeline_finish(pipeline, device, pAllocator);
+   anv_pipeline_finish(pipeline, device);
 
    vk_free2(&device->vk.alloc, pAllocator, pipeline);
 }
@@ -2669,7 +2667,7 @@ anv_compute_pipeline_create(struct anv_device *device,
 
    result = anv_pipeline_compile_cs(pipeline, cache, pCreateInfo);
    if (result != VK_SUCCESS) {
-      anv_pipeline_finish(&pipeline->base, device, pAllocator);
+      anv_pipeline_finish(&pipeline->base, device);
       vk_free2(&device->vk.alloc, pAllocator, pipeline);
       return result;
    }
@@ -3004,7 +3002,7 @@ anv_graphics_lib_pipeline_create(struct anv_device *device,
                           NULL /* sp_info */,
                           &pipeline->all_state, NULL, 0, NULL);
    if (result != VK_SUCCESS) {
-      anv_pipeline_finish(&pipeline->base.base, device, pAllocator);
+      anv_pipeline_finish(&pipeline->base.base, device);
       vk_free2(&device->vk.alloc, pAllocator, pipeline);
       return result;
    }
@@ -3030,7 +3028,7 @@ anv_graphics_lib_pipeline_create(struct anv_device *device,
                              cache, &pipeline_feedback,
                              pCreateInfo, &pipeline->state);
    if (result != VK_SUCCESS) {
-      anv_pipeline_finish(&pipeline->base.base, device, pAllocator);
+      anv_pipeline_finish(&pipeline->base.base, device);
       vk_free2(&device->vk.alloc, pAllocator, pipeline);
       return result;
    }
@@ -3117,7 +3115,7 @@ anv_graphics_pipeline_create(struct anv_device *device,
                           NULL /* sp_info */,
                           &all, NULL, 0, NULL);
    if (result != VK_SUCCESS) {
-      anv_pipeline_finish(&pipeline->base.base, device, pAllocator);
+      anv_pipeline_finish(&pipeline->base.base, device);
       vk_free2(&device->vk.alloc, pAllocator, pipeline);
       return result;
    }
@@ -3155,7 +3153,7 @@ anv_graphics_pipeline_create(struct anv_device *device,
                              cache, &pipeline_feedback,
                              pCreateInfo, &state);
    if (result != VK_SUCCESS) {
-      anv_pipeline_finish(&pipeline->base.base, device, pAllocator);
+      anv_pipeline_finish(&pipeline->base.base, device);
       vk_free2(&device->vk.alloc, pAllocator, pipeline);
       return result;
    }
@@ -4008,7 +4006,7 @@ anv_ray_tracing_pipeline_create(
    result = anv_ray_tracing_pipeline_init(pipeline, device, cache,
                                           pCreateInfo, pAllocator);
    if (result != VK_SUCCESS) {
-      anv_pipeline_finish(&pipeline->base, device, pAllocator);
+      anv_pipeline_finish(&pipeline->base, device);
       vk_free2(&device->vk.alloc, pAllocator, pipeline);
       return result;
    }
diff --git a/src/intel/vulkan/anv_private.h b/src/intel/vulkan/anv_private.h
index f659b3030e4..b5f0ec92472 100644
--- a/src/intel/vulkan/anv_private.h
+++ b/src/intel/vulkan/anv_private.h
@@ -1434,15 +1434,14 @@ void anv_vma_free(struct anv_device *device,
 struct anv_reloc_list {
    uint32_t                                     dep_words;
    BITSET_WORD *                                deps;
+   const VkAllocationCallbacks                 *alloc;
 };
 
 VkResult anv_reloc_list_init(struct anv_reloc_list *list,
                              const VkAllocationCallbacks *alloc);
-void anv_reloc_list_finish(struct anv_reloc_list *list,
-                           const VkAllocationCallbacks *alloc);
+void anv_reloc_list_finish(struct anv_reloc_list *list);
 
 VkResult anv_reloc_list_add_bo(struct anv_reloc_list *list,
-                               const VkAllocationCallbacks *alloc,
                                struct anv_bo *target_bo);
 
 struct anv_batch_bo {
@@ -1542,7 +1541,7 @@ _anv_combine_address(struct anv_batch *batch, void *location,
       return address.offset + delta;
 
    if (batch)
-      anv_reloc_list_add_bo(batch->relocs, batch->alloc, address.bo);
+      anv_reloc_list_add_bo(batch->relocs, address.bo);
 
    return anv_address_physical(anv_address_add(address, delta));
 }
@@ -3725,8 +3724,7 @@ anv_pipeline_init(struct anv_pipeline *pipeline,
 
 void anv_pipeline_finish(struct anv_pipeline *pipeline,
-                         struct anv_device *device,
-                         const VkAllocationCallbacks *pAllocator);
+                         struct anv_device *device);
 
 struct anv_kernel_arg {
    bool is_ptr;
diff --git a/src/intel/vulkan/anv_utrace.c b/src/intel/vulkan/anv_utrace.c
index d4c997bc6d7..5151c348207 100644
--- a/src/intel/vulkan/anv_utrace.c
+++ b/src/intel/vulkan/anv_utrace.c
@@ -84,7 +84,7 @@ anv_utrace_delete_submit(struct u_trace_context *utctx, void *submit_data)
       anv_bo_pool_free(&device->utrace_bo_pool, submit->trace_bo);
 
    if (submit->batch_bo) {
-      anv_reloc_list_finish(&submit->relocs, &device->vk.alloc);
+      anv_reloc_list_finish(&submit->relocs);
       anv_bo_pool_free(&device->utrace_bo_pool, submit->batch_bo);
    }
 
@@ -216,7 +216,7 @@ anv_device_utrace_flush_cmd_buffers(struct anv_queue *queue,
    return VK_SUCCESS;
 
 error_batch:
-   anv_reloc_list_finish(&submit->relocs, &device->vk.alloc);
+   anv_reloc_list_finish(&submit->relocs);
 error_reloc_list:
    anv_bo_pool_free(&device->utrace_bo_pool, submit->batch_bo);
 error_batch_buf:
@@ -507,7 +507,7 @@ anv_queue_trace(struct anv_queue *queue, const char *label, bool frame, bool beg
    return;
 
 error_reloc_list:
-   anv_reloc_list_finish(&submit->relocs, &device->vk.alloc);
+   anv_reloc_list_finish(&submit->relocs);
 error_batch_bo:
    anv_bo_pool_free(&device->utrace_bo_pool, submit->batch_bo);
 error_sync:
diff --git a/src/intel/vulkan/genX_blorp_exec.c b/src/intel/vulkan/genX_blorp_exec.c
index 391bdb1655d..7740af88f73 100644
--- a/src/intel/vulkan/genX_blorp_exec.c
+++ b/src/intel/vulkan/genX_blorp_exec.c
@@ -76,8 +76,7 @@ blorp_emit_reloc(struct blorp_batch *batch,
       .bo = address.buffer,
       .offset = address.offset,
    };
-   anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                         cmd_buffer->batch.alloc, anv_addr.bo);
+   anv_reloc_list_add_bo(cmd_buffer->batch.relocs, anv_addr.bo);
 
    return anv_address_physical(anv_address_add(anv_addr, delta));
 }
@@ -88,7 +87,6 @@ blorp_surface_reloc(struct blorp_batch *batch, uint32_t ss_offset,
    struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
 
    VkResult result = anv_reloc_list_add_bo(&cmd_buffer->surface_relocs,
-                                           &cmd_buffer->vk.pool->alloc,
                                            address.buffer);
    if (unlikely(result != VK_SUCCESS))
       anv_batch_set_error(&cmd_buffer->batch, result);
diff --git a/src/intel/vulkan/genX_cmd_buffer.c b/src/intel/vulkan/genX_cmd_buffer.c
index 24893775472..3395bbee130 100644
--- a/src/intel/vulkan/genX_cmd_buffer.c
+++ b/src/intel/vulkan/genX_cmd_buffer.c
@@ -311,7 +311,6 @@ add_surface_reloc(struct anv_cmd_buffer *cmd_buffer,
                   struct anv_address addr)
 {
    VkResult result = anv_reloc_list_add_bo(&cmd_buffer->surface_relocs,
-                                           &cmd_buffer->vk.pool->alloc,
                                            addr.bo);
 
    if (unlikely(result != VK_SUCCESS))
@@ -328,7 +327,6 @@ add_surface_state_relocs(struct anv_cmd_buffer *cmd_buffer,
    if (!anv_address_is_null(state->aux_address)) {
       VkResult result =
          anv_reloc_list_add_bo(&cmd_buffer->surface_relocs,
-                               &cmd_buffer->vk.pool->alloc,
                                state->aux_address.bo);
       if (result != VK_SUCCESS)
          anv_batch_set_error(&cmd_buffer->batch, result);
@@ -337,7 +335,6 @@ add_surface_state_relocs(struct anv_cmd_buffer *cmd_buffer,
    if (!anv_address_is_null(state->clear_address)) {
       VkResult result =
         anv_reloc_list_add_bo(&cmd_buffer->surface_relocs,
-                              &cmd_buffer->vk.pool->alloc,
                               state->clear_address.bo);
       if (result != VK_SUCCESS)
          anv_batch_set_error(&cmd_buffer->batch, result);
@@ -5607,7 +5604,6 @@ genX(cmd_buffer_ensure_cfe_state)(struct anv_cmd_buffer *cmd_buffer,
                                   MESA_SHADER_COMPUTE,
                                   total_scratch);
       anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                            cmd_buffer->batch.alloc,
                             scratch_bo);
       scratch_surf =
          anv_scratch_pool_get_surf(cmd_buffer->device,
@@ -6337,10 +6333,8 @@ cmd_buffer_trace_rays(struct anv_cmd_buffer *cmd_buffer,
     * TODO(RT): This is a bit of a hack
     */
    anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                         cmd_buffer->batch.alloc,
                          rt->scratch.bo);
    anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                         cmd_buffer->batch.alloc,
                          cmd_buffer->device->btd_fifo_bo);
 
    /* Allocate and set up our RT_DISPATCH_GLOBALS */
@@ -6464,7 +6458,6 @@ cmd_buffer_trace_rays(struct anv_cmd_buffer *cmd_buffer,
                                   MESA_SHADER_COMPUTE,
                                   pipeline->scratch_size);
       anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                            cmd_buffer->batch.alloc,
                             scratch_bo);
       uint32_t scratch_surf =
          anv_scratch_pool_get_surf(cmd_buffer->device,
@@ -7064,8 +7057,7 @@ cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer)
       const struct anv_address depth_address =
          anv_image_address(image, &depth_surface->memory_range);
 
-      anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                            cmd_buffer->batch.alloc, depth_address.bo);
+      anv_reloc_list_add_bo(cmd_buffer->batch.relocs, depth_address.bo);
 
      info.depth_surf = &depth_surface->isl;
      info.depth_address = anv_address_physical(depth_address);
@@ -7081,8 +7073,7 @@ cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer)
         const struct anv_address hiz_address =
            anv_image_address(image, &hiz_surface->memory_range);
 
-         anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                               cmd_buffer->batch.alloc, hiz_address.bo);
+         anv_reloc_list_add_bo(cmd_buffer->batch.relocs, hiz_address.bo);
 
         info.hiz_surf = &hiz_surface->isl;
         info.hiz_address = anv_address_physical(hiz_address);
@@ -7102,8 +7093,7 @@ cmd_buffer_emit_depth_stencil(struct anv_cmd_buffer *cmd_buffer)
      const struct anv_address stencil_address =
        anv_image_address(image, &stencil_surface->memory_range);
 
-      anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                            cmd_buffer->batch.alloc, stencil_address.bo);
+      anv_reloc_list_add_bo(cmd_buffer->batch.relocs, stencil_address.bo);
 
      info.stencil_surf = &stencil_surface->isl;
 
@@ -7161,8 +7151,7 @@ cmd_buffer_emit_cps_control_buffer(struct anv_cmd_buffer *cmd_buffer,
    if (fsr_iview) {
      const struct anv_image_binding *binding = &fsr_iview->image->bindings[0];
 
-      anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
-                            cmd_buffer->batch.alloc, binding->address.bo);
+      anv_reloc_list_add_bo(cmd_buffer->batch.relocs, binding->address.bo);
 
      struct anv_address addr =
        anv_address_add(binding->address, binding->memory_range.offset);
diff --git a/src/intel/vulkan/genX_pipeline.c b/src/intel/vulkan/genX_pipeline.c
index 028778ecd05..ece3ba51258 100644
--- a/src/intel/vulkan/genX_pipeline.c
+++ b/src/intel/vulkan/genX_pipeline.c
@@ -1159,8 +1159,7 @@ get_scratch_surf(struct anv_pipeline *pipeline,
      anv_scratch_pool_alloc(pipeline->device,
                             &pipeline->device->scratch_pool,
                             stage, bin->prog_data->total_scratch);
-   anv_reloc_list_add_bo(pipeline->batch.relocs,
-                         pipeline->batch.alloc, bo);
+   anv_reloc_list_add_bo(pipeline->batch.relocs, bo);
   return anv_scratch_pool_get_surf(pipeline->device,
                                    &pipeline->device->scratch_pool,
                                    bin->prog_data->total_scratch) >> 4;
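
Net effect of the patch: struct anv_reloc_list now remembers the
VkAllocationCallbacks pointer handed to anv_reloc_list_init(), so
anv_reloc_list_add_bo(), anv_reloc_list_append(), anv_reloc_list_grow_deps()
and anv_reloc_list_finish() drop their allocator argument, and
anv_pipeline_finish() loses its pAllocator parameter. The C sketch below is
not part of the patch; it only illustrates the caller-side simplification.
The helper name record_bo_dependency() is hypothetical, while the entry
points and fields it uses are the ones visible in the hunks above.

   /* Before: every call site repeated the allocation callbacks alongside
    * the list it was already passing. */
   static VkResult
   record_bo_dependency_old(struct anv_cmd_buffer *cmd_buffer,
                            struct anv_bo *bo)
   {
      return anv_reloc_list_add_bo(cmd_buffer->batch.relocs,
                                   cmd_buffer->batch.alloc, /* now redundant */
                                   bo);
   }

   /* After: the list was initialized once with
    *    anv_reloc_list_init(list, &cmd_buffer->vk.pool->alloc);
    * and keeps that pointer in list->alloc, so call sites shrink to: */
   static VkResult
   record_bo_dependency(struct anv_cmd_buffer *cmd_buffer,
                        struct anv_bo *bo)
   {
      return anv_reloc_list_add_bo(cmd_buffer->batch.relocs, bo);
   }

Teardown follows the same pattern: anv_reloc_list_finish(&bbo->relocs) frees
list->deps through the stored list->alloc instead of taking the callbacks as
a second parameter.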