/*
 * Copyright © 2022 Collabora Ltd. and Red Hat Inc.
 * SPDX-License-Identifier: MIT
 */
#include "nvk_buffer.h"

#include "nvk_entrypoints.h"
#include "nvk_device.h"
#include "nvk_device_memory.h"
#include "nvk_physical_device.h"

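/* Returns the base alignment required for a buffer with the given usage and
 * create flags.  UBO, SSBO, and texel-buffer usage each impose their own
 * minimum, and sparse binding raises the result to at least 4096 so the
 * buffer's size can be rounded up to a whole VMA reservation.
 */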
uint32_t
nvk_get_buffer_alignment(UNUSED const struct nv_device_info *info,
                         VkBufferUsageFlags2KHR usage_flags,
                         VkBufferCreateFlags create_flags)
{
   uint32_t alignment = 16;

   if (usage_flags & VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR)
      alignment = MAX2(alignment, NVK_MIN_UBO_ALIGNMENT);

   if (usage_flags & VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR)
      alignment = MAX2(alignment, NVK_MIN_SSBO_ALIGNMENT);

   if (usage_flags & (VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR |
                      VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR))
      alignment = MAX2(alignment, NVK_MIN_UBO_ALIGNMENT);

   if (create_flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT)
      alignment = MAX2(alignment, 4096);

   return alignment;
}

VKAPI_ATTR VkResult VKAPI_CALL
nvk_CreateBuffer(VkDevice device,
                 const VkBufferCreateInfo *pCreateInfo,
                 const VkAllocationCallbacks *pAllocator,
                 VkBuffer *pBuffer)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   struct nvk_buffer *buffer;

   buffer = vk_buffer_create(&dev->vk, pCreateInfo, pAllocator,
                             sizeof(*buffer));
   if (!buffer)
      return vk_error(dev, VK_ERROR_OUT_OF_HOST_MEMORY);

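   /* Sparse buffers get no backing allocation here.  Instead, reserve a VMA
    * range covering the aligned size now; memory is bound into that range
    * later, when vma_size_B is non-zero (see nvk_BindBufferMemory2).
    */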
   if (buffer->vk.create_flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) {
      const uint32_t alignment =
         nvk_get_buffer_alignment(&nvk_device_physical(dev)->info,
                                  buffer->vk.usage,
                                  buffer->vk.create_flags);
      assert(alignment >= 4096);
      buffer->vma_size_B = ALIGN_POT(buffer->vk.size, alignment);

      const bool sparse_residency =
         buffer->vk.create_flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT;

      buffer->addr = nouveau_ws_alloc_vma(dev->ws_dev, buffer->vma_size_B,
                                          alignment, sparse_residency);
   }

   *pBuffer = nvk_buffer_to_handle(buffer);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
nvk_DestroyBuffer(VkDevice device,
                  VkBuffer _buffer,
                  const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(nvk_device, dev, device);
   VK_FROM_HANDLE(nvk_buffer, buffer, _buffer);

   if (!buffer)
      return;

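   /* A non-zero vma_size_B means this is a sparse buffer that owns a VMA
    * reservation: unbind whatever is currently mapped there, then release
    * the range itself.
    */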
   if (buffer->vma_size_B > 0) {
      const bool sparse_residency =
         buffer->vk.create_flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT;

      nouveau_ws_bo_unbind_vma(dev->ws_dev, buffer->addr, buffer->vma_size_B);
      nouveau_ws_free_vma(dev->ws_dev, buffer->addr, buffer->vma_size_B,
                          sparse_residency);
   }

   vk_buffer_destroy(&dev->vk, pAllocator, &buffer->vk);
}

VKAPI_ATTR void VKAPI_CALL
nvk_GetDeviceBufferMemoryRequirements(
   VkDevice device,
   const VkDeviceBufferMemoryRequirements *pInfo,
   VkMemoryRequirements2 *pMemoryRequirements)
{
   VK_FROM_HANDLE(nvk_device, dev, device);

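   /* Use the same alignment helper as buffer creation so the reported
    * requirements match what nvk_CreateBuffer assumes, and round the size
    * up to that alignment.
    */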
   const uint32_t alignment =
      nvk_get_buffer_alignment(&nvk_device_physical(dev)->info,
                               pInfo->pCreateInfo->usage,
                               pInfo->pCreateInfo->flags);

   pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
      .size = ALIGN_POT(pInfo->pCreateInfo->size, alignment),
      .alignment = alignment,
      .memoryTypeBits = BITFIELD_MASK(dev->pdev->mem_type_cnt),
   };

   vk_foreach_struct_const(ext, pMemoryRequirements->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
         VkMemoryDedicatedRequirements *dedicated = (void *)ext;
         dedicated->prefersDedicatedAllocation = false;
         dedicated->requiresDedicatedAllocation = false;
         break;
      }
      default:
         nvk_debug_ignored_stype(ext->sType);
         break;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
nvk_GetPhysicalDeviceExternalBufferProperties(
   VkPhysicalDevice physicalDevice,
   const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
   VkExternalBufferProperties *pExternalBufferProperties)
{
   /* The Vulkan 1.3.256 spec says:
    *
    *    VUID-VkPhysicalDeviceExternalBufferInfo-handleType-parameter
    *
    *    "handleType must be a valid VkExternalMemoryHandleTypeFlagBits value"
    *
    * This differs from VkPhysicalDeviceExternalImageFormatInfo, which
    * surprisingly permits handleType == 0.
    */
   assert(pExternalBufferInfo->handleType != 0);

   /* All of the current flags are for sparse which we don't support yet.
    * Even when we do support it, doing sparse on external memory sounds
    * sketchy.  Also, just disallowing flags is the safe option.
    */
   if (pExternalBufferInfo->flags)
      goto unsupported;

   switch (pExternalBufferInfo->handleType) {
   case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
   case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
      pExternalBufferProperties->externalMemoryProperties =
         nvk_dma_buf_mem_props;
      return;
   default:
      goto unsupported;
   }

unsupported:
   /* From the Vulkan 1.3.256 spec:
    *
    *    compatibleHandleTypes must include at least handleType.
    */
   pExternalBufferProperties->externalMemoryProperties =
      (VkExternalMemoryProperties) {
         .compatibleHandleTypes = pExternalBufferInfo->handleType,
      };
}

VKAPI_ATTR VkResult VKAPI_CALL
nvk_BindBufferMemory2(VkDevice device,
                      uint32_t bindInfoCount,
                      const VkBindBufferMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      VK_FROM_HANDLE(nvk_device_memory, mem, pBindInfos[i].memory);
      VK_FROM_HANDLE(nvk_buffer, buffer, pBindInfos[i].buffer);

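      /* The buffer is "local" when the BO is not GART-mapped system memory.
       * Sparse buffers (vma_size_B != 0) have the memory bound into their
       * reserved VMA; everything else simply takes the BO's GPU address
       * plus the bind offset.
       */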
      buffer->is_local = !(mem->bo->flags & NOUVEAU_WS_BO_GART);
      if (buffer->vma_size_B) {
         VK_FROM_HANDLE(nvk_device, dev, device);
         nouveau_ws_bo_bind_vma(dev->ws_dev,
                                mem->bo,
                                buffer->addr,
                                buffer->vma_size_B,
                                pBindInfos[i].memoryOffset,
                                0 /* pte_kind */);
      } else {
         buffer->addr = mem->bo->offset + pBindInfos[i].memoryOffset;
      }
   }

   return VK_SUCCESS;
}

VKAPI_ATTR VkDeviceAddress VKAPI_CALL
nvk_GetBufferDeviceAddress(UNUSED VkDevice device,
                           const VkBufferDeviceAddressInfo *pInfo)
{
   VK_FROM_HANDLE(nvk_buffer, buffer, pInfo->buffer);

   return nvk_buffer_address(buffer, 0);
}

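/* Opaque capture addresses are just the buffer's GPU VA, the same value
 * nvk_GetBufferDeviceAddress returns.
 */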
VKAPI_ATTR uint64_t VKAPI_CALL
nvk_GetBufferOpaqueCaptureAddress(UNUSED VkDevice device,
                                  const VkBufferDeviceAddressInfo *pInfo)
{
   VK_FROM_HANDLE(nvk_buffer, buffer, pInfo->buffer);

   return nvk_buffer_address(buffer, 0);
}