From b97b15d7626b1d0bbab7fcbc6bba87ef1a719804 Mon Sep 17 00:00:00 2001
From: Frank Binns
Date: Sat, 20 Aug 2022 18:49:52 +0100
Subject: [PATCH] pvr: add required pixel formats

As per section 33.3 ("Required Format Support") of the Vulkan 1.0 spec -
see tables 42 to 52.

Signed-off-by: Frank Binns
Reviewed-by: Karmjit Mahil
Part-of:
---
 src/imagination/vulkan/pvr_formats.c | 130 ++++++++++++++++++++++++++-
 1 file changed, 129 insertions(+), 1 deletion(-)

diff --git a/src/imagination/vulkan/pvr_formats.c b/src/imagination/vulkan/pvr_formats.c
index 8e32e54c6ca..e216687fc85 100644
--- a/src/imagination/vulkan/pvr_formats.c
+++ b/src/imagination/vulkan/pvr_formats.c
@@ -47,6 +47,15 @@
       .supported = true, \
    }
 
+#define FORMAT_COMPRESSED(vk, tex_fmt) \
+   [VK_FORMAT_##vk] = { \
+      .vk_format = VK_FORMAT_##vk, \
+      .tex_format = ROGUE_TEXSTATE_FORMAT_COMPRESSED_##tex_fmt, \
+      .pbe_packmode = ROGUE_PBESTATE_PACKMODE_INVALID, \
+      .pbe_accum_format = PVR_PBE_ACCUM_FORMAT_INVALID, \
+      .supported = true, \
+   }
+
 struct pvr_format {
    VkFormat vk_format;
    uint32_t tex_format;
@@ -55,22 +64,141 @@ struct pvr_format {
    bool supported;
 };
 
-/* TODO: Add all supported core formats. */
 static const struct pvr_format pvr_format_table[] = {
+   /* VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3. */
+   FORMAT(B4G4R4A4_UNORM_PACK16, A4R4G4B4, A4R4G4B4, U8),
+   /* VK_FORMAT_R5G6B5_UNORM_PACK16 = 4. */
+   FORMAT(R5G6B5_UNORM_PACK16, R5G6B5, R5G6B5, U8),
+   /* VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8. */
+   FORMAT(A1R5G5B5_UNORM_PACK16, A1R5G5B5, A1R5G5B5, U8),
+   /* VK_FORMAT_R8_UNORM = 9. */
+   FORMAT(R8_UNORM, U8, U8, U8),
+   /* VK_FORMAT_R8_SNORM = 10. */
+   FORMAT(R8_SNORM, S8, S8, S8),
    /* VK_FORMAT_R8_UINT = 13. */
    FORMAT(R8_UINT, U8, U8, UINT8),
+   /* VK_FORMAT_R8_SINT = 14. */
+   FORMAT(R8_SINT, S8, S8, SINT8),
+   /* VK_FORMAT_R8G8_UNORM = 16. */
+   FORMAT(R8G8_UNORM, U8U8, U8U8, U8),
+   /* VK_FORMAT_R8G8_SNORM = 17. */
+   FORMAT(R8G8_SNORM, S8S8, S8S8, S8),
+   /* VK_FORMAT_R8G8_UINT = 20. */
+   FORMAT(R8G8_UINT, U8U8, U8U8, UINT8),
+   /* VK_FORMAT_R8G8_SINT = 21. */
+   FORMAT(R8G8_SINT, S8S8, S8S8, SINT8),
    /* VK_FORMAT_R8G8B8A8_UNORM = 37. */
    FORMAT(R8G8B8A8_UNORM, U8U8U8U8, U8U8U8U8, U8),
+   /* VK_FORMAT_R8G8B8A8_SNORM = 38. */
+   FORMAT(R8G8B8A8_SNORM, S8S8S8S8, S8S8S8S8, S8),
+   /* VK_FORMAT_R8G8B8A8_UINT = 41. */
+   FORMAT(R8G8B8A8_UINT, U8U8U8U8, U8U8U8U8, UINT8),
+   /* VK_FORMAT_R8G8B8A8_SINT = 42. */
+   FORMAT(R8G8B8A8_SINT, S8S8S8S8, S8S8S8S8, SINT8),
+   /* VK_FORMAT_R8G8B8A8_SRGB = 43. */
+   FORMAT(R8G8B8A8_SRGB, U8U8U8U8, U8U8U8U8, F16),
    /* VK_FORMAT_B8G8R8A8_UNORM = 44. */
    FORMAT(B8G8R8A8_UNORM, U8U8U8U8, U8U8U8U8, U8),
+   /* VK_FORMAT_B8G8R8A8_SRGB = 50. */
+   FORMAT(B8G8R8A8_SRGB, U8U8U8U8, U8U8U8U8, F16),
+   /* VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51. */
+   FORMAT(A8B8G8R8_UNORM_PACK32, U8U8U8U8, U8U8U8U8, U8),
+   /* VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52. */
+   FORMAT(A8B8G8R8_SNORM_PACK32, S8S8S8S8, S8S8S8S8, S8),
+   /* VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55. */
+   FORMAT(A8B8G8R8_UINT_PACK32, U8U8U8U8, U8U8U8U8, UINT8),
+   /* VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56. */
+   FORMAT(A8B8G8R8_SINT_PACK32, S8S8S8S8, S8S8S8S8, SINT8),
+   /* VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57. */
+   FORMAT(A8B8G8R8_SRGB_PACK32, U8U8U8U8, U8U8U8U8, F16),
+   /* VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64. */
+   FORMAT(A2B10G10R10_UNORM_PACK32, A2R10B10G10, A2R10B10G10, F16),
+   /* VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68. */
+   FORMAT(A2B10G10R10_UINT_PACK32, A2R10B10G10, U32, UINT32),
+   /* VK_FORMAT_R16_UNORM = 70. */
+   FORMAT(R16_UNORM, U16, U16, U16),
+   /* VK_FORMAT_R16_SNORM = 71. */
+   FORMAT(R16_SNORM, S16, S16, S16),
+   /* VK_FORMAT_R16_UINT = 74. */
+   FORMAT(R16_UINT, U16, U16, UINT16),
+   /* VK_FORMAT_R16_SINT = 75. */
+   FORMAT(R16_SINT, S16, S16, SINT16),
+   /* VK_FORMAT_R16_SFLOAT = 76. */
+   FORMAT(R16_SFLOAT, F16, F16, F16),
+   /* VK_FORMAT_R16G16_UNORM = 77. */
+   FORMAT(R16G16_UNORM, U16U16, U16U16, U16),
+   /* VK_FORMAT_R16G16_SNORM = 78. */
+   FORMAT(R16G16_SNORM, S16S16, S16S16, S16),
+   /* VK_FORMAT_R16G16_UINT = 81. */
+   FORMAT(R16G16_UINT, U16U16, U16U16, UINT16),
+   /* VK_FORMAT_R16G16_SINT = 82. */
+   FORMAT(R16G16_SINT, S16S16, S16S16, SINT16),
+   /* VK_FORMAT_R16G16_SFLOAT = 83. */
+   FORMAT(R16G16_SFLOAT, F16F16, F16F16, F16),
+   /* VK_FORMAT_R16G16B16A16_UNORM = 91. */
+   FORMAT(R16G16B16A16_UNORM, U16U16U16U16, U16U16U16U16, U16),
+   /* VK_FORMAT_R16G16B16A16_SNORM = 92. */
+   FORMAT(R16G16B16A16_SNORM, S16S16S16S16, S16S16S16S16, S16),
+   /* VK_FORMAT_R16G16B16A16_UINT = 95. */
+   FORMAT(R16G16B16A16_UINT, U16U16U16U16, U16U16U16U16, UINT16),
+   /* VK_FORMAT_R16G16B16A16_SINT = 96. */
+   FORMAT(R16G16B16A16_SINT, S16S16S16S16, S16S16S16S16, SINT16),
+   /* VK_FORMAT_R16G16B16A16_SFLOAT = 97. */
+   FORMAT(R16G16B16A16_SFLOAT, F16F16F16F16, F16F16F16F16, F16),
    /* VK_FORMAT_R32_UINT = 98. */
    FORMAT(R32_UINT, U32, U32, UINT32),
+   /* VK_FORMAT_R32_SINT = 99. */
+   FORMAT(R32_SINT, S32, S32, SINT32),
+   /* VK_FORMAT_R32_SFLOAT = 100. */
+   FORMAT(R32_SFLOAT, F32, F32, F32),
+   /* VK_FORMAT_R32G32_UINT = 101. */
+   FORMAT(R32G32_UINT, U32U32, U32U32, UINT32),
+   /* VK_FORMAT_R32G32_SINT = 102. */
+   FORMAT(R32G32_SINT, S32S32, S32S32, SINT32),
+   /* VK_FORMAT_R32G32_SFLOAT = 103. */
+   FORMAT(R32G32_SFLOAT, F32F32, F32F32, F32),
+   /* VK_FORMAT_R32G32B32_UINT = 104. */
+   FORMAT(R32G32B32_UINT, U32U32U32, U32U32U32, UINT32),
+   /* VK_FORMAT_R32G32B32_SINT = 105. */
+   FORMAT(R32G32B32_SINT, S32S32S32, S32S32S32, SINT32),
+   /* VK_FORMAT_R32G32B32_SFLOAT = 106. */
+   FORMAT(R32G32B32_SFLOAT, F32F32F32, F32F32F32, F32),
    /* VK_FORMAT_R32G32B32A32_UINT = 107. */
    FORMAT(R32G32B32A32_UINT, U32U32U32U32, U32U32U32U32, UINT32),
+   /* VK_FORMAT_R32G32B32A32_SINT = 108. */
+   FORMAT(R32G32B32A32_SINT, S32S32S32S32, S32S32S32S32, SINT32),
    /* VK_FORMAT_R32G32B32A32_SFLOAT = 109. */
    FORMAT(R32G32B32A32_SFLOAT, F32F32F32F32, F32F32F32F32, F32),
+   /* VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122. */
+   FORMAT(B10G11R11_UFLOAT_PACK32, F10F11F11, F10F11F11, F16),
+   /* VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123. */
+   FORMAT(E5B9G9R9_UFLOAT_PACK32, SE9995, SE9995, INVALID),
+   /* VK_FORMAT_D16_UNORM = 124. */
+   FORMAT(D16_UNORM, U16, U16, F16),
    /* VK_FORMAT_D32_SFLOAT = 126. */
    FORMAT(D32_SFLOAT, F32, F32, F16),
+   /* VK_FORMAT_D24_UNORM_S8_UINT = 129. */
+   FORMAT(D24_UNORM_S8_UINT, ST8U24, ST8U24, F16),
+   /* VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147. */
+   FORMAT_COMPRESSED(ETC2_R8G8B8_UNORM_BLOCK, ETC2_RGB),
+   /* VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148. */
+   FORMAT_COMPRESSED(ETC2_R8G8B8_SRGB_BLOCK, ETC2_RGB),
+   /* VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149. */
+   FORMAT_COMPRESSED(ETC2_R8G8B8A1_UNORM_BLOCK, ETC2_PUNCHTHROUGHA),
+   /* VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150. */
+   FORMAT_COMPRESSED(ETC2_R8G8B8A1_SRGB_BLOCK, ETC2_PUNCHTHROUGHA),
+   /* VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151. */
+   FORMAT_COMPRESSED(ETC2_R8G8B8A8_UNORM_BLOCK, ETC2A_RGBA),
+   /* VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152. */
+   FORMAT_COMPRESSED(ETC2_R8G8B8A8_SRGB_BLOCK, ETC2A_RGBA),
+   /* VK_FORMAT_EAC_R11_UNORM_BLOCK = 153. */
+   FORMAT_COMPRESSED(EAC_R11_UNORM_BLOCK, EAC_R11_UNSIGNED),
+   /* VK_FORMAT_EAC_R11_SNORM_BLOCK = 154. */
+   FORMAT_COMPRESSED(EAC_R11_SNORM_BLOCK, EAC_R11_SIGNED),
+   /* VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155. */
+   FORMAT_COMPRESSED(EAC_R11G11_UNORM_BLOCK, EAC_RG11_UNSIGNED),
+   /* VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156. */
+   FORMAT_COMPRESSED(EAC_R11G11_SNORM_BLOCK, EAC_RG11_SIGNED),
 };
 
 #undef FORMAT