/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>

#include "anv_private.h"
#include "anv_measure.h"

/* These are defined in anv_private.h and blorp_genX_exec.h */
#undef __gen_address_type
#undef __gen_user_data
#undef __gen_combine_address

#include "common/intel_l3_config.h"
#include "blorp/blorp_genX_exec.h"

#include "ds/intel_tracepoints.h"
static void blorp_measure_start(struct blorp_batch *_batch,
                                const struct blorp_params *params)
{
   struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;
   trace_intel_begin_blorp(&cmd_buffer->trace);
   anv_measure_snapshot(cmd_buffer,
                        blorp_op_to_intel_measure_snapshot(params->op),
                        NULL, 0);
}
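
/* Matching end hook: close the tracepoint with the operation, its extent,
 * sample count, pipeline, and source/destination formats.
 */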
static void blorp_measure_end(struct blorp_batch *_batch,
                              const struct blorp_params *params)
{
   struct anv_cmd_buffer *cmd_buffer = _batch->driver_batch;
   trace_intel_end_blorp(&cmd_buffer->trace,
                         params->op,
                         params->x1 - params->x0,
                         params->y1 - params->y0,
                         params->num_samples,
                         params->shader_pipeline,
                         params->dst.view.format,
                         params->src.view.format,
                         (_batch->flags & BLORP_BATCH_PREDICATE_ENABLE));
}
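
/* BLORP asks the driver for batch space through this hook; carve n dwords
 * out of the command buffer's batch.
 */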
static void *
blorp_emit_dwords(struct blorp_batch *batch, unsigned n)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   return anv_batch_emit_dwords(&cmd_buffer->batch, n);
}
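
/* Track the BO on the batch's relocation list and return the 64-bit
 * address (plus delta) BLORP should write for this relocation.
 */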
static uint64_t
blorp_emit_reloc(struct blorp_batch *batch,
                 void *location, struct blorp_address address, uint32_t delta)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   struct anv_address anv_addr = {
      .bo = address.buffer,
      .offset = address.offset,
   };
   anv_reloc_list_add_bo(cmd_buffer->batch.relocs, anv_addr.bo);
   return anv_address_physical(anv_address_add(anv_addr, delta));
}
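
/* For surface states we only need to track the BO for residency; any
 * allocation failure is recorded on the batch.
 */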
static void
blorp_surface_reloc(struct blorp_batch *batch, uint32_t ss_offset,
                    struct blorp_address address, uint32_t delta)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   VkResult result = anv_reloc_list_add_bo(&cmd_buffer->surface_relocs,
                                           address.buffer);
   if (unlikely(result != VK_SUCCESS))
      anv_batch_set_error(&cmd_buffer->batch, result);
}
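
/* Combine the BO and offset into the full 64-bit address that ISL bakes
 * into the surface state.
 */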
static uint64_t
blorp_get_surface_address(struct blorp_batch *blorp_batch,
                          struct blorp_address address)
{
   struct anv_address anv_addr = {
      .bo = address.buffer,
      .offset = address.offset,
   };
   return anv_address_physical(anv_addr);
}
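
/* Only needed on GFX9, where BLORP also wants the base address of the pool
 * that backs surface states.
 */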
#if GFX_VER == 9
static struct blorp_address
blorp_get_surface_base_address(struct blorp_batch *batch)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   return (struct blorp_address) {
      .buffer = cmd_buffer->device->internal_surface_state_pool.block_pool.bo,
      .offset = -cmd_buffer->device->internal_surface_state_pool.start_offset,
   };
}
#endif
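
/* BLORP's dynamic state allocations come from the command buffer's dynamic
 * state stream: return the CPU map and report the offset it will program.
 */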
static void *
blorp_alloc_dynamic_state(struct blorp_batch *batch,
                          uint32_t size,
                          uint32_t alignment,
                          uint32_t *offset)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   struct anv_state state =
      anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, size, alignment);

   *offset = state.offset;
   return state.map;
}
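
/* Same as blorp_alloc_dynamic_state(), but the allocation comes from the
 * command buffer's general state stream.
 */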
UNUSED static void *
blorp_alloc_general_state(struct blorp_batch *batch,
                          uint32_t size,
                          uint32_t alignment,
                          uint32_t *offset)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   struct anv_state state =
      anv_cmd_buffer_alloc_general_state(cmd_buffer, size, alignment);

   *offset = state.offset;
   return state.map;
}
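
/* Allocate a binding table plus one surface state per entry. Returns false
 * if the command buffer has run out of memory.
 */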
static bool
blorp_alloc_binding_table(struct blorp_batch *batch, unsigned num_entries,
                          unsigned state_size, unsigned state_alignment,
                          uint32_t *bt_offset,
                          uint32_t *surface_offsets, void **surface_maps)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   uint32_t state_offset;
   struct anv_state bt_state;

   VkResult result =
      anv_cmd_buffer_alloc_blorp_binding_table(cmd_buffer, num_entries,
                                               &state_offset, &bt_state);
   if (result != VK_SUCCESS)
      return false;

   uint32_t *bt_map = bt_state.map;
   *bt_offset = bt_state.offset;

   for (unsigned i = 0; i < num_entries; i++) {
      struct anv_state surface_state =
         anv_cmd_buffer_alloc_surface_states(cmd_buffer, 1);
      if (surface_state.map == NULL)
         return false;

      bt_map[i] = surface_state.offset + state_offset;
      surface_offsets[i] = surface_state.offset;
      surface_maps[i] = surface_state.map;
   }

   return true;
}
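
/* anv programs binding table pointers as plain offsets, so no conversion
 * is needed here.
 */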
static uint32_t
blorp_binding_table_offset_to_pointer(struct blorp_batch *batch,
                                      uint32_t offset)
{
   return offset;
}
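
/* Vertex data for BLORP lives in the dynamic state pool; hand back its
 * address and the MOCS to use for the vertex buffer.
 */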
static void *
blorp_alloc_vertex_buffer(struct blorp_batch *batch, uint32_t size,
                          struct blorp_address *addr)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   struct anv_state vb_state =
      anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, size, 64);
   struct anv_address vb_addr =
      anv_state_pool_state_address(&cmd_buffer->device->dynamic_state_pool,
                                   vb_state);

   *addr = (struct blorp_address) {
      .buffer = vb_addr.bo,
      .offset = vb_addr.offset,
      .mocs = isl_mocs(&cmd_buffer->device->isl_dev,
                       ISL_SURF_USAGE_VERTEX_BUFFER_BIT, false),
   };

   return vb_state.map;
}
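
/* GFX9 only: record the vertex buffer bindings BLORP is about to use so the
 * VF cache workaround can flush and re-dirty the affected slots.
 */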
static void
blorp_vf_invalidate_for_vb_48b_transitions(struct blorp_batch *batch,
                                           const struct blorp_address *addrs,
                                           uint32_t *sizes,
                                           unsigned num_vbs)
{
#if GFX_VER == 9
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   for (unsigned i = 0; i < num_vbs; i++) {
      struct anv_address anv_addr = {
         .bo = addrs[i].buffer,
         .offset = addrs[i].offset,
      };
      genX(cmd_buffer_set_binding_for_gfx8_vb_flush)(cmd_buffer,
                                                     i, anv_addr, sizes[i]);
   }

   genX(cmd_buffer_apply_pipe_flushes)(cmd_buffer);

   /* Technically, we should call this *after* 3DPRIMITIVE but it doesn't
    * really matter for blorp because we never call apply_pipe_flushes after
    * this point.
    */
   genX(cmd_buffer_update_dirty_vbs_for_gfx8_vb_flush)(cmd_buffer, SEQUENTIAL,
                                                       (1 << num_vbs) - 1);
#endif
}
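
/* BLORP occasionally needs a workaround address; point it at the device's
 * workaround BO.
 */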
UNUSED static struct blorp_address
blorp_get_workaround_address(struct blorp_batch *batch)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   return (struct blorp_address) {
      .buffer = cmd_buffer->device->workaround_address.bo,
      .offset = cmd_buffer->device->workaround_address.offset,
   };
}

static void
blorp_flush_range(struct blorp_batch *batch, void *start, size_t size)
{
   /* We don't need to flush states anymore, since everything will be
    * snooped.
    */
}
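
/* Called before BLORP reprograms the URB: apply the URB workaround and
 * remember the new configuration in the command buffer state.
 */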
static void
blorp_pre_emit_urb_config(struct blorp_batch *blorp_batch,
                          struct intel_urb_config *urb_cfg)
{
   struct anv_cmd_buffer *cmd_buffer = blorp_batch->driver_batch;
   genX(urb_workaround)(cmd_buffer, urb_cfg);

   /* Update urb config. */
   memcpy(&cmd_buffer->state.gfx.urb_cfg, urb_cfg,
          sizeof(struct intel_urb_config));
}

static const struct intel_l3_config *
blorp_get_l3_config(struct blorp_batch *batch)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   return cmd_buffer->state.current_l3_config;
}
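
/* Render-engine path: emit the required workarounds and pipeline setup
 * around blorp_exec(), then flag everything BLORP touched as dirty so the
 * next regular draw re-emits it.
 */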
static void
blorp_exec_on_render(struct blorp_batch *batch,
                     const struct blorp_params *params)
{
   assert((batch->flags & BLORP_BATCH_USE_COMPUTE) == 0);

   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   assert(cmd_buffer->queue_family->queueFlags & VK_QUEUE_GRAPHICS_BIT);

   struct anv_gfx_dynamic_state *hw_state =
      &cmd_buffer->state.gfx.dyn_state;

   const unsigned scale = params->fast_clear_op ? UINT_MAX : 1;
   genX(cmd_buffer_emit_hashing_mode)(cmd_buffer, params->x1 - params->x0,
                                      params->y1 - params->y0, scale);

#if GFX_VER >= 11
   /* The PIPE_CONTROL command description says:
    *
    *    "Whenever a Binding Table Index (BTI) used by a Render Target Message
    *     points to a different RENDER_SURFACE_STATE, SW must issue a Render
    *     Target Cache Flush by enabling this bit. When render target flush
    *     is set due to new association of BTI, PS Scoreboard Stall bit must
    *     be set in this packet."
    */
   if (blorp_uses_bti_rt_writes(batch, params)) {
      anv_add_pending_pipe_bits(cmd_buffer,
                                ANV_PIPE_RENDER_TARGET_CACHE_FLUSH_BIT |
                                ANV_PIPE_STALL_AT_SCOREBOARD_BIT,
                                "before blorp BTI change");
   }
#endif

#if GFX_VERx10 >= 125
   /* Check if blorp ds state matches ours. */
   if (intel_needs_workaround(cmd_buffer->device->info, 18019816803)) {
      bool blorp_ds_state = params->depth.enabled || params->stencil.enabled;
      if (cmd_buffer->state.gfx.ds_write_state != blorp_ds_state) {
         /* Flag the change in ds_write_state so that the next pipeline use
          * will trigger a PIPE_CONTROL too.
          */
         cmd_buffer->state.gfx.ds_write_state = blorp_ds_state;
         BITSET_SET(hw_state->dirty, ANV_GFX_STATE_WA_18019816803);

         /* Add the stall that will flush prior to the blorp operation by
          * genX(cmd_buffer_apply_pipe_flushes).
          */
         anv_add_pending_pipe_bits(cmd_buffer,
                                   ANV_PIPE_PSS_STALL_SYNC_BIT,
                                   "Wa_18019816803");
      }
   }
#endif

   if (params->depth.enabled &&
       !(batch->flags & BLORP_BATCH_NO_EMIT_DEPTH_STENCIL))
      genX(cmd_buffer_emit_gfx12_depth_wa)(cmd_buffer, &params->depth.surf);

   genX(flush_pipeline_select_3d)(cmd_buffer);

   /* Wa_14015814527 */
   genX(apply_task_urb_workaround)(cmd_buffer);

   /* Apply any outstanding flushes in case the pipeline select didn't. */
   genX(cmd_buffer_apply_pipe_flushes)(cmd_buffer);

   /* BLORP doesn't do anything fancy with depth such as discards, so we want
    * the PMA fix off. Also, off is always the safe option.
    */
   genX(cmd_buffer_enable_pma_fix)(cmd_buffer, false);

   blorp_exec(batch, params);

#if GFX_VER >= 11
   /* The PIPE_CONTROL command description says:
    *
    *    "Whenever a Binding Table Index (BTI) used by a Render Target Message
    *     points to a different RENDER_SURFACE_STATE, SW must issue a Render
    *     Target Cache Flush by enabling this bit. When render target flush
    *     is set due to new association of BTI, PS Scoreboard Stall bit must
    *     be set in this packet."
    */
   if (blorp_uses_bti_rt_writes(batch, params)) {
      anv_add_pending_pipe_bits(cmd_buffer,
                                ANV_PIPE_RENDER_TARGET_CACHE_FLUSH_BIT |
                                ANV_PIPE_STALL_AT_SCOREBOARD_BIT,
                                "after blorp BTI change");
   }
#endif

   /* Flag all the instructions emitted by BLORP. */
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_URB);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VF_STATISTICS);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VF);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VF_TOPOLOGY);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VERTEX_INPUT);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VF_SGVS);
#if GFX_VER >= 11
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VF_SGVS_2);
#endif
#if GFX_VER >= 12
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_PRIMITIVE_REPLICATION);
#endif
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VIEWPORT_CC);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_STREAMOUT);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_RASTER);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_CLIP);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_SAMPLE_MASK);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_MULTISAMPLE);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_SF);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_SBE);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_SBE_SWIZ);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_DEPTH_BOUNDS);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_WM);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_WM_DEPTH_STENCIL);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_VS);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_HS);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_DS);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_TE);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_GS);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_PS);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_PS_EXTRA);
   BITSET_SET(hw_state->dirty, ANV_GFX_STATE_BLEND_STATE_POINTERS);
   if (batch->blorp->config.use_mesh_shading) {
      BITSET_SET(hw_state->dirty, ANV_GFX_STATE_MESH_CONTROL);
      BITSET_SET(hw_state->dirty, ANV_GFX_STATE_TASK_CONTROL);
   }
   if (params->wm_prog_data) {
      BITSET_SET(hw_state->dirty, ANV_GFX_STATE_CC_STATE);
      BITSET_SET(hw_state->dirty, ANV_GFX_STATE_PS_BLEND);
   }

   anv_cmd_dirty_mask_t dirty = ~(ANV_CMD_DIRTY_INDEX_BUFFER |
                                  ANV_CMD_DIRTY_XFB_ENABLE);

   cmd_buffer->state.gfx.vb_dirty = ~0;
   cmd_buffer->state.gfx.dirty |= dirty;
   cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_ALL_GRAPHICS;
}
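
/* Compute path: switch to the GPGPU pipeline, flush, and let BLORP run the
 * compute variant of the operation.
 */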
static void
blorp_exec_on_compute(struct blorp_batch *batch,
                      const struct blorp_params *params)
{
   assert(batch->flags & BLORP_BATCH_USE_COMPUTE);

   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   assert(cmd_buffer->queue_family->queueFlags & VK_QUEUE_COMPUTE_BIT);

   genX(flush_pipeline_select_gpgpu)(cmd_buffer);

   /* Apply any outstanding flushes in case the pipeline select didn't. */
   genX(cmd_buffer_apply_pipe_flushes)(cmd_buffer);

   blorp_exec(batch, params);

   cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
}
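
/* Blitter path: only valid on a transfer-only queue; BLORP emits the copy
 * directly and no extra pipeline setup is needed here.
 */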
static void
blorp_exec_on_blitter(struct blorp_batch *batch,
                      const struct blorp_params *params)
{
   assert(batch->flags & BLORP_BATCH_USE_BLITTER);

   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   assert(cmd_buffer->queue_family->queueFlags == VK_QUEUE_TRANSFER_BIT);

   blorp_exec(batch, params);
}
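
/* Top-level exec hook: make sure preemption and the L3 configuration are
 * set up, then dispatch to the engine-specific path.
 */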
void
genX(blorp_exec)(struct blorp_batch *batch,
                 const struct blorp_params *params)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   /* Turn on preemption if it was toggled off. */
   if (!cmd_buffer->state.gfx.object_preemption)
      genX(cmd_buffer_set_preemption)(cmd_buffer, true);

   if (!cmd_buffer->state.current_l3_config) {
      const struct intel_l3_config *cfg =
         intel_get_default_l3_config(cmd_buffer->device->info);
      genX(cmd_buffer_config_l3)(cmd_buffer, cfg);
   }

   if (batch->flags & BLORP_BATCH_USE_BLITTER)
      blorp_exec_on_blitter(batch, params);
   else if (batch->flags & BLORP_BATCH_USE_COMPUTE)
      blorp_exec_on_compute(batch, params);
   else
      blorp_exec_on_render(batch, params);
}
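
/* Per-draw hooks: bracket BLORP's 3DPRIMITIVE with measurement snapshots,
 * breakpoints, and the post-3DPRIMITIVE workarounds.
 */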
static void
blorp_emit_pre_draw(struct blorp_batch *batch, const struct blorp_params *params)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;
   blorp_measure_start(batch, params);
   genX(emit_breakpoint)(&cmd_buffer->batch, cmd_buffer->device, true);
}

static void
blorp_emit_post_draw(struct blorp_batch *batch, const struct blorp_params *params)
{
   struct anv_cmd_buffer *cmd_buffer = batch->driver_batch;

   genX(batch_emit_post_3dprimitive_was)(&cmd_buffer->batch,
                                         cmd_buffer->device,
                                         _3DPRIM_RECTLIST,
                                         3);

   genX(emit_breakpoint)(&cmd_buffer->batch, cmd_buffer->device, false);
   blorp_measure_end(batch, params);
}