vk: Use consistent names for anv_cmd_state dirty bits

Prefix all anv_cmd_state dirty bit tokens with ANV_CMD_DIRTY. For
example:

    old                           -> new
    ANV_DYNAMIC_VIEWPORT_DIRTY    -> ANV_CMD_DIRTY_DYNAMIC_VIEWPORT
    ANV_CMD_BUFFER_PIPELINE_DIRTY -> ANV_CMD_DIRTY_PIPELINE

Change type of anv_cmd_state::dirty and ::compute_dirty from uint32_t to
the self-documenting type anv_cmd_dirty_mask_t.
Author: Chad Versace
Date:   2015-10-16 20:03:46 -07:00
Commit: 4d4e559b6a (parent: 2484d1a01f)

6 changed files with 63 additions and 62 deletions
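As a minimal sketch (illustrative only, not part of this commit), the renamed tokens and
the new typedef from the diff below read naturally at both kinds of call site; the two
example_* functions are hypothetical and only show a set site and a flush-time test:

    /* Names and typedef taken from the commit; example_* functions are hypothetical. */
    enum anv_cmd_dirty_bits {
       ANV_CMD_DIRTY_DYNAMIC_VIEWPORT = 1 << 0,   /* VK_DYNAMIC_STATE_VIEWPORT */
       ANV_CMD_DIRTY_DYNAMIC_SCISSOR  = 1 << 1,   /* VK_DYNAMIC_STATE_SCISSOR */
       /* ... bits 2..8 elided ... */
       ANV_CMD_DIRTY_PIPELINE         = 1 << 9,
    };
    typedef uint32_t anv_cmd_dirty_mask_t;

    /* Hypothetical set site: a vkCmdSet* entry point marks the matching bit. */
    static void
    example_set_scissor(anv_cmd_dirty_mask_t *dirty)
    {
       *dirty |= ANV_CMD_DIRTY_DYNAMIC_SCISSOR;
    }

    /* Hypothetical flush site: gen-specific code tests combinations of bits. */
    static bool
    example_needs_reemit(anv_cmd_dirty_mask_t dirty)
    {
       return (dirty & (ANV_CMD_DIRTY_PIPELINE |
                        ANV_CMD_DIRTY_DYNAMIC_SCISSOR)) != 0;
    }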

@@ -312,14 +312,14 @@ void anv_CmdBindPipeline(
    switch (pipelineBindPoint) {
    case VK_PIPELINE_BIND_POINT_COMPUTE:
       cmd_buffer->state.compute_pipeline = pipeline;
-      cmd_buffer->state.compute_dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
+      cmd_buffer->state.compute_dirty |= ANV_CMD_DIRTY_PIPELINE;
       cmd_buffer->state.push_constants_dirty |= VK_SHADER_STAGE_COMPUTE_BIT;
       break;

    case VK_PIPELINE_BIND_POINT_GRAPHICS:
       cmd_buffer->state.pipeline = pipeline;
       cmd_buffer->state.vb_dirty |= pipeline->vb_used;
-      cmd_buffer->state.dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
+      cmd_buffer->state.dirty |= ANV_CMD_DIRTY_PIPELINE;
       cmd_buffer->state.push_constants_dirty |= pipeline->active_stages;

       /* Apply the dynamic state from the pipeline */
@@ -346,7 +346,7 @@ void anv_CmdSetViewport(
    memcpy(cmd_buffer->state.dynamic.viewport.viewports,
          pViewports, viewportCount * sizeof(*pViewports));

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_VIEWPORT_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_VIEWPORT;
 }

 void anv_CmdSetScissor(
@@ -360,7 +360,7 @@ void anv_CmdSetScissor(
    memcpy(cmd_buffer->state.dynamic.scissor.scissors,
          pScissors, scissorCount * sizeof(*pScissors));

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_SCISSOR_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_SCISSOR;
 }

 void anv_CmdSetLineWidth(
@@ -370,8 +370,7 @@ void anv_CmdSetLineWidth(
    ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, cmdBuffer);

    cmd_buffer->state.dynamic.line_width = lineWidth;
-
-   cmd_buffer->state.dirty |= ANV_DYNAMIC_LINE_WIDTH_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH;
 }

 void anv_CmdSetDepthBias(
@@ -386,7 +385,7 @@ void anv_CmdSetDepthBias(
    cmd_buffer->state.dynamic.depth_bias.clamp = depthBiasClamp;
    cmd_buffer->state.dynamic.depth_bias.slope_scaled = slopeScaledDepthBias;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_DEPTH_BIAS_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS;
 }

 void anv_CmdSetBlendConstants(
@@ -398,7 +397,7 @@ void anv_CmdSetBlendConstants(
    memcpy(cmd_buffer->state.dynamic.blend_constants,
          blendConst, sizeof(float) * 4);

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS;
 }

 void anv_CmdSetDepthBounds(
@@ -411,7 +410,7 @@ void anv_CmdSetDepthBounds(
    cmd_buffer->state.dynamic.depth_bounds.min = minDepthBounds;
    cmd_buffer->state.dynamic.depth_bounds.max = maxDepthBounds;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_DEPTH_BOUNDS_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS;
 }

 void anv_CmdSetStencilCompareMask(
@@ -426,7 +425,7 @@ void anv_CmdSetStencilCompareMask(
    if (faceMask & VK_STENCIL_FACE_BACK_BIT)
       cmd_buffer->state.dynamic.stencil_compare_mask.back = stencilCompareMask;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK;
 }

 void anv_CmdSetStencilWriteMask(
@@ -441,7 +440,7 @@ void anv_CmdSetStencilWriteMask(
    if (faceMask & VK_STENCIL_FACE_BACK_BIT)
       cmd_buffer->state.dynamic.stencil_write_mask.back = stencilWriteMask;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK;
 }

 void anv_CmdSetStencilReference(
@@ -456,7 +455,7 @@ void anv_CmdSetStencilReference(
    if (faceMask & VK_STENCIL_FACE_BACK_BIT)
       cmd_buffer->state.dynamic.stencil_reference.back = stencilReference;

-   cmd_buffer->state.dirty |= ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE;
 }

 void anv_CmdBindDescriptorSets(

@@ -345,7 +345,7 @@ anv_cmd_buffer_restore(struct anv_cmd_buffer *cmd_buffer,
           sizeof(state->old_vertex_bindings));

    cmd_buffer->state.vb_dirty |= (1 << NUM_VB_USED) - 1;
-   cmd_buffer->state.dirty |= ANV_CMD_BUFFER_PIPELINE_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_PIPELINE;
    cmd_buffer->state.descriptors_dirty |= VK_SHADER_STAGE_VERTEX_BIT;

    anv_dynamic_state_copy(&cmd_buffer->state.dynamic, &state->dynamic,

@@ -181,7 +181,7 @@ static void
 anv_pipeline_init_dynamic_state(struct anv_pipeline *pipeline,
                                 const VkGraphicsPipelineCreateInfo *pCreateInfo)
 {
-   uint32_t states = ANV_DYNAMIC_STATE_DIRTY_MASK;
+   anv_cmd_dirty_mask_t states = ANV_CMD_DIRTY_DYNAMIC_ALL;
    ANV_FROM_HANDLE(anv_render_pass, pass, pCreateInfo->renderPass);
    struct anv_subpass *subpass = &pass->subpasses[pCreateInfo->subpass];
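
For context, a hypothetical illustration (not from the commit) of why starting from
ANV_CMD_DIRTY_DYNAMIC_ALL is convenient: it is simply the OR of the nine per-state bits,
so code can begin with "all dynamic state" and mask individual bits out, using only names
the commit defines:

    anv_cmd_dirty_mask_t states = ANV_CMD_DIRTY_DYNAMIC_ALL;   /* (1 << 9) - 1 */

    /* Knock out, for example, the viewport and scissor bits. */
    states &= ~(ANV_CMD_DIRTY_DYNAMIC_VIEWPORT | ANV_CMD_DIRTY_DYNAMIC_SCISSOR);

    /* The result is still an ordinary anv_cmd_dirty_mask_t value. */
    assert((states & ANV_CMD_DIRTY_DYNAMIC_VIEWPORT) == 0);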

@@ -797,19 +797,21 @@ struct anv_buffer {
    VkDeviceSize                                 offset;
 };

-/* The first 9 correspond to 1 << VK_DYNAMIC_STATE_FOO */
-#define ANV_DYNAMIC_VIEWPORT_DIRTY             (1 << 0)
-#define ANV_DYNAMIC_SCISSOR_DIRTY              (1 << 1)
-#define ANV_DYNAMIC_LINE_WIDTH_DIRTY           (1 << 2)
-#define ANV_DYNAMIC_DEPTH_BIAS_DIRTY           (1 << 3)
-#define ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY      (1 << 4)
-#define ANV_DYNAMIC_DEPTH_BOUNDS_DIRTY         (1 << 5)
-#define ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY (1 << 6)
-#define ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY   (1 << 7)
-#define ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY    (1 << 8)
-#define ANV_DYNAMIC_STATE_DIRTY_MASK           ((1 << 9) - 1)
-#define ANV_CMD_BUFFER_PIPELINE_DIRTY          (1 << 9)
-#define ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY      (1 << 10)
+enum anv_cmd_dirty_bits {
+   ANV_CMD_DIRTY_DYNAMIC_VIEWPORT             = 1 << 0, /* VK_DYNAMIC_STATE_VIEWPORT */
+   ANV_CMD_DIRTY_DYNAMIC_SCISSOR              = 1 << 1, /* VK_DYNAMIC_STATE_SCISSOR */
+   ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH           = 1 << 2, /* VK_DYNAMIC_STATE_LINE_WIDTH */
+   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS           = 1 << 3, /* VK_DYNAMIC_STATE_DEPTH_BIAS */
+   ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS      = 1 << 4, /* VK_DYNAMIC_STATE_BLEND_CONSTANTS */
+   ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS         = 1 << 5, /* VK_DYNAMIC_STATE_DEPTH_BOUNDS */
+   ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK = 1 << 6, /* VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK */
+   ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK   = 1 << 7, /* VK_DYNAMIC_STATE_STENCIL_WRITE_MASK */
+   ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE    = 1 << 8, /* VK_DYNAMIC_STATE_STENCIL_REFERENCE */
+   ANV_CMD_DIRTY_DYNAMIC_ALL                  = (1 << 9) - 1,
+   ANV_CMD_DIRTY_PIPELINE                     = 1 << 9,
+   ANV_CMD_DIRTY_INDEX_BUFFER                 = 1 << 10,
+};
+typedef uint32_t anv_cmd_dirty_mask_t;

 struct anv_vertex_binding {
    struct anv_buffer *                          buffer;
@@ -892,8 +894,8 @@ void anv_dynamic_state_copy(struct anv_dynamic_state *dest,
 struct anv_cmd_state {
    uint32_t                                     current_pipeline;
    uint32_t                                     vb_dirty;
-   uint32_t                                     dirty;
-   uint32_t                                     compute_dirty;
+   anv_cmd_dirty_mask_t                         dirty;
+   anv_cmd_dirty_mask_t                         compute_dirty;
    VkShaderStageFlags                           descriptors_dirty;
    VkShaderStageFlags                           push_constants_dirty;
    uint32_t                                     scratch_size;
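
A note on the typedef (a sketch of the rationale, not taken from the commit): the two
state fields stay plain 32-bit integers; anv_cmd_dirty_mask_t only documents that they
hold an OR of anv_cmd_dirty_bits rather than a single enumerator. The helper below is
hypothetical, not part of the driver, and merely illustrates the test/clear pattern such
a mask supports:

    /* Hypothetical helper: test whether any of the requested bits are set, then clear them. */
    static bool
    consume_dirty(anv_cmd_dirty_mask_t *dirty, anv_cmd_dirty_mask_t bits)
    {
       bool any = (*dirty & bits) != 0;   /* did any of the requested state change? */
       *dirty &= ~bits;                   /* clear so the state is re-emitted only once */
       return any;
    }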

@@ -128,7 +128,7 @@ void gen7_CmdBindIndexBuffer(
    ANV_FROM_HANDLE(anv_cmd_buffer, cmd_buffer, cmdBuffer);
    ANV_FROM_HANDLE(anv_buffer, buffer, _buffer);

-   cmd_buffer->state.dirty |= ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;
    cmd_buffer->state.gen7.index_buffer = buffer;
    cmd_buffer->state.gen7.index_type = vk_to_gen_index_type[indexType];
    cmd_buffer->state.gen7.index_offset = offset;
@@ -185,11 +185,11 @@ gen7_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer)
       cmd_buffer->state.current_pipeline = GPGPU;
    }

-   if (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
+   if (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)
       anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);

    if ((cmd_buffer->state.descriptors_dirty & VK_SHADER_STAGE_COMPUTE_BIT) ||
-       (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)) {
+       (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)) {
       /* FIXME: figure out descriptors for gen7 */
       result = gen7_flush_compute_descriptor_set(cmd_buffer);
       assert(result == VK_SUCCESS);
@@ -242,7 +242,7 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       }
    }

-   if (cmd_buffer->state.dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY) {
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_PIPELINE) {
       /* If somebody compiled a pipeline after starting a command buffer the
        * scratch bo may have grown since we started this cmd buffer (and
        * emitted STATE_BASE_ADDRESS). If we're binding that pipeline now,
@@ -256,15 +256,15 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
    if (cmd_buffer->state.descriptors_dirty)
       anv_flush_descriptor_sets(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_VIEWPORT_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)
       anv_cmd_buffer_emit_viewport(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_SCISSOR_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_SCISSOR)
       anv_cmd_buffer_emit_scissor(cmd_buffer);

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_LINE_WIDTH_DIRTY |
-                                  ANV_DYNAMIC_DEPTH_BIAS_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH |
+                                  ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS)) {
       bool enable_bias = cmd_buffer->state.dynamic.depth_bias.bias != 0.0f ||
          cmd_buffer->state.dynamic.depth_bias.slope_scaled != 0.0f;
@@ -285,8 +285,8 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gen7.sf);
    }

-   if (cmd_buffer->state.dirty & (ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
       struct anv_state cc_state =
          anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                             GEN7_COLOR_CALC_STATE_length, 64);
@@ -307,9 +307,9 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                      .ColorCalcStatePointer = cc_state.offset);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK)) {
       uint32_t depth_stencil_dw[GEN7_DEPTH_STENCIL_STATE_length];

       struct GEN7_DEPTH_STENCIL_STATE depth_stencil = {
@@ -340,8 +340,8 @@ gen7_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
    }

    if (cmd_buffer->state.gen7.index_buffer &&
-       cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY)) {
+       cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_INDEX_BUFFER)) {
       struct anv_buffer *buffer = cmd_buffer->state.gen7.index_buffer;
       uint32_t offset = cmd_buffer->state.gen7.index_offset;

@@ -104,7 +104,7 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       }
    }

-   if (cmd_buffer->state.dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY) {
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_PIPELINE) {
       /* If somebody compiled a pipeline after starting a command buffer the
        * scratch bo may have grown since we started this cmd buffer (and
        * emitted STATE_BASE_ADDRESS). If we're binding that pipeline now,
@@ -121,14 +121,14 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
    if (cmd_buffer->state.push_constants_dirty)
       gen8_cmd_buffer_flush_push_constants(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_VIEWPORT_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)
       anv_cmd_buffer_emit_viewport(cmd_buffer);

-   if (cmd_buffer->state.dirty & ANV_DYNAMIC_SCISSOR_DIRTY)
+   if (cmd_buffer->state.dirty & ANV_CMD_DIRTY_DYNAMIC_SCISSOR)
       anv_cmd_buffer_emit_scissor(cmd_buffer);

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_LINE_WIDTH_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH)) {
       uint32_t sf_dw[GEN8_3DSTATE_SF_length];
       struct GEN8_3DSTATE_SF sf = {
          GEN8_3DSTATE_SF_header,
@@ -138,8 +138,8 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
       anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gen8.sf);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_DEPTH_BIAS_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS)) {
       bool enable_bias = cmd_buffer->state.dynamic.depth_bias.bias != 0.0f ||
          cmd_buffer->state.dynamic.depth_bias.slope_scaled != 0.0f;
@@ -158,8 +158,8 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                            pipeline->gen8.raster);
    }

-   if (cmd_buffer->state.dirty & (ANV_DYNAMIC_BLEND_CONSTANTS_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_REFERENCE_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
       struct anv_state cc_state =
          anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
                                             GEN8_COLOR_CALC_STATE_length, 64);
@@ -181,9 +181,9 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                      .ColorCalcStatePointerValid = true);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_COMPARE_MASK_DIRTY |
-                                  ANV_DYNAMIC_STENCIL_WRITE_MASK_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
+                                  ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK)) {
       uint32_t wm_depth_stencil_dw[GEN8_3DSTATE_WM_DEPTH_STENCIL_length];

       struct GEN8_3DSTATE_WM_DEPTH_STENCIL wm_depth_stencil = {
@@ -210,8 +210,8 @@ gen8_cmd_buffer_flush_state(struct anv_cmd_buffer *cmd_buffer)
                             pipeline->gen8.wm_depth_stencil);
    }

-   if (cmd_buffer->state.dirty & (ANV_CMD_BUFFER_PIPELINE_DIRTY |
-                                  ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY)) {
+   if (cmd_buffer->state.dirty & (ANV_CMD_DIRTY_PIPELINE |
+                                  ANV_CMD_DIRTY_INDEX_BUFFER)) {
       anv_batch_emit_merge(&cmd_buffer->batch,
                            cmd_buffer->state.state_vf, pipeline->gen8.vf);
    }
@@ -331,7 +331,7 @@ void gen8_CmdBindIndexBuffer(
    };
    GEN8_3DSTATE_VF_pack(NULL, cmd_buffer->state.state_vf, &vf);

-   cmd_buffer->state.dirty |= ANV_CMD_BUFFER_INDEX_BUFFER_DIRTY;
+   cmd_buffer->state.dirty |= ANV_CMD_DIRTY_INDEX_BUFFER;

    anv_batch_emit(&cmd_buffer->batch, GEN8_3DSTATE_INDEX_BUFFER,
                   .IndexFormat = vk_to_gen_index_type[indexType],
@@ -394,11 +394,11 @@ gen8_cmd_buffer_flush_compute_state(struct anv_cmd_buffer *cmd_buffer)
       cmd_buffer->state.current_pipeline = GPGPU;
    }

-   if (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)
+   if (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)
       anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->batch);

    if ((cmd_buffer->state.descriptors_dirty & VK_SHADER_STAGE_COMPUTE_BIT) ||
-       (cmd_buffer->state.compute_dirty & ANV_CMD_BUFFER_PIPELINE_DIRTY)) {
+       (cmd_buffer->state.compute_dirty & ANV_CMD_DIRTY_PIPELINE)) {
       result = gen8_flush_compute_descriptor_set(cmd_buffer);
       assert(result == VK_SUCCESS);
       cmd_buffer->state.descriptors_dirty &= ~VK_SHADER_STAGE_COMPUTE;