nir: Replace the scoped_memory_barrier with a scoped_barrier

SPIR-V's OpControlBarrier can carry both control (execution) and memory
barrier semantics, which some hardware can handle with a single
instruction. Let's turn scoped_memory_barrier into a scoped_barrier
intrinsic that can embed both barrier types. Control-only or memory-only
barriers can still be expressed through this new intrinsic by passing
NIR_SCOPE_NONE as the scope of the unused barrier type.

Signed-off-by: Boris Brezillon <boris.brezillon@collabora.com>
Suggested-by: Caio Marcelo de Oliveira Filho <caio.oliveira@intel.com>
Reviewed-by: Caio Marcelo de Oliveira Filho <caio.oliveira@intel.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/4900>
commit 345b5847b4
parent 94438a64bf
Author:    Boris Brezillon
Date:      2020-05-05 09:13:20 +02:00
Committed: Marge Bot

14 changed files with 61 additions and 36 deletions
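
For illustration only (not part of this change): a minimal sketch of how a frontend could use the new builder helper, assuming a nir_builder `b` that is already positioned at the insertion point. The first call emits a combined workgroup control + shared-memory barrier; the second passes NIR_SCOPE_NONE as the execution scope and is therefore equivalent to the old scoped_memory_barrier.

/* Combined barrier: workgroup execution scope plus a workgroup-scoped
 * acquire/release fence over shared memory. */
nir_scoped_barrier(&b, NIR_SCOPE_WORKGROUP, NIR_SCOPE_WORKGROUP,
                   NIR_MEMORY_ACQUIRE | NIR_MEMORY_RELEASE,
                   nir_var_mem_shared);

/* Memory-only barrier: no execution scope, so this matches what
 * nir_scoped_memory_barrier() used to emit. */
nir_scoped_barrier(&b, NIR_SCOPE_NONE, NIR_SCOPE_DEVICE,
                   NIR_MEMORY_ACQUIRE | NIR_MEMORY_RELEASE,
                   nir_var_mem_ssbo | nir_var_mem_global);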

@@ -1525,6 +1525,7 @@ typedef enum {
 } nir_memory_semantics;
 
 typedef enum {
+   NIR_SCOPE_NONE,
    NIR_SCOPE_INVOCATION,
    NIR_SCOPE_SUBGROUP,
    NIR_SCOPE_WORKGROUP,
@@ -1696,6 +1697,11 @@ typedef enum {
     */
    NIR_INTRINSIC_MEMORY_SCOPE,
 
+   /**
+    * Value of nir_scope.
+    */
+   NIR_INTRINSIC_EXECUTION_SCOPE,
+
    NIR_INTRINSIC_NUM_INDEX_FLAGS,
 
 } nir_intrinsic_index_flag;
@@ -1835,6 +1841,7 @@ INTRINSIC_IDX_ACCESSORS(driver_location, DRIVER_LOCATION, unsigned)
 INTRINSIC_IDX_ACCESSORS(memory_semantics, MEMORY_SEMANTICS, nir_memory_semantics)
 INTRINSIC_IDX_ACCESSORS(memory_modes, MEMORY_MODES, nir_variable_mode)
 INTRINSIC_IDX_ACCESSORS(memory_scope, MEMORY_SCOPE, nir_scope)
+INTRINSIC_IDX_ACCESSORS(execution_scope, EXECUTION_SCOPE, nir_scope)
 
 static inline void
 nir_intrinsic_set_align(nir_intrinsic_instr *intrin,
@@ -3127,10 +3134,10 @@ typedef struct nir_shader_compiler_options {
     * to imul with masked inputs and iadd */
    bool has_umad24;
 
-   /* Whether to generate only scoped_memory_barrier intrinsics instead of the
-    * set of memory barrier intrinsics based on GLSL.
+   /* Whether to generate only scoped_barrier intrinsics instead of the set of
+    * memory and control barrier intrinsics based on GLSL.
     */
-   bool use_scoped_memory_barrier;
+   bool use_scoped_barrier;
 
    /**
    * Is this the Intel vec4 backend?

@@ -1359,18 +1359,29 @@ nir_compare_func(nir_builder *b, enum compare_func func,
    unreachable("bad compare func");
 }
 
+static inline void
+nir_scoped_barrier(nir_builder *b,
+                   nir_scope exec_scope,
+                   nir_scope mem_scope,
+                   nir_memory_semantics mem_semantics,
+                   nir_variable_mode mem_modes)
+{
+   nir_intrinsic_instr *intrin =
+      nir_intrinsic_instr_create(b->shader, nir_intrinsic_scoped_barrier);
+   nir_intrinsic_set_execution_scope(intrin, exec_scope);
+   nir_intrinsic_set_memory_scope(intrin, mem_scope);
+   nir_intrinsic_set_memory_semantics(intrin, mem_semantics);
+   nir_intrinsic_set_memory_modes(intrin, mem_modes);
+   nir_builder_instr_insert(b, &intrin->instr);
+}
+
 static inline void
 nir_scoped_memory_barrier(nir_builder *b,
                           nir_scope scope,
                           nir_memory_semantics semantics,
                           nir_variable_mode modes)
 {
-   nir_intrinsic_instr *intrin =
-      nir_intrinsic_instr_create(b->shader, nir_intrinsic_scoped_memory_barrier);
-   nir_intrinsic_set_memory_scope(intrin, scope);
-   nir_intrinsic_set_memory_semantics(intrin, semantics);
-   nir_intrinsic_set_memory_modes(intrin, modes);
-   nir_builder_instr_insert(b, &intrin->instr);
+   nir_scoped_barrier(b, NIR_SCOPE_NONE, scope, semantics, modes);
 }
 
 #endif /* NIR_BUILDER_H */

@@ -132,6 +132,8 @@ MEMORY_SEMANTICS = "NIR_INTRINSIC_MEMORY_SEMANTICS"
 MEMORY_MODES = "NIR_INTRINSIC_MEMORY_MODES"
 # Scope of a memory operation
 MEMORY_SCOPE = "NIR_INTRINSIC_MEMORY_SCOPE"
+# Scope of a control barrier
+EXECUTION_SCOPE = "NIR_INTRINSIC_EXECUTION_SCOPE"
 
 #
 # Possible flags:
@@ -219,11 +221,12 @@ barrier("control_barrier")
 # intrinsic.
 barrier("memory_barrier")
 
-# Memory barrier with explicit scope. Follows the semantics of SPIR-V
-# OpMemoryBarrier, used to implement Vulkan Memory Model. Storage that the
-# barrier applies is represented using NIR variable modes.
-intrinsic("scoped_memory_barrier",
-          indices=[MEMORY_SEMANTICS, MEMORY_MODES, MEMORY_SCOPE])
+# Control/Memory barrier with explicit scope. Follows the semantics of SPIR-V
+# OpMemoryBarrier and OpControlBarrier, used to implement Vulkan Memory Model.
+# Storage that the barrier applies is represented using NIR variable modes.
+# For an OpMemoryBarrier, set EXECUTION_SCOPE to NIR_SCOPE_NONE.
+intrinsic("scoped_barrier",
+          indices=[EXECUTION_SCOPE, MEMORY_SEMANTICS, MEMORY_MODES, MEMORY_SCOPE])
 
 # Shader clock intrinsic with semantics analogous to the clock2x32ARB()
 # GLSL intrinsic.
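
As a hedged sketch (not taken from this commit), a pass or backend consuming the new intrinsic would typically test each scope index against NIR_SCOPE_NONE to see which components are present; handle_scoped_barrier() and its body below are purely illustrative.

static void
handle_scoped_barrier(nir_intrinsic_instr *intrin)
{
   assert(intrin->intrinsic == nir_intrinsic_scoped_barrier);

   if (nir_intrinsic_execution_scope(intrin) != NIR_SCOPE_NONE) {
      /* Control component: invocations within this scope must all reach
       * the barrier before any of them continues. */
   }

   if (nir_intrinsic_memory_scope(intrin) != NIR_SCOPE_NONE) {
      /* Memory component: order the given modes with the given semantics. */
      nir_variable_mode modes = nir_intrinsic_memory_modes(intrin);
      nir_memory_semantics semantics = nir_intrinsic_memory_semantics(intrin);
      (void)modes;
      (void)semantics;
   }
}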

@@ -134,7 +134,7 @@ gather_intrinsic(struct access_state *state, nir_intrinsic_instr *instr)
          state->image_barriers = true;
       break;
 
-   case nir_intrinsic_scoped_memory_barrier:
+   case nir_intrinsic_scoped_barrier:
       /* TODO: Could be more granular if we had nir_var_mem_image. */
       if (nir_intrinsic_memory_modes(instr) & (nir_var_mem_ubo |
                                                nir_var_mem_ssbo |

@@ -39,7 +39,8 @@ nir_opt_combine_memory_barriers_impl(
       }
 
       nir_intrinsic_instr *current = nir_instr_as_intrinsic(instr);
-      if (current->intrinsic != nir_intrinsic_scoped_memory_barrier) {
+      if (current->intrinsic != nir_intrinsic_scoped_barrier ||
+          nir_intrinsic_execution_scope(current) != NIR_SCOPE_NONE) {
          prev = NULL;
          continue;
       }

@@ -338,7 +338,7 @@ combine_stores_block(struct combine_stores_state *state, nir_block *block)
          combine_stores_with_modes(state, nir_var_shader_out);
          break;
 
-      case nir_intrinsic_scoped_memory_barrier:
+      case nir_intrinsic_scoped_barrier:
          if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_RELEASE) {
            combine_stores_with_modes(state,
                                      nir_intrinsic_memory_modes(intrin));

@@ -174,7 +174,7 @@ gather_vars_written(struct copy_prop_var_state *state,
                        nir_var_mem_global;
      break;
 
-   case nir_intrinsic_scoped_memory_barrier:
+   case nir_intrinsic_scoped_barrier:
      if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_ACQUIRE)
         written->modes |= nir_intrinsic_memory_modes(intrin);
      break;
@@ -831,7 +831,7 @@ copy_prop_vars_block(struct copy_prop_var_state *state,
         apply_barrier_for_modes(copies, nir_var_shader_out);
         break;
 
-      case nir_intrinsic_scoped_memory_barrier:
+      case nir_intrinsic_scoped_barrier:
         if (debug) dump_instr(instr);
 
         if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_ACQUIRE)

@@ -155,7 +155,7 @@ remove_dead_write_vars_local(void *mem_ctx, nir_block *block)
         clear_unused_for_modes(&unused_writes, nir_var_shader_out);
         break;
 
-      case nir_intrinsic_scoped_memory_barrier: {
+      case nir_intrinsic_scoped_barrier: {
         if (nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_RELEASE) {
            clear_unused_for_modes(&unused_writes,
                                   nir_intrinsic_memory_modes(intrin));

@@ -1225,7 +1225,10 @@ handle_barrier(struct vectorize_ctx *ctx, bool *progress, nir_function_impl *imp
    case nir_intrinsic_memory_barrier_shared:
       modes = nir_var_mem_shared;
       break;
-   case nir_intrinsic_scoped_memory_barrier:
+   case nir_intrinsic_scoped_barrier:
+      if (nir_intrinsic_memory_scope(intrin) == NIR_SCOPE_NONE)
+         break;
+
       modes = nir_intrinsic_memory_modes(intrin);
       acquire = nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_ACQUIRE;
       release = nir_intrinsic_memory_semantics(intrin) & NIR_MEMORY_RELEASE;

@@ -786,6 +786,7 @@ print_intrinsic_instr(nir_intrinsic_instr *instr, print_state *state)
      [NIR_INTRINSIC_MEMORY_SEMANTICS] = "mem_semantics",
      [NIR_INTRINSIC_MEMORY_MODES] = "mem_modes",
      [NIR_INTRINSIC_MEMORY_SCOPE] = "mem_scope",
+      [NIR_INTRINSIC_EXECUTION_SCOPE] = "exec_scope",
   };
   for (unsigned idx = 1; idx < NIR_INTRINSIC_NUM_INDEX_FLAGS; idx++) {
      if (!info->index_map[idx])
@@ -896,9 +897,11 @@ print_intrinsic_instr(nir_intrinsic_instr *instr, print_state *state)
         break;
      }
 
+      case NIR_INTRINSIC_EXECUTION_SCOPE:
      case NIR_INTRINSIC_MEMORY_SCOPE: {
-         fprintf(fp, " mem_scope=");
+         fprintf(fp, " %s=", index_name[idx]);
         switch (nir_intrinsic_memory_scope(instr)) {
+         case NIR_SCOPE_NONE: fprintf(fp, "NONE"); break;
         case NIR_SCOPE_DEVICE: fprintf(fp, "DEVICE"); break;
         case NIR_SCOPE_QUEUE_FAMILY: fprintf(fp, "QUEUE_FAMILY"); break;
         case NIR_SCOPE_WORKGROUP: fprintf(fp, "WORKGROUP"); break;

@@ -2137,14 +2137,8 @@ vtn_emit_scoped_memory_barrier(struct vtn_builder *b, SpvScope scope,
    if (nir_semantics == 0 || modes == 0)
       return;
 
-   nir_scope nir_scope = vtn_scope_to_nir_scope(b, scope);
-   nir_intrinsic_instr *intrin =
-      nir_intrinsic_instr_create(b->shader, nir_intrinsic_scoped_memory_barrier);
-   nir_intrinsic_set_memory_semantics(intrin, nir_semantics);
-   nir_intrinsic_set_memory_modes(intrin, modes);
-   nir_intrinsic_set_memory_scope(intrin, nir_scope);
-   nir_builder_instr_insert(&b->nb, &intrin->instr);
+   nir_scope nir_mem_scope = vtn_scope_to_nir_scope(b, scope);
+   nir_scoped_barrier(&b->nb, NIR_SCOPE_NONE, nir_mem_scope, nir_semantics, modes);
 }
 
 struct vtn_ssa_value *
@@ -3525,7 +3519,7 @@ void
 vtn_emit_memory_barrier(struct vtn_builder *b, SpvScope scope,
                         SpvMemorySemanticsMask semantics)
 {
-   if (b->shader->options->use_scoped_memory_barrier) {
+   if (b->shader->options->use_scoped_barrier) {
       vtn_emit_scoped_memory_barrier(b, scope, semantics);
       return;
    }

@@ -48,7 +48,7 @@
    .use_interpolated_input_intrinsics = true, \
    .vertex_id_zero_based = true, \
    .lower_base_vertex = true, \
-   .use_scoped_memory_barrier = true, \
+   .use_scoped_barrier = true, \
    .support_8bit_alu = true, \
    .support_16bit_alu = true

@@ -4225,7 +4225,8 @@ fs_visitor::nir_emit_intrinsic(const fs_builder &bld, nir_intrinsic_instr *instr
       break;
    }
 
-   case nir_intrinsic_scoped_memory_barrier:
+   case nir_intrinsic_scoped_barrier:
+      assert(nir_intrinsic_execution_scope(instr) == NIR_SCOPE_NONE);
    case nir_intrinsic_group_memory_barrier:
    case nir_intrinsic_memory_barrier_shared:
    case nir_intrinsic_memory_barrier_buffer:
@@ -4239,7 +4240,7 @@ fs_visitor::nir_emit_intrinsic(const fs_builder &bld, nir_intrinsic_instr *instr
         SHADER_OPCODE_INTERLOCK : SHADER_OPCODE_MEMORY_FENCE;
 
      switch (instr->intrinsic) {
-      case nir_intrinsic_scoped_memory_barrier: {
+      case nir_intrinsic_scoped_barrier: {
         nir_variable_mode modes = nir_intrinsic_memory_modes(instr);
         l3_fence = modes & (nir_var_shader_out |
                             nir_var_mem_ssbo |

@@ -700,8 +700,10 @@ vec4_visitor::nir_emit_intrinsic(nir_intrinsic_instr *instr)
      break;
   }
 
-   case nir_intrinsic_memory_barrier:
-   case nir_intrinsic_scoped_memory_barrier: {
+   case nir_intrinsic_scoped_barrier:
+      assert(nir_intrinsic_execution_scope(instr) == NIR_SCOPE_NONE);
+      /* Fall through. */
+   case nir_intrinsic_memory_barrier: {
      const vec4_builder bld =
         vec4_builder(this).at_end().annotate(current_annotation, base_ir);
      const dst_reg tmp = bld.vgrf(BRW_REGISTER_TYPE_UD);
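
Finally, a hedged backend-side sketch of the single-instruction case mentioned in the commit message: on hardware that can combine execution synchronization with a memory fence, both components of a scoped_barrier can be folded into one barrier instruction. emit_hw_barrier() is a hypothetical driver hook, not an existing API.

/* Hypothetical driver hook standing in for real barrier emission. */
static void emit_hw_barrier(bool control, bool memory,
                            nir_variable_mode modes,
                            nir_memory_semantics semantics);

static void
lower_scoped_barrier(nir_intrinsic_instr *instr)
{
   bool control = nir_intrinsic_execution_scope(instr) != NIR_SCOPE_NONE;
   bool memory = nir_intrinsic_memory_scope(instr) != NIR_SCOPE_NONE;

   /* One combined hardware barrier instead of two back-to-back
    * instructions lowered from separate NIR intrinsics. */
   emit_hw_barrier(control, memory,
                   nir_intrinsic_memory_modes(instr),
                   nir_intrinsic_memory_semantics(instr));
}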