/*
 * Copyright © 2014 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Connor Abbott (cwabbott0@gmail.com)
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

/*
 * This lowering pass converts references to input/output variables
 * (load_var/store_var and the variable atomics) into the corresponding
 * input/output, uniform, and shared-memory intrinsics, which address their
 * data with a base location plus an offset rather than a dereference chain.
 */
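
/*
 * Rough illustration (not literal NIR syntax): a load_var of an ordinary
 * input becomes a load_input intrinsic whose base is the variable's
 * driver_location and whose single source is the offset computed by
 * get_io_offset() below.  For per-vertex inputs (tessellation and geometry
 * stages), the outermost array index is split out into a separate
 * vertex-index source and load_per_vertex_input is used instead.
 */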

#include "nir.h"
#include "nir_builder.h"

struct lower_io_state {
   nir_builder builder;
   void *mem_ctx;
   int (*type_size)(const struct glsl_type *type);
   nir_variable_mode modes;
};
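
/*
 * type_size() reports how many locations a variable of the given type
 * occupies; the driver_location values assigned below and the offsets built
 * in get_io_offset() are all expressed in these units.  A minimal sketch of
 * such a callback is shown here for illustration only; the helper used and
 * its exact signature are assumptions, and real drivers supply their own
 * per-stage callbacks.
 */
#if 0
static int
example_type_size_vec4(const struct glsl_type *type)
{
   /* Count one location per vec4 slot; arrays and matrices multiply out. */
   return glsl_count_attribute_slots(type, false);
}
#endif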

void
nir_assign_var_locations(struct exec_list *var_list, unsigned *size,
                         unsigned base_offset,
                         int (*type_size)(const struct glsl_type *))
{
   unsigned location = 0;

   /* There are 32 regular and 32 patch varyings allowed */
   int locations[64][2];
   for (unsigned i = 0; i < 64; i++) {
      for (unsigned j = 0; j < 2; j++)
         locations[i][j] = -1;
   }

   nir_foreach_variable(var, var_list) {
      /*
       * UBOs and SSBOs have their own address spaces, so don't count them
       * towards the number of global uniforms.
       */
      if ((var->data.mode == nir_var_uniform ||
           var->data.mode == nir_var_shader_storage) &&
          var->interface_type != NULL)
         continue;

      /* Make sure we give the same location to varyings packed with
       * ARB_enhanced_layouts.
       */
      int idx = var->data.location - base_offset;
      if (base_offset && idx >= 0) {
         assert(idx < ARRAY_SIZE(locations));

         if (locations[idx][var->data.index] == -1) {
            var->data.driver_location = location;
            locations[idx][var->data.index] = location;
            location += type_size(var->type);
         } else {
            var->data.driver_location = locations[idx][var->data.index];
         }
      } else {
         var->data.driver_location = location;
         location += type_size(var->type);
      }
   }

   *size = location;
}

/**
 * Returns true if we're processing a stage whose inputs are arrays indexed
 * by a vertex number (such as geometry shader inputs).
 */
static bool
is_per_vertex_input(struct lower_io_state *state, nir_variable *var)
{
   gl_shader_stage stage = state->builder.shader->stage;

   return var->data.mode == nir_var_shader_in && !var->data.patch &&
          (stage == MESA_SHADER_TESS_CTRL ||
           stage == MESA_SHADER_TESS_EVAL ||
           stage == MESA_SHADER_GEOMETRY);
}

static bool
is_per_vertex_output(struct lower_io_state *state, nir_variable *var)
{
   gl_shader_stage stage = state->builder.shader->stage;
   return var->data.mode == nir_var_shader_out && !var->data.patch &&
          stage == MESA_SHADER_TESS_CTRL;
}
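
/*
 * Builds the offset source for a lowered I/O intrinsic by walking the deref
 * chain: constant array indices and struct fields are folded into immediates,
 * while indirect indices are multiplied by the type_size() of the element and
 * added at runtime.  For example (illustrative only), a deref such as
 * in[v].m[i] on a per-vertex input yields vertex_index = v and
 * offset = field_offset(m) + i * type_size(element of m).
 */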

static nir_ssa_def *
get_io_offset(nir_builder *b, nir_deref_var *deref,
              nir_ssa_def **vertex_index,
              int (*type_size)(const struct glsl_type *))
{
   nir_deref *tail = &deref->deref;

   /* For per-vertex input arrays (i.e. geometry shader inputs), keep the
    * outermost array index separate.  Process the rest normally.
    */
   if (vertex_index != NULL) {
      tail = tail->child;
      assert(tail->deref_type == nir_deref_type_array);
      nir_deref_array *deref_array = nir_deref_as_array(tail);

      nir_ssa_def *vtx = nir_imm_int(b, deref_array->base_offset);
      if (deref_array->deref_array_type == nir_deref_array_type_indirect) {
         vtx = nir_iadd(b, vtx, nir_ssa_for_src(b, deref_array->indirect, 1));
      }
      *vertex_index = vtx;
   }

   /* Just emit code and let constant-folding go to town */
   nir_ssa_def *offset = nir_imm_int(b, 0);

   while (tail->child != NULL) {
      const struct glsl_type *parent_type = tail->type;
      tail = tail->child;

      if (tail->deref_type == nir_deref_type_array) {
         nir_deref_array *deref_array = nir_deref_as_array(tail);
         unsigned size = type_size(tail->type);

         offset = nir_iadd(b, offset,
                           nir_imm_int(b, size * deref_array->base_offset));

         if (deref_array->deref_array_type == nir_deref_array_type_indirect) {
            nir_ssa_def *mul =
               nir_imul(b, nir_imm_int(b, size),
                        nir_ssa_for_src(b, deref_array->indirect, 1));

            offset = nir_iadd(b, offset, mul);
         }
      } else if (tail->deref_type == nir_deref_type_struct) {
         nir_deref_struct *deref_struct = nir_deref_as_struct(tail);

         unsigned field_offset = 0;
         for (unsigned i = 0; i < deref_struct->index; i++) {
            field_offset += type_size(glsl_get_struct_field(parent_type, i));
         }
         offset = nir_iadd(b, offset, nir_imm_int(b, field_offset));
      }
   }

   return offset;
}
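
/*
 * The lower_load/lower_store/lower_atomic helpers below build the replacement
 * intrinsic but do not insert it; nir_lower_io_block() wires up the
 * destination and swaps it in for the original deref-based instruction.  For
 * loads, the sources are (vertex index, offset) for the per-vertex variants
 * and just (offset) otherwise; for stores, the value being written stays in
 * src[0] with the vertex index and offset following it; shared atomics put
 * the offset in src[0] ahead of the data operands.
 */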

static nir_intrinsic_instr *
lower_load(nir_intrinsic_instr *intrin, struct lower_io_state *state,
           nir_ssa_def *vertex_index, nir_ssa_def *offset)
{
   nir_variable *var = intrin->variables[0]->var;
   nir_variable_mode mode = var->data.mode;

   nir_intrinsic_op op;
   switch (mode) {
   case nir_var_shader_in:
      op = vertex_index ? nir_intrinsic_load_per_vertex_input :
                          nir_intrinsic_load_input;
      break;
   case nir_var_shader_out:
      op = vertex_index ? nir_intrinsic_load_per_vertex_output :
                          nir_intrinsic_load_output;
      break;
   case nir_var_uniform:
      op = nir_intrinsic_load_uniform;
      break;
   case nir_var_shared:
      op = nir_intrinsic_load_shared;
      break;
   default:
      unreachable("Unknown variable mode");
   }

   nir_intrinsic_instr *load = nir_intrinsic_instr_create(state->mem_ctx, op);
   load->num_components = intrin->num_components;

   nir_intrinsic_set_base(load, var->data.driver_location);
   if (mode == nir_var_shader_in || mode == nir_var_shader_out)
      nir_intrinsic_set_component(load, var->data.location_frac);

   if (load->intrinsic == nir_intrinsic_load_uniform)
      nir_intrinsic_set_range(load, state->type_size(var->type));

   if (vertex_index)
      load->src[0] = nir_src_for_ssa(vertex_index);

   load->src[vertex_index ? 1 : 0] = nir_src_for_ssa(offset);

   return load;
}

static nir_intrinsic_instr *
lower_store(nir_intrinsic_instr *intrin, struct lower_io_state *state,
            nir_ssa_def *vertex_index, nir_ssa_def *offset)
{
   nir_variable *var = intrin->variables[0]->var;
   nir_variable_mode mode = var->data.mode;

   nir_intrinsic_op op;
   if (mode == nir_var_shared) {
      op = nir_intrinsic_store_shared;
   } else {
      assert(mode == nir_var_shader_out);
      op = vertex_index ? nir_intrinsic_store_per_vertex_output :
                          nir_intrinsic_store_output;
   }

   nir_intrinsic_instr *store = nir_intrinsic_instr_create(state->mem_ctx, op);
   store->num_components = intrin->num_components;

   nir_src_copy(&store->src[0], &intrin->src[0], store);

   nir_intrinsic_set_base(store, var->data.driver_location);

   if (mode == nir_var_shader_out)
      nir_intrinsic_set_component(store, var->data.location_frac);

   nir_intrinsic_set_write_mask(store, nir_intrinsic_write_mask(intrin));

   if (vertex_index)
      store->src[1] = nir_src_for_ssa(vertex_index);

   store->src[vertex_index ? 2 : 1] = nir_src_for_ssa(offset);

   return store;
}

static nir_intrinsic_instr *
lower_atomic(nir_intrinsic_instr *intrin, struct lower_io_state *state,
             nir_ssa_def *offset)
{
   nir_variable *var = intrin->variables[0]->var;

   assert(var->data.mode == nir_var_shared);

   nir_intrinsic_op op;
   switch (intrin->intrinsic) {
#define OP(O) case nir_intrinsic_var_##O: op = nir_intrinsic_shared_##O; break;
   OP(atomic_exchange)
   OP(atomic_comp_swap)
   OP(atomic_add)
   OP(atomic_imin)
   OP(atomic_umin)
   OP(atomic_imax)
   OP(atomic_umax)
   OP(atomic_and)
   OP(atomic_or)
   OP(atomic_xor)
#undef OP
   default:
      unreachable("Invalid atomic");
   }

   nir_intrinsic_instr *atomic =
      nir_intrinsic_instr_create(state->mem_ctx, op);

   nir_intrinsic_set_base(atomic, var->data.driver_location);

   atomic->src[0] = nir_src_for_ssa(offset);
   /* Copy the remaining data sources from the variable atomic. */
   for (unsigned i = 0; i < nir_intrinsic_infos[intrin->intrinsic].num_srcs;
        i++) {
      nir_src_copy(&atomic->src[i+1], &intrin->src[i], atomic);
   }

   return atomic;
}

static bool
nir_lower_io_block(nir_block *block,
                   struct lower_io_state *state)
{
   nir_builder *b = &state->builder;

   nir_foreach_instr_safe(instr, block) {
      if (instr->type != nir_instr_type_intrinsic)
         continue;

      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);

      switch (intrin->intrinsic) {
      case nir_intrinsic_load_var:
      case nir_intrinsic_store_var:
      case nir_intrinsic_var_atomic_add:
      case nir_intrinsic_var_atomic_imin:
      case nir_intrinsic_var_atomic_umin:
      case nir_intrinsic_var_atomic_imax:
      case nir_intrinsic_var_atomic_umax:
      case nir_intrinsic_var_atomic_and:
      case nir_intrinsic_var_atomic_or:
      case nir_intrinsic_var_atomic_xor:
      case nir_intrinsic_var_atomic_exchange:
      case nir_intrinsic_var_atomic_comp_swap:
         /* We can lower the I/O for this NIR intrinsic */
         break;
      default:
         /* We can't lower the I/O for this NIR intrinsic, so skip it */
         continue;
      }

      nir_variable *var = intrin->variables[0]->var;
      nir_variable_mode mode = var->data.mode;

      if ((state->modes & mode) == 0)
         continue;

      if (mode != nir_var_shader_in &&
          mode != nir_var_shader_out &&
          mode != nir_var_shared &&
          mode != nir_var_uniform)
         continue;

      b->cursor = nir_before_instr(instr);

      const bool per_vertex =
         is_per_vertex_input(state, var) || is_per_vertex_output(state, var);

      nir_ssa_def *offset;
      nir_ssa_def *vertex_index = NULL;

      offset = get_io_offset(b, intrin->variables[0],
                             per_vertex ? &vertex_index : NULL,
                             state->type_size);

      nir_intrinsic_instr *replacement;

      switch (intrin->intrinsic) {
      case nir_intrinsic_load_var:
         replacement = lower_load(intrin, state, vertex_index, offset);
         break;

      case nir_intrinsic_store_var:
         replacement = lower_store(intrin, state, vertex_index, offset);
         break;

      case nir_intrinsic_var_atomic_add:
      case nir_intrinsic_var_atomic_imin:
      case nir_intrinsic_var_atomic_umin:
      case nir_intrinsic_var_atomic_imax:
      case nir_intrinsic_var_atomic_umax:
      case nir_intrinsic_var_atomic_and:
      case nir_intrinsic_var_atomic_or:
      case nir_intrinsic_var_atomic_xor:
      case nir_intrinsic_var_atomic_exchange:
      case nir_intrinsic_var_atomic_comp_swap:
         assert(vertex_index == NULL);
         replacement = lower_atomic(intrin, state, offset);
         break;

      default:
         continue;
      }

      if (nir_intrinsic_infos[intrin->intrinsic].has_dest) {
         if (intrin->dest.is_ssa) {
            nir_ssa_dest_init(&replacement->instr, &replacement->dest,
                              intrin->dest.ssa.num_components,
                              intrin->dest.ssa.bit_size, NULL);
            nir_ssa_def_rewrite_uses(&intrin->dest.ssa,
                                     nir_src_for_ssa(&replacement->dest.ssa));
         } else {
            nir_dest_copy(&replacement->dest, &intrin->dest, state->mem_ctx);
         }
      }

      nir_instr_insert_before(&intrin->instr, &replacement->instr);
      nir_instr_remove(&intrin->instr);
   }

   return true;
}

static void
nir_lower_io_impl(nir_function_impl *impl,
                  nir_variable_mode modes,
                  int (*type_size)(const struct glsl_type *))
{
   struct lower_io_state state;

   nir_builder_init(&state.builder, impl);
   state.mem_ctx = ralloc_parent(impl);
   state.modes = modes;
   state.type_size = type_size;

   nir_foreach_block(block, impl) {
      nir_lower_io_block(block, &state);
   }

   nir_metadata_preserve(impl, nir_metadata_block_index |
                               nir_metadata_dominance);
}

void
nir_lower_io(nir_shader *shader, nir_variable_mode modes,
             int (*type_size)(const struct glsl_type *))
{
   nir_foreach_function(function, shader) {
      if (function->impl)
         nir_lower_io_impl(function->impl, modes, type_size);
   }
}
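
/*
 * Hypothetical usage sketch, for illustration only (this file does not call
 * the pass itself): a driver would typically assign driver_locations first
 * and then lower the modes it cares about, passing the same type_size
 * callback to both steps.  The callback reuses the example_type_size_vec4
 * sketch above; the zero base_offset and the nir_shader field names are
 * assumptions.
 */
#if 0
static void
example_lower_shader_io(nir_shader *shader)
{
   nir_assign_var_locations(&shader->inputs, &shader->num_inputs, 0,
                            example_type_size_vec4);
   nir_assign_var_locations(&shader->outputs, &shader->num_outputs, 0,
                            example_type_size_vec4);

   nir_lower_io(shader, nir_var_shader_in | nir_var_shader_out,
                example_type_size_vec4);
}
#endif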

/**
 * Return the offset source for a load/store intrinsic.
 */
nir_src *
nir_get_io_offset_src(nir_intrinsic_instr *instr)
{
   switch (instr->intrinsic) {
   case nir_intrinsic_load_input:
   case nir_intrinsic_load_output:
   case nir_intrinsic_load_uniform:
      return &instr->src[0];
   case nir_intrinsic_load_ubo:
   case nir_intrinsic_load_ssbo:
   case nir_intrinsic_load_per_vertex_input:
   case nir_intrinsic_load_per_vertex_output:
   case nir_intrinsic_store_output:
      return &instr->src[1];
   case nir_intrinsic_store_ssbo:
   case nir_intrinsic_store_per_vertex_output:
      return &instr->src[2];
   default:
      return NULL;
   }
}

/**
 * Return the vertex index source for a load/store per_vertex intrinsic.
 */
nir_src *
nir_get_io_vertex_index_src(nir_intrinsic_instr *instr)
{
   switch (instr->intrinsic) {
   case nir_intrinsic_load_per_vertex_input:
   case nir_intrinsic_load_per_vertex_output:
      return &instr->src[0];
   case nir_intrinsic_store_per_vertex_output:
      return &instr->src[1];
   default:
      return NULL;
   }
}
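
/*
 * Hypothetical usage sketch (illustration only): after lowering, a backend
 * can combine these helpers with constant folding to tell direct from
 * indirect I/O.  The nir_src_as_const_value() helper and the u32[] member of
 * nir_const_value are assumed to be available as declared in nir.h.
 */
#if 0
static bool
example_io_offset_is_const(nir_intrinsic_instr *instr, unsigned *const_offset)
{
   nir_src *offset_src = nir_get_io_offset_src(instr);
   if (offset_src == NULL)
      return false;

   nir_const_value *val = nir_src_as_const_value(*offset_src);
   if (val == NULL)
      return false;

   *const_offset = val->u32[0];
   return true;
}
#endif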