/*
 * Copyright © 2017 Red Hat
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Rob Clark <robclark@freedesktop.org>
 */

#include "nir.h"
#include "nir_builder.h"

/*
 * Remap atomic counters to SSBOs, starting from the shader's next SSBO slot.
 */
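
/*
 * An illustrative example (GLSL; not taken from this file): a counter
 * declared as
 *
 *    layout(binding = 0) uniform atomic_uint counter0;
 *
 * and used via atomicCounterIncrement(counter0) gets rewritten below into
 * an ssbo_atomic_add of +1 whose buffer index is (ssbo_offset + 0), with
 * the counter's byte offset within its buffer as the offset source.
 */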

static bool
lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
{
   nir_intrinsic_op op;

   b->cursor = nir_before_instr(&instr->instr);

   switch (instr->intrinsic) {
   case nir_intrinsic_memory_barrier_atomic_counter:
      /* Atomic counters are now SSBOs so memoryBarrierAtomicCounter() is now
       * memoryBarrierBuffer().
       */
      instr->intrinsic = nir_intrinsic_memory_barrier_buffer;
      return true;

   case nir_intrinsic_atomic_counter_inc:
   case nir_intrinsic_atomic_counter_add:
   case nir_intrinsic_atomic_counter_pre_dec:
   case nir_intrinsic_atomic_counter_post_dec:
      /* inc and dec get remapped to add: */
      op = nir_intrinsic_ssbo_atomic_add;
      break;
   case nir_intrinsic_atomic_counter_read:
      op = nir_intrinsic_load_ssbo;
      break;
   case nir_intrinsic_atomic_counter_min:
      op = nir_intrinsic_ssbo_atomic_umin;
      break;
   case nir_intrinsic_atomic_counter_max:
      op = nir_intrinsic_ssbo_atomic_umax;
      break;
   case nir_intrinsic_atomic_counter_and:
      op = nir_intrinsic_ssbo_atomic_and;
      break;
   case nir_intrinsic_atomic_counter_or:
      op = nir_intrinsic_ssbo_atomic_or;
      break;
   case nir_intrinsic_atomic_counter_xor:
      op = nir_intrinsic_ssbo_atomic_xor;
      break;
   case nir_intrinsic_atomic_counter_exchange:
      op = nir_intrinsic_ssbo_atomic_exchange;
      break;
   case nir_intrinsic_atomic_counter_comp_swap:
      op = nir_intrinsic_ssbo_atomic_comp_swap;
      break;
   default:
      return false;
   }

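   /* On atomic_counter_* intrinsics, nir_intrinsic_base() holds the
    * counter's binding point, so the replacement buffer index is that
    * binding shifted past the shader's pre-existing SSBOs:
    */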
   nir_ssa_def *buffer = nir_imm_int(b, ssbo_offset + nir_intrinsic_base(instr));
   nir_ssa_def *temp = NULL;
   nir_intrinsic_instr *new_instr =
         nir_intrinsic_instr_create(b->shader, op);

   /* a couple instructions need special handling since they don't map
    * 1:1 with ssbo atomics
    */
   switch (instr->intrinsic) {
   case nir_intrinsic_atomic_counter_inc:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, +1 } */
      temp = nir_imm_int(b, +1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0]);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_pre_dec:
   case nir_intrinsic_atomic_counter_post_dec:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, -1 } */
      /* NOTE semantic difference so we adjust the return value below */
      temp = nir_imm_int(b, -1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0]);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_read:
      /* remapped to load_ssbo: { buffer_idx, offset } */
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0]);
      break;
   default:
      /* remapped to ssbo_atomic_x: { buffer_idx, offset, data, (compare)? } */
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0]);
      nir_src_copy(&new_instr->src[2], &instr->src[1]);
      if (op == nir_intrinsic_ssbo_atomic_comp_swap ||
          op == nir_intrinsic_ssbo_atomic_fcomp_swap)
         nir_src_copy(&new_instr->src[3], &instr->src[2]);
      break;
   }

   if (new_instr->intrinsic == nir_intrinsic_load_ssbo) {
      nir_intrinsic_set_align(new_instr, 4, 0);

      /* we could be replacing an intrinsic with fixed # of dest
       * num_components with one that has variable number.  So
       * best to take this from the dest:
       */
      new_instr->num_components = instr->dest.ssa.num_components;
   }

   nir_ssa_dest_init(&new_instr->instr, &new_instr->dest,
                     instr->dest.ssa.num_components,
                     instr->dest.ssa.bit_size, NULL);
   nir_instr_insert_before(&instr->instr, &new_instr->instr);
   nir_instr_remove(&instr->instr);

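   /* atomicCounterDecrement() ("pre_dec") returns the value *after* the
    * decrement, while ssbo_atomic_add returns the value *before* the
    * operation, so add the -1 delta to the atomic's result to recover the
    * post-decrement value:
    */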
   if (instr->intrinsic == nir_intrinsic_atomic_counter_pre_dec) {
      b->cursor = nir_after_instr(&new_instr->instr);
      nir_ssa_def *result = nir_iadd(b, &new_instr->dest.ssa, temp);
      nir_ssa_def_rewrite_uses(&instr->dest.ssa, result);
   } else {
      nir_ssa_def_rewrite_uses(&instr->dest.ssa, &new_instr->dest.ssa);
   }

   return true;
}

static bool
is_atomic_uint(const struct glsl_type *type)
{
   if (glsl_get_base_type(type) == GLSL_TYPE_ARRAY)
      return is_atomic_uint(glsl_get_array_element(type));
   return glsl_get_base_type(type) == GLSL_TYPE_ATOMIC_UINT;
}

bool
nir_lower_atomics_to_ssbo(nir_shader *shader)
{
   unsigned ssbo_offset = shader->info.num_ssbos;
   bool progress = false;

   nir_foreach_function(function, shader) {
      if (function->impl) {
         nir_builder builder;
         nir_builder_init(&builder, function->impl);
         nir_foreach_block(block, function->impl) {
            nir_foreach_instr_safe(instr, block) {
               if (instr->type == nir_instr_type_intrinsic)
                  progress |= lower_instr(nir_instr_as_intrinsic(instr),
                                          ssbo_offset, &builder);
            }
         }

         nir_metadata_preserve(function->impl, nir_metadata_block_index |
                                               nir_metadata_dominance);
      }
   }

   if (progress) {
      /* replace atomic_uint uniforms with SSBOs: */
      unsigned replaced = 0;
      nir_foreach_uniform_variable_safe(var, shader) {
         if (is_atomic_uint(var->type)) {
            exec_node_remove(&var->node);

            if (replaced & (1 << var->data.binding))
               continue;

            nir_variable *ssbo;
            char name[16];

            /* A length of 0 is used to denote unsized arrays */
            const struct glsl_type *type = glsl_array_type(glsl_uint_type(), 0, 0);

            snprintf(name, sizeof(name), "counter%d", var->data.binding);

            ssbo = nir_variable_create(shader, nir_var_mem_ssbo, type, name);
            ssbo->data.binding = ssbo_offset + var->data.binding;
            ssbo->data.explicit_binding = var->data.explicit_binding;

            /* We can't use num_abos, because it only represents the number of
             * active atomic counters, and currently, unlike SSBOs, they aren't
             * compacted, so num_abos actually isn't a bound on the index passed
             * to nir_intrinsic_atomic_counter_*.  E.g. if we have a single
             * atomic counter declared like:
             *
             *    layout(binding=1) atomic_uint counter0;
             *
             * then when we lower accesses to it the atomic_counter_* intrinsics
             * will have 1 as the index but num_abos will still be 1.
             */
            shader->info.num_ssbos = MAX2(shader->info.num_ssbos,
                                          ssbo->data.binding + 1);

            struct glsl_struct_field field = {
                  .type = type,
                  .name = "counters",
                  .location = -1,
            };

            ssbo->interface_type =
                  glsl_interface_type(&field, 1, GLSL_INTERFACE_PACKING_STD430,
                                      false, "counters");

            replaced |= (1 << var->data.binding);
         }
      }

      shader->info.num_abos = 0;
   }

   return progress;
}
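
/*
 * A sketch of typical caller usage (illustrative; assumes the standard
 * NIR_PASS helper from nir.h rather than anything defined in this file):
 *
 *    bool progress = false;
 *    NIR_PASS(progress, nir, nir_lower_atomics_to_ssbo);
 */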