1/*
2 * Copyright © 2019 Collabora, Ltd.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 *
23 * Authors (Collabora):
24 *    Alyssa Rosenzweig <alyssa.rosenzweig@collabora.com>
25 */
26
27#include "nir.h"
28#include "nir_builder.h"
29
/*
 * Lowers SSBOs to globals, for hardware that lacks native SSBO support. When
 * lowering, *_ssbo_* instructions will become *_global_* instructions,
 * augmented with load_ssbo_address.
 *
 * DOES NOT PERFORM BOUNDS CHECKING. DO NOT USE IN PRODUCTION ON UNTRUSTED
 * CONTEXTS INCLUDING WEBGL 2.
 */
38
39static nir_intrinsic_op
40lower_ssbo_op(nir_intrinsic_op op)
41{
42   switch (op) {
43   case nir_intrinsic_load_ssbo:
44      return nir_intrinsic_load_global;
45
46   case nir_intrinsic_store_ssbo:
47      return nir_intrinsic_store_global;
48
49   case nir_intrinsic_ssbo_atomic_add:
50      return nir_intrinsic_global_atomic_add;
51   case nir_intrinsic_ssbo_atomic_imin:
52      return nir_intrinsic_global_atomic_imin;
53   case nir_intrinsic_ssbo_atomic_umin:
54      return nir_intrinsic_global_atomic_umin;
55   case nir_intrinsic_ssbo_atomic_imax:
56      return nir_intrinsic_global_atomic_imax;
57   case nir_intrinsic_ssbo_atomic_umax:
58      return nir_intrinsic_global_atomic_umax;
59   case nir_intrinsic_ssbo_atomic_and:
60      return nir_intrinsic_global_atomic_and;
61   case nir_intrinsic_ssbo_atomic_or:
62      return nir_intrinsic_global_atomic_or;
63   case nir_intrinsic_ssbo_atomic_xor:
64      return nir_intrinsic_global_atomic_xor;
65   case nir_intrinsic_ssbo_atomic_exchange:
66      return nir_intrinsic_global_atomic_exchange;
67   case nir_intrinsic_ssbo_atomic_comp_swap:
68      return nir_intrinsic_global_atomic_comp_swap;
69
70   case nir_intrinsic_ssbo_atomic_fadd:
71      return nir_intrinsic_global_atomic_fadd;
72   case nir_intrinsic_ssbo_atomic_fmin:
73      return nir_intrinsic_global_atomic_fmin;
74   case nir_intrinsic_ssbo_atomic_fmax:
75      return nir_intrinsic_global_atomic_fmax;
76   case nir_intrinsic_ssbo_atomic_fcomp_swap:
77      return nir_intrinsic_global_atomic_fcomp_swap;
78
79   default:
80      unreachable("Invalid SSBO op");
81   }
82}
83
84/* Like SSBO property sysvals, though SSBO index may be indirect. C.f.
85 * nir_load_system_value */
86
87static inline nir_ssa_def *
88nir_load_ssbo_prop(nir_builder *b, nir_intrinsic_op op,
89      nir_src *idx, unsigned bitsize)
90{
91   nir_intrinsic_instr *load = nir_intrinsic_instr_create(b->shader, op);
92   load->num_components = 1;
93   nir_src_copy(&load->src[0], idx);
94   nir_ssa_dest_init(&load->instr, &load->dest, 1, bitsize, NULL);
95   nir_builder_instr_insert(b, &load->instr);
96   return &load->dest.ssa;
97}
98
/* Convenience wrapper: nir_ssbo_prop(b, load_ssbo_address, idx, 64) pastes
 * the property name onto the nir_intrinsic_ prefix and forwards to
 * nir_load_ssbo_prop. */
#define nir_ssbo_prop(b, prop, index, bitsize) \
   nir_load_ssbo_prop(b, nir_intrinsic_##prop, index, bitsize)
101
/* Rewrites one SSBO intrinsic as the equivalent global-memory intrinsic,
 * computing the 64-bit address as the SSBO's base address (queried via
 * load_ssbo_address) plus the byte offset. Returns the new SSA destination,
 * or NULL for stores (which produce no value). The original instruction is
 * not removed here; the caller is responsible for that. */
static nir_ssa_def *
lower_ssbo_instr(nir_builder *b, nir_intrinsic_instr *intr)
{
   nir_intrinsic_op op = lower_ssbo_op(intr->intrinsic);
   bool is_store = op == nir_intrinsic_store_global;
   bool is_atomic = !is_store && op != nir_intrinsic_load_global;

   /* We have to calculate the address:
    *
    * &(SSBO[offset]) = &SSBO + offset
    */

   /* The SSBO index lives in src[1] for stores (src[0] is the value being
    * written) and in src[0] for loads and atomics. */
   nir_src index = intr->src[is_store ? 1 : 0];
   nir_src *offset_src = nir_get_io_offset_src(intr);
   nir_ssa_def *offset = nir_ssa_for_src(b, *offset_src, 1);

   nir_ssa_def *address =
      nir_iadd(b,
            nir_ssbo_prop(b, load_ssbo_address, &index, 64),
            nir_u2u64(b, offset));

   /* Create the replacement intrinsic */

   nir_intrinsic_instr *global =
      nir_intrinsic_instr_create(b->shader, op);

   global->num_components = intr->num_components;
   /* The computed address occupies the same source slot the SSBO index
    * held in the original intrinsic. */
   global->src[is_store ? 1 : 0] = nir_src_for_ssa(address);

   /* Loads and stores carry alignment indices that must be forwarded;
    * atomics have none. */
   if (!is_atomic) {
      nir_intrinsic_set_align_mul(global, nir_intrinsic_align_mul(intr));
      nir_intrinsic_set_align_offset(global, nir_intrinsic_align_offset(intr));
   }

   if (is_store) {
      /* store_global src[0] is the value to write, same as store_ssbo. */
      nir_src_copy(&global->src[0], &intr->src[0]);
      nir_intrinsic_set_write_mask(global, nir_intrinsic_write_mask(intr));
   } else {
      nir_ssa_dest_init(&global->instr, &global->dest,
                        intr->dest.ssa.num_components,
                        intr->dest.ssa.bit_size, NULL);

      if (is_atomic) {
         /* Forward the atomic data operand; comp_swap variants carry a
          * second (compare) operand as well, detected via num_srcs. */
         nir_src_copy(&global->src[1], &intr->src[2]);
         if (nir_intrinsic_infos[op].num_srcs > 2)
            nir_src_copy(&global->src[2], &intr->src[3]);
      }
   }

   nir_builder_instr_insert(b, &global->instr);
   return is_store ? NULL : &global->dest.ssa;
}
154
155static bool
156should_lower_ssbo_instr(const nir_instr *instr)
157{
158   if (instr->type != nir_instr_type_intrinsic)
159      return false;
160
161   const nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
162
163   switch (intr->intrinsic) {
164   case nir_intrinsic_load_ssbo:
165   case nir_intrinsic_store_ssbo:
166   case nir_intrinsic_ssbo_atomic_add:
167   case nir_intrinsic_ssbo_atomic_imin:
168   case nir_intrinsic_ssbo_atomic_umin:
169   case nir_intrinsic_ssbo_atomic_imax:
170   case nir_intrinsic_ssbo_atomic_umax:
171   case nir_intrinsic_ssbo_atomic_and:
172   case nir_intrinsic_ssbo_atomic_or:
173   case nir_intrinsic_ssbo_atomic_xor:
174   case nir_intrinsic_ssbo_atomic_exchange:
175   case nir_intrinsic_ssbo_atomic_comp_swap:
176   case nir_intrinsic_ssbo_atomic_fadd:
177   case nir_intrinsic_ssbo_atomic_fmin:
178   case nir_intrinsic_ssbo_atomic_fmax:
179   case nir_intrinsic_ssbo_atomic_fcomp_swap:
180      return true;
181   default:
182      return false;
183   }
184
185   return false;
186}
187
188bool
189nir_lower_ssbo(nir_shader *shader)
190{
191   bool progress = false;
192
193   nir_foreach_function(function, shader) {
194      nir_function_impl *impl = function->impl;
195      nir_builder b;
196      nir_builder_init(&b, impl);
197
198      nir_foreach_block(block, impl) {
199         nir_foreach_instr_safe(instr, block) {
200            if (!should_lower_ssbo_instr(instr)) continue;
201            progress = true;
202            b.cursor = nir_before_instr(instr);
203
204            nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
205            nir_ssa_def *replace = lower_ssbo_instr(&b, intr);
206
207            if (replace)  {
208               nir_ssa_def_rewrite_uses(&intr->dest.ssa,
209                                     replace);
210            }
211
212            nir_instr_remove(instr);
213         }
214      }
215   }
216
217   return progress;
218}
219