Lines matching defs:instr
164 agx_emit_load_const(agx_builder *b, nir_load_const_instr *instr)
167 unsigned bit_size = instr->def.bit_size;
168 assert(instr->def.num_components == 1);
173 agx_get_index(instr->def.index, agx_size_for_bits(bit_size)),
174 nir_const_value_as_uint(instr->value[0], bit_size));
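
A minimal reconstruction of the constant loader these lines trace, assuming lines 173-174 are the operand list of an agx_mov_imm_to call (hedged; the actual move helper may differ):

   static void
   agx_emit_load_const(agx_builder *b, nir_load_const_instr *instr)
   {
      /* Scalarized by this point, so each load_const is a single component */
      unsigned bit_size = instr->def.bit_size;
      assert(instr->def.num_components == 1);

      /* Move the immediate into the SSA destination; later passes can fold
       * it into users where profitable */
      agx_mov_imm_to(b,
                     agx_get_index(instr->def.index, agx_size_for_bits(bit_size)),
                     nir_const_value_as_uint(instr->value[0], bit_size));
   }
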
222 agx_emit_load_attr(agx_builder *b, agx_index *dests, nir_intrinsic_instr *instr)
224 nir_src *offset_src = nir_get_io_offset_src(instr);
226 unsigned index = nir_intrinsic_base(instr) +
257 assert(instr->num_components <= 4);
260 agx_index vec = agx_vec_for_dest(b->shader, &instr->dest);
271 for (unsigned i = actual_comps; i < instr->num_components; ++i)
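
The loop at line 271 pads components the hardware fetch did not supply. A hedged sketch of that padding, assuming an agx_mov_imm helper and the conventional (0, 0, 0, 1) vertex-attribute default:

   /* Assumed reconstruction: unfetched trailing components take the
    * standard (0, 0, 0, 1) attribute default */
   agx_index zero = agx_mov_imm(b, 32, 0);
   agx_index one = agx_mov_imm(b, 32, fui(1.0));

   for (unsigned i = actual_comps; i < instr->num_components; ++i)
      dests[i] = (i == 3) ? one : zero;
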
276 agx_emit_load_vary_flat(agx_builder *b, agx_index *dests, nir_intrinsic_instr *instr)
278 unsigned components = instr->num_components;
281 nir_src *offset = nir_get_io_offset_src(instr);
283 unsigned imm_index = b->shader->varyings[nir_intrinsic_base(instr)];
286 assert(nir_dest_bit_size(instr->dest) == 32 && "no 16-bit flat shading");
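
A hedged sketch of how the flat path plausibly finishes: apply the constant offset, then load each 32-bit channel without interpolation. agx_ld_vary_flat_component is a hypothetical stand-in for the backend's real per-slot load:

   assert(nir_src_is_const(*offset) && "no indirect flat inputs");
   imm_index += nir_src_as_uint(*offset);

   for (unsigned i = 0; i < components; ++i) {
      /* Hypothetical helper: fetch one uninterpolated 32-bit channel
       * from varying slot (imm_index + i) */
      dests[i] = agx_ld_vary_flat_component(b, imm_index + i);
   }
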
296 agx_emit_load_vary(agx_builder *b, agx_index *dests, nir_intrinsic_instr *instr)
298 ASSERTED unsigned components = instr->num_components;
299 ASSERTED nir_intrinsic_instr *parent = nir_src_as_intrinsic(instr->src[0]);
307 nir_src *offset = nir_get_io_offset_src(instr);
309 unsigned imm_index = b->shader->varyings[nir_intrinsic_base(instr)];
312 agx_index vec = agx_vec_for_intr(b->shader, instr);
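
These lines trace the interpolated path: validate the barycentric source feeding src[0], compute the slot, then emit one vector load and split it into scalar destinations. A hedged continuation, with agx_ld_vary_to and agx_emit_split assumed, and the barycentric check narrowed to pixel-center interpolation as an assumption:

   assert(components >= 1 && components <= 4);
   assert(parent->intrinsic == nir_intrinsic_load_barycentric_pixel &&
          "assumed: only pixel-center interpolation handled so far");

   assert(nir_src_is_const(*offset) && "no indirects");
   imm_index += nir_src_as_uint(*offset) * 4;

   agx_ld_vary_to(b, vec, agx_immediate(imm_index),
                  instr->num_components, true);
   agx_emit_split(b, dests, vec, instr->num_components);
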
318 agx_emit_store_vary(agx_builder *b, nir_intrinsic_instr *instr)
320 nir_src *offset = nir_get_io_offset_src(instr);
322 unsigned imm_index = b->shader->varyings[nir_intrinsic_base(instr)];
323 imm_index += nir_intrinsic_component(instr);
327 assert(nir_intrinsic_write_mask(instr) == 0x1);
331 agx_src_index(&instr->src[0]));
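
The store side is short; a hedged completion of lines 318-331, assuming an agx_st_vary opcode builder:

   assert(nir_src_is_const(*offset) && "todo: indirect varying stores");
   imm_index += nir_src_as_uint(*offset);

   /* nir_lower_io_to_scalar has run, so exactly one channel is written */
   assert(nir_intrinsic_write_mask(instr) == 0x1);

   agx_st_vary(b, agx_immediate(imm_index),
               agx_src_index(&instr->src[0]));
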
335 agx_emit_fragment_out(agx_builder *b, nir_intrinsic_instr *instr)
339 nir_var_shader_out, nir_intrinsic_base(instr));
367 return agx_st_tile(b, agx_src_index(&instr->src[0]),
372 agx_emit_load_tile(agx_builder *b, agx_index *dests, nir_intrinsic_instr *instr)
376 nir_var_shader_out, nir_intrinsic_base(instr));
390 agx_index vec = agx_vec_for_dest(b->shader, &instr->dest);
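
Lines 339 and 376 are argument tails of the same lookup in the two tile-buffer paths; each plausibly resolves the I/O base back to its shader_out variable, e.g. with nir_find_variable_with_location (the render-target computation below is an assumption):

   const nir_variable *var = nir_find_variable_with_location(b->shader->nir,
         nir_var_shader_out, nir_intrinsic_base(instr));
   assert(var);

   /* Assumed: the variable's location selects the render target, and
    * therefore which tile-buffer slot agx_st_tile / the tile load hits */
   unsigned rt = var->data.location - FRAG_RESULT_DATA0;
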
407 agx_emit_load_ubo(agx_builder *b, agx_index dst, nir_intrinsic_instr *instr)
409 bool kernel_input = (instr->intrinsic == nir_intrinsic_load_kernel_input);
410 nir_src *offset = nir_get_io_offset_src(instr);
412 if (!kernel_input && !nir_src_is_const(instr->src[0]))
416 uint32_t block = kernel_input ? 0 : nir_src_as_uint(instr->src[0]);
429 assert(instr->num_components <= 4);
432 agx_format_for_bits(nir_dest_bit_size(instr->dest)),
433 BITFIELD_MASK(instr->num_components), 0);
435 agx_emit_cached_split(b, dst, instr->num_components);
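
Lines 407-435 cover most of the UBO loader; the elided middle plausibly fetches the block's 64-bit base address from a pushed sysval table before issuing the device load. A sketch, with agx_indexed_sysval and AGX_PUSH_UBO_BASES as assumed names:

   uint32_t block = kernel_input ? 0 : nir_src_as_uint(instr->src[0]);

   /* Assumed: one 64-bit (4 x 16-bit) base address per UBO in a pushed
    * table, indexed by block */
   agx_index base = agx_indexed_sysval(b->shader, AGX_PUSH_UBO_BASES,
                                       AGX_SIZE_64, block * 4,
                                       b->shader->nir->info.num_ubos * 4);

   assert(instr->num_components <= 4);

   agx_device_load_to(b, dst, base, agx_src_index(offset),
                      agx_format_for_bits(nir_dest_bit_size(instr->dest)),
                      BITFIELD_MASK(instr->num_components), 0);

   agx_emit_cached_split(b, dst, instr->num_components);
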
441 agx_emit_load_frag_coord(agx_builder *b, agx_index *dests, nir_intrinsic_instr *instr)
472 agx_emit_discard(agx_builder *b, nir_intrinsic_instr *instr)
483 agx_emit_intrinsic(agx_builder *b, nir_intrinsic_instr *instr)
485 agx_index dst = nir_intrinsic_infos[instr->intrinsic].has_dest ?
486 agx_dest_index(&instr->dest) : agx_null();
490 switch (instr->intrinsic) {
500 agx_emit_load_vary(b, dests, instr);
505 agx_emit_load_vary_flat(b, dests, instr);
507 agx_emit_load_attr(b, dests, instr);
515 return agx_emit_fragment_out(b, instr);
517 return agx_emit_store_vary(b, instr);
523 agx_emit_load_tile(b, dests, instr);
528 return agx_emit_load_ubo(b, dst, instr);
531 agx_emit_load_frag_coord(b, dests, instr);
535 return agx_emit_discard(b, instr);
552 fprintf(stderr, "Unhandled intrinsic %s\n", nir_intrinsic_infos[instr->intrinsic].name);
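
Lines 483-552 are the dispatcher the helpers above hang off. A condensed sketch of the switch; the case labels, the `stage` local, and the scalar dests[] gathering are assumptions, since the listing only shows the calls:

   switch (instr->intrinsic) {
   case nir_intrinsic_load_interpolated_input:
      agx_emit_load_vary(b, dests, instr);
      break;

   case nir_intrinsic_load_input:
      /* Assumed stage split: flat fragment inputs vs. vertex attributes */
      if (stage == MESA_SHADER_FRAGMENT)
         agx_emit_load_vary_flat(b, dests, instr);
      else
         agx_emit_load_attr(b, dests, instr);
      break;

   case nir_intrinsic_store_output:
      return (stage == MESA_SHADER_FRAGMENT) ?
             agx_emit_fragment_out(b, instr) :
             agx_emit_store_vary(b, instr);

   case nir_intrinsic_load_output:
      agx_emit_load_tile(b, dests, instr);
      break;

   case nir_intrinsic_load_ubo:
   case nir_intrinsic_load_kernel_input:
      return agx_emit_load_ubo(b, dst, instr);

   case nir_intrinsic_load_frag_coord:
      agx_emit_load_frag_coord(b, dests, instr);
      break;

   case nir_intrinsic_discard:
      return agx_emit_discard(b, instr);

   default:
      fprintf(stderr, "Unhandled intrinsic %s\n",
              nir_intrinsic_infos[instr->intrinsic].name);
      unreachable("Unhandled intrinsic");
   }
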
628 agx_emit_alu(agx_builder *b, nir_alu_instr *instr)
630 unsigned srcs = nir_op_infos[instr->op].num_inputs;
631 unsigned sz = nir_dest_bit_size(instr->dest.dest);
632 unsigned src_sz = srcs ? nir_src_bit_size(instr->src[0].src) : 0;
633 ASSERTED unsigned comps = nir_dest_num_components(instr->dest.dest);
635 assert(comps == 1 || nir_op_is_vec(instr->op));
638 agx_index dst = agx_dest_index(&instr->dest.dest);
639 agx_index s0 = srcs > 0 ? agx_alu_src_index(b, instr->src[0]) : agx_null();
640 agx_index s1 = srcs > 1 ? agx_alu_src_index(b, instr->src[1]) : agx_null();
641 agx_index s2 = srcs > 2 ? agx_alu_src_index(b, instr->src[2]) : agx_null();
642 agx_index s3 = srcs > 3 ? agx_alu_src_index(b, instr->src[3]) : agx_null();
646 return agx_emit_alu_bool(b, instr->op, dst, s0, s1, s2);
655 switch (instr->op) {
840 fprintf(stderr, "Unhandled ALU op %s\n", nir_op_infos[instr->op].name);
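
Before the big switch at line 655, the destination and up to four sources have already been materialized (lines 638-642). A few illustrative cases; the opcode names follow the generated agx_<op>_to builder pattern and are assumptions, since the real table runs for nearly two hundred lines:

   switch (instr->op) {
   case nir_op_fadd: return agx_fadd_to(b, dst, s0, s1);
   case nir_op_fmul: return agx_fmul_to(b, dst, s0, s1);
   case nir_op_ffma: return agx_fma_to(b, dst, s0, s1, s2);
   case nir_op_mov:  return agx_mov_to(b, dst, s0);

   default:
      fprintf(stderr, "Unhandled ALU op %s\n", nir_op_infos[instr->op].name);
      unreachable("Unhandled ALU instruction");
   }
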
886 agx_emit_tex(agx_builder *b, nir_tex_instr *instr)
888 switch (instr->op) {
898 texture = agx_immediate(instr->texture_index),
899 sampler = agx_immediate(instr->sampler_index),
903 for (unsigned i = 0; i < instr->num_srcs; ++i) {
904 agx_index index = agx_src_index(&instr->src[i].src);
906 switch (instr->src[i].src_type) {
920 if (instr->is_array) {
921 unsigned nr = nir_src_num_components(instr->src[i].src);
932 instr->texture_index, 1);
968 agx_index dst = agx_dest_index(&instr->dest);
970 agx_tex_dim(instr->sampler_dim, instr->is_array),
971 agx_lod_mode_for_nir(instr->op),
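
Lines 886-971 outline texturing: reject unsupported ops, gather the NIR sources into operand registers (converting the float array index when is_array, per lines 920-932), then emit a single sample instruction. A hedged sketch of the source walk and final emission; the argument list of agx_texture_sample_to is assumed:

   agx_index coords = agx_null(), lod = agx_immediate(0),
             texture = agx_immediate(instr->texture_index),
             sampler = agx_immediate(instr->sampler_index);

   for (unsigned i = 0; i < instr->num_srcs; ++i) {
      agx_index index = agx_src_index(&instr->src[i].src);

      switch (instr->src[i].src_type) {
      case nir_tex_src_coord:
         coords = index;
         break;
      case nir_tex_src_lod:
      case nir_tex_src_bias:
         lod = index;
         break;
      default:
         unreachable("todo: more texture sources");
      }
   }

   agx_index dst = agx_dest_index(&instr->dest);
   agx_texture_sample_to(b, dst, coords, lod, texture, sampler,
                         agx_tex_dim(instr->sampler_dim, instr->is_array),
                         agx_lod_mode_for_nir(instr->op));
   agx_emit_cached_split(b, dst, 4);
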
1011 agx_emit_jump(agx_builder *b, nir_jump_instr *instr)
1014 assert(instr->type == nir_jump_break || instr->type == nir_jump_continue);
1019 if (instr->type == nir_jump_continue) {
1022 } else if (instr->type == nir_jump_break) {
1039 agx_emit_phi(agx_builder *b, nir_phi_instr *instr)
1041 agx_instr *I = agx_phi_to(b, agx_dest_index(&instr->dest));
1044 I->phi = instr;
1088 agx_emit_instr(agx_builder *b, struct nir_instr *instr)
1090 switch (instr->type) {
1092 agx_emit_load_const(b, nir_instr_as_load_const(instr));
1096 agx_emit_intrinsic(b, nir_instr_as_intrinsic(instr));
1100 agx_emit_alu(b, nir_instr_as_alu(instr));
1104 agx_emit_tex(b, nir_instr_as_tex(instr));
1108 agx_emit_jump(b, nir_instr_as_jump(instr));
1112 agx_emit_phi(b, nir_instr_as_phi(instr));
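
The top-level dispatcher at lines 1088-1112 is nearly complete in the listing; the completion below only restores the case labels plus a default that plausibly asserts on instruction types lowered away earlier:

   static void
   agx_emit_instr(agx_builder *b, struct nir_instr *instr)
   {
      switch (instr->type) {
      case nir_instr_type_load_const:
         agx_emit_load_const(b, nir_instr_as_load_const(instr));
         break;
      case nir_instr_type_intrinsic:
         agx_emit_intrinsic(b, nir_instr_as_intrinsic(instr));
         break;
      case nir_instr_type_alu:
         agx_emit_alu(b, nir_instr_as_alu(instr));
         break;
      case nir_instr_type_tex:
         agx_emit_tex(b, nir_instr_as_tex(instr));
         break;
      case nir_instr_type_jump:
         agx_emit_jump(b, nir_instr_as_jump(instr));
         break;
      case nir_instr_type_phi:
         agx_emit_phi(b, nir_instr_as_phi(instr));
         break;
      default:
         unreachable("should've been lowered");
      }
   }
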
1148 nir_foreach_instr(instr, block) {
1149 agx_emit_instr(&_b, instr);
1354 agx_lower_sincos_filter(const nir_instr *instr, UNUSED const void *_)
1356 if (instr->type != nir_instr_type_alu)
1359 nir_alu_instr *alu = nir_instr_as_alu(instr);
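
This filter (lines 1354-1359) is short enough to reconstruct in full; only the final opcode check is elided, and by the pass's name it must accept fsin and fcos:

   static bool
   agx_lower_sincos_filter(const nir_instr *instr, UNUSED const void *_)
   {
      if (instr->type != nir_instr_type_alu)
         return false;

      nir_alu_instr *alu = nir_instr_as_alu(instr);
      return alu->op == nir_op_fsin || alu->op == nir_op_fcos;
   }
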
1373 agx_lower_sincos_impl(struct nir_builder *b, nir_instr *instr, UNUSED void *_)
1375 nir_alu_instr *alu = nir_instr_as_alu(instr);
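
The matching rewrite plausibly maps sin/cos onto the hardware's quadrant-based sine opcode by converting radians to turns; a sketch, with the constants and the nir_fsin_agx builder assumed:

   static nir_ssa_def *
   agx_lower_sincos_impl(struct nir_builder *b, nir_instr *instr, UNUSED void *_)
   {
      nir_alu_instr *alu = nir_instr_as_alu(instr);
      nir_ssa_def *x = nir_mov_alu(b, alu->src[0], 1);

      /* Radians to turns; cos(x) = sin(x + pi/2), i.e. +0.25 turns */
      nir_ssa_def *turns = nir_fmul_imm(b, x, M_1_PI * 0.5);
      if (alu->op == nir_op_fcos)
         turns = nir_fadd_imm(b, turns, 0.25);

      /* Assumed: the hardware opcode takes the angle in quadrants, [0, 4) */
      nir_ssa_def *quadrants = nir_fmul_imm(b, nir_ffract(b, turns), 4.0);
      return nir_fsin_agx(b, quadrants);
   }

The pair would then be wired up with nir_shader_lower_instructions(shader, agx_lower_sincos_filter, agx_lower_sincos_impl, NULL).
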
1395 nir_instr *instr, UNUSED void *data)
1397 if (instr->type != nir_instr_type_intrinsic)
1400 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
1408 b->cursor = nir_before_instr(&intr->instr);
1415 nir_instr *instr, UNUSED void *data)
1417 if (instr->type != nir_instr_type_intrinsic)
1420 nir_intrinsic_instr *intr = nir_instr_as_intrinsic(instr);
1424 b->cursor = nir_before_instr(&intr->instr);
1436 nir_instr_rewrite_src_ssa(instr, offset, new);
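
Both callbacks at lines 1395 and 1415 have the builder/instr/data shape nir_shader_instructions_pass expects, and nir_instr_rewrite_src_ssa at line 1436 swaps the intrinsic's offset source for the freshly built value while keeping use lists consistent. A hedged sketch of how such a rewrite is driven; the callback name is hypothetical since the listing elides the definitions:

   /* agx_lower_example_cb stands in for the real (elided) callback name */
   bool progress = nir_shader_instructions_pass(nir, agx_lower_example_cb,
         nir_metadata_block_index | nir_metadata_dominance, NULL);
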