Lines matching refs: src

369 cond_reg = get_nir_src(cond->src[0].src);
370 cond_reg = offset(cond_reg, bld, cond->src[0].swizzle[0]);
505 if (!instr->src[0].src.is_ssa ||
506 !instr->src[0].src.ssa->parent_instr)
509 if (instr->src[0].src.ssa->parent_instr->type != nir_instr_type_alu)
513 nir_instr_as_alu(instr->src[0].src.ssa->parent_instr);
519 unsigned element = nir_src_as_uint(src0->src[1].src);
526 fs_reg op0 = get_nir_src(src0->src[0].src);
529 nir_src_bit_size(src0->src[0].src)));
530 op0 = offset(op0, bld, src0->src[0].swizzle[0]);
540 nir_intrinsic_instr *src0 = nir_src_as_intrinsic(instr->src[0].src);
544 if (!nir_src_is_const(instr->src[1].src) ||
545 !nir_src_is_const(instr->src[2].src))
548 const float value1 = nir_src_as_float(instr->src[1].src);
549 const float value2 = nir_src_as_float(instr->src[2].src);
624 const fs_reg &src,
628 fs_reg temp = src;
652 bld.ASR(temp, src, brw_imm_d(31));
653 bld.XOR(temp, temp, src);
665 inst->src[0].negate = true;
707 assert(!instr->src[i].abs);
708 assert(!instr->src[i].negate);
710 op[i] = get_nir_src(instr->src[i].src);
713 nir_src_bit_size(instr->src[i].src)));
750 op[i] = offset(op[i], bld, instr->src[i].swizzle[channel]);
761 nir_alu_instr *inot_instr = nir_src_as_alu_instr(instr->src[i].src);
783 nir_alu_instr *inot_instr = nir_src_as_alu_instr(instr->src[0].src);
795 nir_src_bit_size(inot_instr->src[0].src) != 32)
830 nir_src_as_alu_instr(instr->src[fsign_src].src);
840 op[0] = get_nir_src(fsign_instr->src[0].src);
844 nir_src_bit_size(fsign_instr->src[0].src));
857 op[0] = offset(op[0], bld, fsign_instr->src[0].swizzle[channel]);
903 * - 2-src instructions can't operate with 64-bit immediates
952 nir_src_as_alu_instr(instr->src[fsign_src].src);
956 * 1. instr->src[fsign_src] must be a nir_op_fsign.
1023 if (!instr->src[i].src.is_ssa &&
1024 instr->dest.dest.reg.reg == instr->src[i].src.reg.reg) {
1037 offset(op[0], bld, instr->src[0].swizzle[i]));
1040 offset(op[i], bld, instr->src[i].swizzle[0]));
1146 nir_alu_instr *extract_instr = nir_src_as_alu_instr(instr->src[0].src);
1152 const unsigned byte = nir_src_as_uint(extract_instr->src[1].src);
1161 const unsigned word = nir_src_as_uint(extract_instr->src[1].src);
1295 inst->src[1].negate = true;
1430 const uint32_t bit_size = nir_src_bit_size(instr->src[0].src);
1481 nir_alu_instr *inot_src_instr = nir_src_as_alu_instr(instr->src[0].src);
1505 nir_src_bit_size(inot_src_instr->src[0].src)));
1509 nir_src_bit_size(inot_src_instr->src[1].src)));
1588 uint32_t bit_size = nir_src_bit_size(instr->src[0].src);
1795 inst->src[0].negate = true;
1809 fs_reg src = retype(op[0], BRW_REGISTER_TYPE_D);
1810 fs_reg negated_src = src;
1816 src.negate = false;
1818 bld.AND(temp, src, negated_src);
1961 unsigned byte = nir_src_as_uint(instr->src[1].src);
2000 unsigned word = nir_src_as_uint(instr->src[1].src);
2068 fs_visitor::get_nir_src(const nir_src &src)
2071 if (src.is_ssa) {
2072 if (nir_src_is_undef(src)) {
2074 brw_reg_type_from_bit_size(src.ssa->bit_size, BRW_REGISTER_TYPE_D);
2075 reg = bld.vgrf(reg_type, src.ssa->num_components);
2077 reg = nir_ssa_values[src.ssa->index];
2081 assert(src.reg.indirect == NULL);
2082 reg = offset(nir_locals[src.reg.reg->index], bld,
2083 src.reg.base_offset * src.reg.reg->num_components);
2086 if (nir_src_bit_size(src) == 64 && devinfo->ver == 7) {
2094 reg.type = brw_reg_type_from_bit_size(nir_src_bit_size(src),
2111 fs_visitor::get_nir_src_imm(const nir_src &src)
2113 assert(nir_src_bit_size(src) == 32);
2114 return nir_src_is_const(src) ?
2115 fs_reg(brw_imm_d(nir_src_as_int(src))) : get_nir_src(src);
2150 if (new_inst->src[j].file == VGRF)
2151 new_inst->src[j] = offset(new_inst->src[j], bld, i);
2161 const fs_reg &src,
2168 fs_inst *inst = bld.emit(opcode, dst, src, desc);
2703 fs_reg src = fs_reg(ATTR, nir_intrinsic_base(instr) * 4, dest.type);
2704 src = offset(src, bld, nir_intrinsic_component(instr));
2705 src = offset(src, bld, nir_src_as_uint(instr->src[0]));
2708 bld.MOV(offset(dest, bld, i), offset(src, bld, i));
2731 const nir_src &vertex_src = instr->src[0];
2775 const nir_src &vertex_src = instr->src[0];
3055 assert(nir_src_bit_size(instr->src[0]) == 32);
3056 fs_reg value = get_nir_src(instr->src[0]);
3146 fs_reg src = fs_reg(ATTR, imm_offset / 2, dest.type);
3149 bld.MOV(offset(dest, bld, i), component(src, comp));
3244 emit_gs_input_load(dest, instr->src[0], instr->const_index[0],
3245 instr->src[1], instr->num_components,
3250 emit_gs_vertex(instr->src[0], instr->const_index[0]);
3254 emit_gs_end_primitive(instr->src[0]);
3258 bld.MOV(this->final_gs_vertex_count, get_nir_src(instr->src[0]));
3492 const fs_reg src = get_nir_src(instr->src[0]);
3493 const unsigned store_offset = nir_src_as_uint(instr->src[1]);
3497 src.type);
3501 offset(src, bld, j));
3510 const unsigned load_offset = nir_src_as_uint(instr->src[0]);
3542 nir_alu_instr *alu = nir_src_as_alu_instr(instr->src[0]);
3585 cmp = bld.CMP(bld.null_reg_f(), get_nir_src(instr->src[0]),
3651 assert(nir_src_as_uint(instr->src[0]) == 0);
3676 if (nir_src_is_const(instr->src[0])) {
3677 unsigned msg_data = nir_src_as_uint(instr->src[0]) << 4;
3682 fs_reg(), /* src */
3686 const fs_reg sample_src = retype(get_nir_src(instr->src[0]),
3689 if (nir_src_is_always_uniform(instr->src[0])) {
3697 fs_reg(), /* src */
3725 fs_reg(), /* src */
3743 nir_const_value *const_offset = nir_src_as_const_value(instr->src[0]);
3746 assert(nir_src_bit_size(instr->src[0]) == 32);
3753 fs_reg(), /* src */
3757 fs_reg src = retype(get_nir_src(instr->src[0]), BRW_REGISTER_TYPE_D);
3762 src,
3774 assert(instr->src[0].ssa &&
3775 instr->src[0].ssa->parent_instr->type == nir_instr_type_intrinsic);
3777 nir_instr_as_intrinsic(instr->src[0].ssa->parent_instr);
3786 dst_xy = retype(get_nir_src(instr->src[0]), BRW_REGISTER_TYPE_F);
3907 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[0]);
3941 const unsigned bit_size = nir_src_bit_size(instr->src[0]);
3944 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[1]);
3951 fs_reg data = get_nir_src(instr->src[0]);
3954 assert(nir_src_bit_size(instr->src[0]) <= 32);
3958 if (nir_src_bit_size(instr->src[0]) == 32 &&
3960 assert(nir_src_num_components(instr->src[0]) <= 4);
3966 assert(nir_src_num_components(instr->src[0]) == 1);
4129 fs_reg image = retype(get_nir_src_imm(instr->src[0]), BRW_REGISTER_TYPE_UD);
4139 /* SSBO stores are weird in that their index is in src[1] */
4143 const unsigned src = is_store ? 1 : 0;
4145 if (nir_src_is_const(instr->src[src])) {
4146 return brw_imm_ud(nir_src_as_uint(instr->src[src]));
4148 return bld.emit_uniformize(get_nir_src(instr->src[src]));
4346 bld.emit_uniformize(get_nir_src(instr->src[0]));
4350 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[1]);
4366 srcs[SURFACE_LOGICAL_SRC_DATA] = get_nir_src(instr->src[3]);
4382 data = get_nir_src(instr->src[3]);
4385 fs_reg sources[2] = { data, get_nir_src(instr->src[4]) };
4408 fs_reg image = retype(get_nir_src_imm(instr->src[0]),
4412 assert(nir_src_as_uint(instr->src[1]) == 0);
4444 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[1]);
4460 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[1]);
4461 srcs[SURFACE_LOGICAL_SRC_DATA] = get_nir_src(instr->src[2]);
4698 fs_reg src(UNIFORM, instr->const_index[0] / 4, dest.type);
4700 if (nir_src_is_const(instr->src[0])) {
4701 unsigned load_offset = nir_src_as_uint(instr->src[0]);
4706 src.offset = load_offset + instr->const_index[0] % 4;
4709 bld.MOV(offset(dest, bld, j), offset(src, bld, j));
4712 fs_reg indirect = retype(get_nir_src(instr->src[0]),
4731 offset(dest, bld, j), offset(src, bld, j),
4746 subscript(offset(src, bld, j), BRW_REGISTER_TYPE_UD, i),
4757 if (nir_src_is_const(instr->src[0])) {
4758 const unsigned index = nir_src_as_uint(instr->src[0]);
4766 bld.MOV(surf_index, get_nir_src(instr->src[0]));
4770 if (!nir_src_is_const(instr->src[1])) {
4771 fs_reg base_offset = retype(get_nir_src(instr->src[1]),
4790 const unsigned load_offset = nir_src_as_uint(instr->src[1]);
4793 if (nir_src_is_const(instr->src[0])) {
4794 const unsigned ubo_block = nir_src_as_uint(instr->src[0]);
4855 srcs[A64_LOGICAL_ADDRESS] = get_nir_src(instr->src[0]);
4888 assert(nir_src_bit_size(instr->src[0]) <= 32);
4894 srcs[A64_LOGICAL_ADDRESS] = get_nir_src(instr->src[1]);
4898 if (nir_src_bit_size(instr->src[0]) == 32 &&
4900 assert(nir_src_num_components(instr->src[0]) <= 4);
4902 srcs[A64_LOGICAL_SRC] = get_nir_src(instr->src[0]); /* Data */
4908 assert(nir_src_num_components(instr->src[0]) == 1);
4909 const unsigned bit_size = nir_src_bit_size(instr->src[0]);
4913 bld.MOV(tmp, retype(get_nir_src(instr->src[0]), data_type));
4916 srcs[A64_LOGICAL_ARG] = brw_imm_ud(nir_src_bit_size(instr->src[0]));
4950 bool is_pred_const = nir_src_is_const(instr->src[1]);
4951 if (is_pred_const && nir_src_as_uint(instr->src[1]) == 0) {
4958 fs_reg addr = bld.emit_uniformize(get_nir_src(instr->src[0]));
4967 fs_reg pred = bld.emit_uniformize(get_nir_src(instr->src[1]));
5013 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[1]);
5046 const unsigned bit_size = nir_src_bit_size(instr->src[0]);
5050 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[2]);
5054 fs_reg data = get_nir_src(instr->src[0]);
5057 assert(nir_src_bit_size(instr->src[0]) <= 32);
5061 if (nir_src_bit_size(instr->src[0]) == 32 &&
5063 assert(nir_src_num_components(instr->src[0]) <= 4);
5069 assert(nir_src_num_components(instr->src[0]) == 1);
5082 assert(nir_src_bit_size(instr->src[0]) == 32);
5083 fs_reg src = get_nir_src(instr->src[0]);
5085 unsigned store_offset = nir_src_as_uint(instr->src[1]);
5090 4 * store_offset), src.type);
5093 offset(src, bld, j));
5118 assert(nir_src_num_components(instr->src[0]) == 1);
5119 unsigned ssbo_index = nir_src_is_const(instr->src[0]) ?
5120 nir_src_as_uint(instr->src[0]) : 0;
5199 const fs_reg nir_addr = get_nir_src(instr->src[0]);
5245 assert(nir_src_num_components(instr->src[0]) == 1);
5246 const unsigned bit_size = nir_src_bit_size(instr->src[0]);
5272 const fs_reg nir_addr = get_nir_src(instr->src[1]);
5274 fs_reg data = get_nir_src(instr->src[0]);
5277 assert(nir_src_num_components(instr->src[0]) == 1);
5278 assert(nir_src_bit_size(instr->src[0]) <= 32);
5281 if (nir_src_bit_size(instr->src[0]) == 32 &&
5350 bld.CMP(bld.null_reg_d(), get_nir_src(instr->src[0]), brw_imm_d(0), BRW_CONDITIONAL_NZ);
5382 bld.CMP(bld.null_reg_d(), get_nir_src(instr->src[0]), brw_imm_d(0), BRW_CONDITIONAL_NZ);
5402 fs_reg value = get_nir_src(instr->src[0]);
5404 const unsigned bit_size = nir_src_bit_size(instr->src[0]);
5443 const fs_reg value = retype(get_nir_src(instr->src[0]),
5468 const fs_reg value = get_nir_src(instr->src[0]);
5469 const fs_reg invocation = get_nir_src(instr->src[1]);
5480 const fs_reg value = get_nir_src(instr->src[0]);
5486 const fs_reg value = get_nir_src(instr->src[0]);
5487 const fs_reg index = get_nir_src(instr->src[1]);
5510 const fs_reg value = get_nir_src(instr->src[0]);
5511 const unsigned index = nir_src_as_uint(instr->src[1]);
5519 const fs_reg value = get_nir_src(instr->src[0]);
5527 assert(nir_src_bit_size(instr->src[0]) == 32);
5549 const fs_reg value = get_nir_src(instr->src[0]);
5550 if (nir_src_bit_size(instr->src[0]) == 32) {
5570 const fs_reg value = get_nir_src(instr->src[0]);
5571 if (nir_src_bit_size(instr->src[0]) == 32) {
5591 fs_reg src = get_nir_src(instr->src[0]);
5598 src.type = brw_type_for_nir_type(devinfo,
5600 nir_src_bit_size(instr->src[0])));
5602 fs_reg identity = brw_nir_reduction_op_identity(bld, redop, src.type);
5609 fs_reg scan = bld.vgrf(src.type);
5610 bld.exec_all().emit(SHADER_OPCODE_SEL_EXEC, scan, src, identity);
5614 dest.type = src.type;
5615 if (cluster_size * type_sz(src.type) >= REG_SIZE * 2) {
5621 assert((cluster_size * type_sz(src.type)) % (REG_SIZE * 2) == 0);
5623 (dispatch_width * type_sz(src.type)) / (REG_SIZE * 2);
5640 fs_reg src = get_nir_src(instr->src[0]);
5644 src.type = brw_type_for_nir_type(devinfo,
5646 nir_src_bit_size(instr->src[0])));
5648 fs_reg identity = brw_nir_reduction_op_identity(bld, redop, src.type);
5655 fs_reg scan = bld.vgrf(src.type);
5657 allbld.emit(SHADER_OPCODE_SEL_EXEC, scan, src, identity);
5664 fs_reg shifted = bld.vgrf(src.type);
5675 bld.MOV(retype(dest, src.type), scan);
5682 fs_reg address = bld.emit_uniformize(get_nir_src(instr->src[0]));
5716 assert(nir_src_bit_size(instr->src[0]) == 32);
5718 fs_reg address = bld.emit_uniformize(get_nir_src(instr->src[1]));
5719 fs_reg src = get_nir_src(instr->src[0]);
5734 srcs[A64_LOGICAL_SRC] = retype(byte_offset(src, written * 4),
5758 fs_reg address = bld.emit_uniformize(get_nir_src(instr->src[is_ssbo ? 1 : 0]));
5794 assert(nir_src_bit_size(instr->src[0]) == 32);
5799 fs_reg address = bld.emit_uniformize(get_nir_src(instr->src[is_ssbo ? 2 : 1]));
5800 fs_reg src = get_nir_src(instr->src[0]);
5820 retype(byte_offset(src, written * 4), BRW_REGISTER_TYPE_UD);
5936 bld.emit_uniformize(get_nir_src(instr->src[0])),
5937 get_nir_src(instr->src[1]));
5965 fs_reg globals = get_nir_src(instr->src[0]);
5967 srcs[RT_LOGICAL_SRC_BVH_LEVEL] = get_nir_src(instr->src[1]);
5968 srcs[RT_LOGICAL_SRC_TRACE_RAY_CONTROL] = get_nir_src(instr->src[2]);
6013 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[1]);
6020 data = get_nir_src(instr->src[2]);
6024 fs_reg sources[2] = { data, get_nir_src(instr->src[3]) };
6046 srcs[SURFACE_LOGICAL_SRC_ADDRESS] = get_nir_src(instr->src[1]);
6051 fs_reg data = get_nir_src(instr->src[2]);
6054 fs_reg sources[2] = { data, get_nir_src(instr->src[3]) };
6082 data = get_nir_src(instr->src[1]);
6085 fs_reg sources[2] = { data, get_nir_src(instr->src[2]) };
6092 if (nir_src_is_const(instr->src[0])) {
6094 brw_imm_ud(instr->const_index[0] + nir_src_as_uint(instr->src[0]));
6098 retype(get_nir_src(instr->src[0]), BRW_REGISTER_TYPE_UD),
6122 fs_reg data = get_nir_src(instr->src[1]);
6125 fs_reg sources[2] = { data, get_nir_src(instr->src[2]) };
6132 if (nir_src_is_const(instr->src[0])) {
6134 brw_imm_ud(instr->const_index[0] + nir_src_as_uint(instr->src[0]));
6138 retype(get_nir_src(instr->src[0]), BRW_REGISTER_TYPE_UD),
6149 expand_to_32bit(const fs_builder &bld, const fs_reg &src)
6151 if (type_sz(src.type) == 2) {
6153 bld.MOV(src32, retype(src, BRW_REGISTER_TYPE_UW));
6156 return src;
6168 fs_reg addr = get_nir_src(instr->src[0]);
6172 data = expand_to_32bit(bld, get_nir_src(instr->src[1]));
6178 expand_to_32bit(bld, get_nir_src(instr->src[2]))
6218 fs_reg addr = get_nir_src(instr->src[0]);
6221 fs_reg data = expand_to_32bit(bld, get_nir_src(instr->src[1]));
6227 expand_to_32bit(bld, get_nir_src(instr->src[2]))
6279 fs_reg src = get_nir_src(instr->src[i].src);
6280 switch (instr->src[i].src_type) {
6283 retype(get_nir_src_imm(instr->src[i].src), BRW_REGISTER_TYPE_F);
6286 srcs[TEX_LOGICAL_SRC_SHADOW_C] = retype(src, BRW_REGISTER_TYPE_F);
6294 srcs[TEX_LOGICAL_SRC_COORDINATE] = retype(src, BRW_REGISTER_TYPE_D);
6297 srcs[TEX_LOGICAL_SRC_COORDINATE] = retype(src, BRW_REGISTER_TYPE_F);
6309 srcs[TEX_LOGICAL_SRC_LOD] = retype(src, BRW_REGISTER_TYPE_F);
6313 srcs[TEX_LOGICAL_SRC_LOD2] = retype(src, BRW_REGISTER_TYPE_F);
6319 retype(get_nir_src_imm(instr->src[i].src), BRW_REGISTER_TYPE_UD);
6323 retype(get_nir_src_imm(instr->src[i].src), BRW_REGISTER_TYPE_D);
6327 retype(get_nir_src_imm(instr->src[i].src), BRW_REGISTER_TYPE_F);
6333 retype(get_nir_src_imm(instr->src[i].src), BRW_REGISTER_TYPE_F);
6336 srcs[TEX_LOGICAL_SRC_SAMPLE_INDEX] = retype(src, BRW_REGISTER_TYPE_UD);
6350 retype(src, BRW_REGISTER_TYPE_D);
6361 bld.ADD(tmp, src, brw_imm_ud(texture));
6369 bld.ADD(tmp, src, brw_imm_ud(sampler));
6377 srcs[TEX_LOGICAL_SRC_SURFACE_HANDLE] = bld.emit_uniformize(src);
6383 srcs[TEX_LOGICAL_SRC_SAMPLER_HANDLE] = bld.emit_uniformize(src);
6388 srcs[TEX_LOGICAL_SRC_MCS] = retype(src, BRW_REGISTER_TYPE_D);
6615 const fs_reg &src,
6619 if (type_sz(src.type) == type_sz(dst.type)) {
6622 offset(src, bld, first_component),
6623 type_sz(src.type) * bld.dispatch_width() * components));
6625 bld.MOV(retype(offset(dst, bld, i), src.type),
6626 offset(src, bld, i + first_component));
6628 } else if (type_sz(src.type) < type_sz(dst.type)) {
6630 unsigned size_ratio = type_sz(dst.type) / type_sz(src.type);
6634 offset(src, bld, first_component),
6635 type_sz(src.type) * bld.dispatch_width() * components));
6638 brw_reg_type_from_bit_size(8 * type_sz(src.type),
6645 retype(offset(src, bld, i + first_component), shuffle_type));
6649 unsigned size_ratio = type_sz(src.type) / type_sz(dst.type);
6652 offset(src, bld, first_component / size_ratio),
6653 type_sz(src.type) * bld.dispatch_width() *
6662 subscript(offset(src, bld, (first_component + i) / size_ratio),
6673 const fs_reg &src,
6677 assert(type_sz(src.type) == 4);
6688 shuffle_src_to_dst(bld, dst, src, first_component, components);