Lines Matching defs:fs_inst

49                                        const fs_inst *inst);
52 fs_inst::init(enum opcode opcode, uint8_t exec_size, const fs_reg &dst,
93 fs_inst::fs_inst()
98 fs_inst::fs_inst(enum opcode opcode, uint8_t exec_size)
103 fs_inst::fs_inst(enum opcode opcode, uint8_t exec_size, const fs_reg &dst)
108 fs_inst::fs_inst(enum opcode opcode, uint8_t exec_size, const fs_reg &dst,
115 fs_inst::fs_inst(enum opcode opcode, uint8_t exec_size, const fs_reg &dst,
122 fs_inst::fs_inst(enum opcode opcode, uint8_t exec_size, const fs_reg &dst,
129 fs_inst::fs_inst(enum opcode opcode, uint8_t exec_width, const fs_reg &dst,
135 fs_inst::fs_inst(const fs_inst &that)
145 fs_inst::~fs_inst()
151 fs_inst::resize_sources(uint8_t num_sources)
194 fs_inst *inst = bld.emit(FS_OPCODE_VARYING_PULL_CONSTANT_LOAD_LOGICAL,
220 fs_inst::is_send_from_grf() const
245 fs_inst::is_control_source(unsigned arg) const
291 fs_inst::is_payload(unsigned arg) const
343 fs_inst::has_source_and_destination_hazard() const
421 fs_inst::can_do_source_mods(const struct intel_device_info *devinfo) const
454 fs_inst::can_do_cmod()
473 fs_inst::can_change_types() const
643 fs_inst::is_partial_write() const
652 fs_inst::components_read(unsigned i) const
880 fs_inst::size_read(int arg) const
988 flag_mask(const fs_inst *inst, unsigned width)
1017 fs_inst::flags_read(const intel_device_info *devinfo) const
1038 fs_inst::flags_written(const intel_device_info *devinfo) const
1066 fs_inst::implied_mrf_writes() const
1530 fs_inst *inst;
1533 foreach_in_list_reverse(fs_inst, prev, &this->instructions) {
1643 fs_inst *send = ubld.group(send_width, 0).emit(SHADER_OPCODE_SEND,
1661 foreach_block_and_inst(block, fs_inst, inst, cfg) {
1978 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2013 fs_visitor::convert_attr_sources_to_hw_regs(fs_inst *inst)
2066 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2077 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2092 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2107 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2156 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2172 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2249 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
2330 foreach_block_and_inst(block, const fs_inst, inst, cfg) {
2359 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2482 foreach_block_and_inst_safe (block, fs_inst, inst, cfg) {
2535 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
2845 foreach_block_and_inst(block, fs_inst, inst, cfg) {
2849 fs_inst *load_payload = (fs_inst *) inst->prev;
2905 foreach_block_and_inst_safe(block, fs_inst, send, cfg) {
2918 fs_inst *lp = (fs_inst *) send->prev;
2943 fs_inst *lp2 =
2978 foreach_block_and_inst(block, fs_inst, inst, cfg) {
3046 fs_inst *halt_target = NULL;
3048 foreach_block_and_inst(block, fs_inst, inst, cfg) {
3065 for (fs_inst *prev = (fs_inst *) halt_target->prev;
3067 prev = (fs_inst *) halt_target->prev) {
3112 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
3138 foreach_inst_in_block_reverse_starting_from(fs_inst, scan_inst, inst) {
3229 foreach_inst_in_block_reverse_starting_from(fs_inst, scan_inst, inst) {
3297 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
3347 fs_inst *mov;
3363 fs_inst *write = NULL;
3417 fs_inst *last_mrf_move[BRW_MAX_MRF(devinfo->ver)];
3426 foreach_block_and_inst_safe (block, fs_inst, inst, cfg) {
3433 fs_inst *prev_inst = last_mrf_move[inst->dst.nr];
3514 foreach_inst_in_block_safe (fs_inst, inst, block) {
3535 clear_deps_for_inst_src(fs_inst *inst, bool *deps, int first_grf, int grf_len)
3573 fs_inst *inst)
3590 foreach_inst_in_block_reverse_starting_from(fs_inst, scan_inst, inst) {
3644 fs_visitor::insert_gfx4_post_send_dependency_workarounds(bblock_t *block, fs_inst *inst)
3656 foreach_inst_in_block_starting_from(fs_inst, scan_inst, inst) {
3701 foreach_block_and_inst(block, fs_inst, inst, cfg) {
3732 foreach_block_and_inst (block, fs_inst, inst, cfg) {
3818 foreach_block_and_inst_safe (block, fs_inst, inst, cfg) {
3922 fs_visitor::lower_mul_dword_inst(fs_inst *inst, bblock_t *block)
4065 fs_visitor::lower_mul_qword_inst(fs_inst *inst, bblock_t *block)
4095 fs_inst *mul = ibld.MUL(acc,
4128 fs_visitor::lower_mulh_inst(fs_inst *inst, bblock_t *block)
4147 fs_inst *mul = ibld.MUL(acc, inst->src[0], inst->src[1]);
4148 fs_inst *mach = ibld.MACH(inst->dst, inst->src[0], inst->src[1]);
4197 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
4249 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
4285 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
4322 fs_inst *add = ibld.ADD(inst->dst, acc, inst->src[0]);
4332 fs_inst *add;
4351 fs_inst *add = ibld.ADD(inst->dst, inst->src[0], inst->src[1]);
4393 brw_fb_write_msg_control(const fs_inst *inst,
4428 brw_emit_predicate_on_sample_mask(const fs_builder &bld, fs_inst *inst)
4479 fs_inst *mov = b.MOV(offset(result, b, i), brw_imm_ud(~0));
4491 is_mixed_float_with_fp32_dst(const fs_inst *inst)
4511 is_mixed_float_with_packed_fp16_dst(const fs_inst *inst)
4549 const fs_inst *inst)
4748 const fs_inst *inst)
4809 const fs_inst *inst)
5125 needs_src_copy(const fs_builder &lbld, const fs_inst *inst, unsigned i)
5140 emit_unzip(const fs_builder &lbld, fs_inst *inst, unsigned i)
5182 needs_dst_copy(const fs_builder &lbld, const fs_inst *inst)
5230 fs_inst *inst)
5285 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
5360 fs_inst split_inst = *inst;
5417 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
5448 fs_inst *mov = ibld.at(block, inst->next).group(8, g)
5478 lower_derivative(fs_visitor *v, bblock_t *block, fs_inst *inst,
5508 foreach_block_and_inst(block, fs_inst, inst, cfg) {
5546 foreach_block_and_inst_safe(block, fs_inst, inst, cfg) {
5651 const fs_inst *inst = (const fs_inst *)be_inst;
6215 foreach_block_and_inst_safe (block, fs_inst, inst, cfg) {
6257 foreach_block_and_inst_safe (block, fs_inst, inst, cfg) {
6271 needs_dummy_fence(const intel_device_info *devinfo, fs_inst *inst)
6306 foreach_block_and_inst_safe (block, fs_inst, inst, cfg) {
6320 fs_inst *dummy_fence = ubld.emit(SHADER_OPCODE_MEMORY_FENCE,
6345 static const fs_inst *
6348 foreach_block_and_inst(block, fs_inst, inst, v->cfg) {
6378 const fs_inst *halt_start = find_halt_control_flow_region_start(this);
6392 foreach_inst_in_block_reverse_safe(fs_inst, inst, block) {
6504 * of fs_inst *. This way, we can reset it between scheduling passes to
6508 fs_inst **inst_arr = ralloc_array(mem_ctx, fs_inst *, num_insts);
6511 foreach_block_and_inst(block, fs_inst, inst, cfg) {
6751 fs_inst *inst = bld.emit(SHADER_OPCODE_URB_WRITE_LOGICAL,
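
Most of the matches above are instances of the foreach_block_and_inst / foreach_block_and_inst_safe iterators walking the shader's CFG. For reference, the sketch below shows the typical shape of such a pass. It is a hypothetical example only (the helper name and the self-copy-MOV idea are not taken from the indexed file) and assumes Mesa's intel compiler headers brw_fs.h and brw_cfg.h are available.

#include "brw_fs.h"
#include "brw_cfg.h"

/* Hypothetical helper; not part of the file indexed above. */
static bool
drop_self_copy_movs_example(fs_visitor *v)
{
   bool progress = false;

   /* The _safe variant caches the next pointer, so removing the current
    * instruction while iterating is allowed.
    */
   foreach_block_and_inst_safe(block, fs_inst, inst, v->cfg) {
      if (inst->opcode == BRW_OPCODE_MOV &&
          inst->conditional_mod == BRW_CONDITIONAL_NONE &&
          !inst->saturate &&
          !inst->src[0].negate &&
          !inst->src[0].abs &&
          inst->dst.equals(inst->src[0])) {
         /* Copying a register onto itself is a no-op, so drop it. */
         inst->remove(block);
         progress = true;
      }
   }

   /* A real pass would also invalidate the affected analyses here. */
   return progress;
}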