Lines Matching refs:add
114 bool read_dreg = now->add && bi_opcode_props[now->add->op].sr_read;
115 bool write_dreg = prev->add && bi_opcode_props[prev->add->op].sr_write;
123 if (now->add) {
124 bi_foreach_src(now->add, src) {
128 if (now->add->op == BI_OPCODE_BLEND && src == 4)
132 bi_assign_slot_read(&now->regs, (now->add)->src[src]);
140 if (prev->add && (!write_dreg || prev->add->op == BI_OPCODE_ATEST)) {
141 bi_index idx = prev->add->dest[0];
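
The matches at file lines 114-141 appear to come from the slot-assignment step of a Bifrost packer (the identifiers match Mesa's bi_pack.c): ops whose bi_opcode_props entry sets sr_read/sr_write go through the staging ("data") register path rather than the ordinary read/write slots. Below is a minimal compilable sketch of that control flow, assuming reduced stand-in types rather than the real Mesa definitions; the skip of source 0 when read_dreg is set, and the recording of the previous destination as a write slot, are assumptions, since the matches do not show those lines.

    #include <stdbool.h>

    /* Reduced stand-ins for the compiler types; not the real Mesa definitions. */
    enum bi_opcode { BI_OPCODE_BLEND, BI_OPCODE_ATEST, BI_OPCODE_OTHER };

    struct bi_op_props  { bool sr_read, sr_write; };
    struct bi_index     { unsigned value; bool is_register; };
    struct bi_instr     { enum bi_opcode op; unsigned nr_srcs;
                          struct bi_index src[5]; struct bi_index dest[1]; };
    struct bi_registers { unsigned write_slot; };
    struct bi_tuple     { struct bi_instr *add; struct bi_registers regs; };

    static const struct bi_op_props bi_opcode_props[] = {
       [BI_OPCODE_BLEND] = { .sr_read = true,  .sr_write = true  },
       [BI_OPCODE_ATEST] = { .sr_read = false, .sr_write = true  },
       [BI_OPCODE_OTHER] = { .sr_read = false, .sr_write = false },
    };

    static void bi_assign_slot_read(struct bi_registers *regs, struct bi_index src)
    {
       /* The real pass allocates one of the read slots here; elided. */
       (void) regs; (void) src;
    }

    void assign_add_slots(struct bi_tuple *now, struct bi_tuple *prev)
    {
       bool read_dreg  = now->add  && bi_opcode_props[now->add->op].sr_read;
       bool write_dreg = prev->add && bi_opcode_props[prev->add->op].sr_write;

       if (now->add) {
          for (unsigned src = 0; src < now->add->nr_srcs; ++src) {
             /* Source 4 of BLEND gets no read slot (file line 128). */
             if (now->add->op == BI_OPCODE_BLEND && src == 4)
                continue;

             /* Assumption: the staging source bypasses the slot mechanism
              * when read_dreg is set; the matches do not show this check. */
             if (src == 0 && read_dreg)
                continue;

             bi_assign_slot_read(&now->regs, now->add->src[src]);
          }
       }

       /* The previous ADD's destination is given a write slot only when it
        * does not go through the data-register path, ATEST being the noted
        * exception (file line 140). Recording idx here is an assumption;
        * the matches stop at the declaration on file line 141. */
       if (prev->add && (!write_dreg || prev->add->op == BI_OPCODE_ATEST)) {
          struct bi_index idx = prev->add->dest[0];
          if (idx.is_register)
             now->regs.write_slot = idx.value;
       }
    }
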
203 * encoded by the first), so this does not add additional
329 bool sr_read = tuple->add &&
330 bi_opcode_props[(tuple->add)->op].sr_read;
339 uint64_t add = bi_pack_add(tuple->add,
340 bi_get_src_new(tuple->add, &tuple->regs, sr_read + 0),
341 bi_get_src_new(tuple->add, &tuple->regs, sr_read + 1),
342 bi_get_src_new(tuple->add, &tuple->regs, sr_read + 2),
345 if (tuple->add) {
346 bi_instr *add = tuple->add;
348 bool sr_write = bi_opcode_props[add->op].sr_write &&
349 !bi_is_null(add->dest[0]);
351 if (sr_read && !bi_is_null(add->src[0])) {
352 assert(add->src[0].type == BI_INDEX_REGISTER);
353 clause->staging_register = add->src[0].value;
356 assert(bi_is_equiv(add->src[0], add->dest[0]));
358 assert(add->dest[0].type == BI_INDEX_REGISTER);
359 clause->staging_register = add->dest[0].value;
364 .lo = reg | (fma << 35) | ((add & 0b111111) << 58),
365 .hi = add >> 6
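
The matches at file lines 329-365 cover tuple packing. The sr_read + 0/1/2 offsets at lines 340-342 suggest that when the ADD op reads a staging register, the staging source sits in src[0] and the encodable operands start one slot later. Lines 345-359 pick the clause's staging register from either src[0] (staging read) or dest[0] (staging write), and lines 364-365 split the packed words so the FMA word lands at bit 35 of lo while only the low 6 bits of the ADD word fit in lo, the rest spilling into hi (consistent with Bifrost's 78-bit tuples: 35-bit register block, 23-bit FMA, 20-bit ADD). A compilable sketch of those two pieces follows, with reduced stand-in types, sr_read/sr_write folded into the instruction struct instead of an opcode-properties table, and the sr_write guards around the asserts inferred rather than shown in the matches.

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Reduced stand-ins; not the real Mesa definitions. */
    struct bi_index        { unsigned value; bool is_register; bool null; };
    struct bi_instr        { struct bi_index src[4]; struct bi_index dest[1];
                             bool sr_read, sr_write; };
    struct bi_clause       { unsigned staging_register; };
    struct bi_packed_tuple { uint64_t lo, hi; };

    static bool bi_is_null(struct bi_index idx)                   { return idx.null; }
    static bool bi_is_equiv(struct bi_index a, struct bi_index b) { return a.value == b.value; }

    /* Staging register selection (file lines 345-359): a staging read names
     * it in src[0], a staging write in dest[0], and an op that does both
     * must use the same register for each. */
    void pick_staging_register(struct bi_clause *clause, const struct bi_instr *add)
    {
       bool sr_read  = add->sr_read;
       bool sr_write = add->sr_write && !bi_is_null(add->dest[0]);

       if (sr_read && !bi_is_null(add->src[0])) {
          assert(add->src[0].is_register);
          clause->staging_register = add->src[0].value;

          if (sr_write)
             assert(bi_is_equiv(add->src[0], add->dest[0]));
       } else if (sr_write) {
          assert(add->dest[0].is_register);
          clause->staging_register = add->dest[0].value;
       }
    }

    /* The lo/hi split (file lines 364-365): the register block occupies the
     * low bits of lo, the FMA word starts at bit 35, and only the low 6 bits
     * of the ADD word still fit in lo, so the remainder carries into hi. */
    struct bi_packed_tuple pack_tuple_words(uint64_t reg, uint64_t fma, uint64_t add)
    {
       return (struct bi_packed_tuple) {
          .lo = reg | (fma << 35) | ((add & 0b111111) << 58),
          .hi = add >> 6,
       };
    }
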
649 bi_instr *add = clause->tuples[i].add;
654 if (add && add->op == BI_OPCODE_CLPER_OLD_I32)
656 else if (add && add->op == BI_OPCODE_CLPER_I32)
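
The matches at file lines 649-656 only show a scan over the clause's tuples that tells the two CLPER variants apart by the opcode in the ADD slot; the branch bodies are not part of the matches. The sketch below therefore leaves the actions as placeholders (handle_clper_old and handle_clper are hypothetical names, not Mesa functions), and the clause/tuple structs are reduced stand-ins.

    enum bi_opcode { BI_OPCODE_CLPER_OLD_I32, BI_OPCODE_CLPER_I32, BI_OPCODE_OTHER };

    struct bi_instr  { enum bi_opcode op; };
    struct bi_tuple  { struct bi_instr *add; };
    struct bi_clause { unsigned tuple_count; struct bi_tuple tuples[8]; };

    /* Hypothetical placeholders; the real branch bodies are not in the matches. */
    static void handle_clper_old(struct bi_instr *add) { (void) add; }
    static void handle_clper(struct bi_instr *add)     { (void) add; }

    void scan_clause_for_clper(struct bi_clause *clause)
    {
       for (unsigned i = 0; i < clause->tuple_count; ++i) {
          struct bi_instr *add = clause->tuples[i].add;

          if (add && add->op == BI_OPCODE_CLPER_OLD_I32)
             handle_clper_old(add);
          else if (add && add->op == BI_OPCODE_CLPER_I32)
             handle_clper(add);
       }
    }
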
720 const bi_instr *ins = tuple->add;