Lines Matching defs:offset

745 parse_base_offset(opt_ctx& ctx, Instruction* instr, unsigned op_index, Temp* base, uint32_t* offset,
791 *offset = add_instr->operands[i].constantValue() * (uint32_t)(is_sub ? -1 : 1);
794 *offset = ctx.info[add_instr->operands[i].tempId()].val * (uint32_t)(is_sub ? -1 : 1);
803 *offset += offset2;
819 /* We don't need to check the constant offset because the address seems to be calculated with
820 * (offset&-4 + const_offset&-4), not (offset+const_offset)&-4.
842 /* skip &-4 before offset additions: load((a + 16) & -4, 0) */
852 uint32_t offset;
859 } else if (parse_base_offset(ctx, instr.get(), 1, &base, &offset, prevent_overflow) &&
860 base.regClass() == s1 && offset <= 0xFFFFF && ctx.program->gfx_level >= GFX9 &&
861 offset % 4u == 0) {
866 smem.operands[1] = Operand::c32(offset);
873 new_instr->operands[1] = Operand::c32(offset);
889 /* skip &-4 after offset additions: load(a & -4, 16) */
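The two "skip &-4" comments above rely on the masking and the constant addition commuting when the constant is a multiple of 4, which is also why the SMEM fold requires offset % 4 == 0. A minimal standalone sketch (not ACO code; the brute-force check is purely illustrative) of that equivalence:

// Standalone sketch, not ACO code: shows why a 4-aligned constant can be
// moved across the "& -4" masking of a dword-granular address, i.e.
// load((a + 16) & -4, 0) and load(a & -4, 16) hit the same address.
#include <cassert>
#include <cstdint>

int main()
{
   const uint32_t c = 16; /* any multiple of 4, cf. the offset % 4 == 0 check */
   for (uint32_t a = 0; a < 4096; a++) {
      uint32_t masked_after_add = (a + c) & ~3u;  /* load((a + c) & -4, 0) */
      uint32_t masked_before_add = (a & ~3u) + c; /* load(a & -4, c) */
      assert(masked_after_add == masked_before_add);
   }
   return 0;
}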
1029 unsigned offset = instr->operands[1].constantValue() * size;
1031 return SubdwordSel(size, offset, sext);
1036 unsigned offset = instr->operands[1].constantValue() * size;
1038 return SubdwordSel(size, offset, false);
1055 unsigned offset = instr->operands[1].constantValue() * size;
1056 return SubdwordSel(size, offset, false);
1089 /* the outer offset must be within extracted range */
1090 if (instrSel.offset() >= sel.size())
1121 switch (sel.offset()) {
1128 sel.offset() == 0 &&
1138 if (sel.offset())
1144 unsigned offset = sel.offset() + instrSel.offset();
1148 instr->operands[1] = Operand::c32(offset / size);
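The extract-combining lines above collapse two stacked subdword selections into one: the outer selection must start inside the inner one, the byte offsets then simply add, and the rewritten operand stores that byte offset divided by the selection size (an index, not a byte count). A standalone sketch with a simplified, assumed stand-in for SubdwordSel:

// Standalone sketch (assumed simplification of ACO's SubdwordSel): composing
// two byte-level selections of a 32-bit value into a single selection.
#include <cassert>
#include <cstdint>

struct Sel {
   unsigned size;   /* bytes selected: 1 or 2 */
   unsigned offset; /* byte offset within the dword */
};

static uint32_t apply(Sel s, uint32_t v)
{
   uint32_t mask = s.size == 4 ? ~0u : ((1u << (8 * s.size)) - 1);
   return (v >> (8 * s.offset)) & mask;
}

int main()
{
   uint32_t v = 0xDDCCBBAAu;
   Sel inner = {2, 2};             /* bytes 2..3 -> 0xDDCC */
   Sel outer = {1, 1};             /* byte 1 of that result */
   assert(outer.offset < inner.size); /* the outer offset must lie inside the inner range */
   Sel combined = {outer.size, inner.offset + outer.offset}; /* byte 3 of the dword */
   assert(apply(outer, apply(inner, v)) == apply(combined, v));
   assert(apply(combined, v) == 0xDD);
   /* a rewritten p_extract would store the index offset / size, here 3 / 1 = 3 */
   return 0;
}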
1254 is_scratch_offset_valid(opt_ctx& ctx, Instruction* instr, int32_t offset)
1261 if (negative_unaligned_scratch_offset_bug && has_vgpr_offset && offset < 0 && offset % 4)
1264 return offset >= min && offset <= max;
1411 uint32_t offset;
1424 mubuf.offset + info.val < 4096) {
1427 mubuf.offset += info.val;
1430 } else if (i == 2 && info.is_constant_or_literal(32) && mubuf.offset + info.val < 4096) {
1432 mubuf.offset += info.val;
1435 parse_base_offset(ctx, instr.get(), i, &base, &offset,
1437 base.regClass() == v1 && mubuf.offset + offset < 4096) {
1440 mubuf.offset += offset;
1442 } else if (i == 2 && parse_base_offset(ctx, instr.get(), i, &base, &offset, true) &&
1443 base.regClass() == s1 && mubuf.offset + offset < 4096) {
1445 mubuf.offset += offset;
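The repeated "< 4096" guards above exist because the MUBUF immediate offset field is a 12-bit unsigned value, so another constant can only be folded in while the sum still fits. A minimal sketch with an assumed stand-in struct:

// Minimal sketch (assumed struct, not ACO code): folding a constant address
// component into a 12-bit unsigned MUBUF-style immediate offset, mirroring
// the mubuf.offset + offset < 4096 guards above.
#include <cstdint>
#include <cstdio>

struct MubufLike {
   uint32_t offset; /* 12-bit unsigned immediate: 0..4095 */
};

static bool try_fold_const(MubufLike& m, uint32_t add)
{
   if (m.offset + add >= 4096) /* would overflow the immediate field */
      return false;
   m.offset += add;
   return true;
}

int main()
{
   MubufLike m = {4000};
   printf("%d\n", try_fold_const(m, 64)); /* 1: fits, offset becomes 4064 */
   printf("%d\n", try_fold_const(m, 64)); /* 0: 4064 + 64 would exceed 4095 */
   return 0;
}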
1454 uint32_t offset;
1458 if (i <= 1 && parse_base_offset(ctx, instr.get(), i, &base, &offset, false) &&
1460 is_scratch_offset_valid(ctx, instr.get(), scratch.offset + (int32_t)offset)) {
1462 scratch.offset += (int32_t)offset;
1466 is_scratch_offset_valid(ctx, NULL, scratch.offset + (int32_t)info.val)) {
1469 scratch.offset += (int32_t)info.val;
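Scratch/global immediate offsets are signed, their legal range comes from the device, and on affected parts a negative, non-dword-aligned immediate combined with a VGPR address must additionally be rejected; that is what is_scratch_offset_valid checks before the folds above apply the offset. A standalone sketch (the bug flag and the [-4096, 4095] range are illustrative assumptions):

// Standalone sketch (assumed bounds and flag, not ACO code) of the scratch
// offset validity test used by the folds above.
#include <cstdint>

static bool is_scratch_offset_valid(int32_t offset, bool has_vgpr_offset,
                                    bool negative_unaligned_bug,
                                    int32_t min, int32_t max)
{
   if (negative_unaligned_bug && has_vgpr_offset && offset < 0 && offset % 4)
      return false;
   return offset >= min && offset <= max;
}

int main()
{
   /* e.g. a signed immediate range of [-4096, 4095] (illustrative) */
   bool ok1 = is_scratch_offset_valid(-8, true, true, -4096, 4095);     /* true */
   bool ok2 = is_scratch_offset_valid(-6, true, true, -4096, 4095);     /* false: negative and unaligned */
   bool ok3 = is_scratch_offset_valid(5000, false, false, -4096, 4095); /* false: above max */
   return (ok1 && !ok2 && !ok3) ? 0 : 1;
}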
1479 uint32_t offset;
1482 parse_base_offset(ctx, instr.get(), i, &base, &offset, false) &&
1504 if ((offset & mask) == 0 && ds.offset0 + (offset >> shifts) <= 255 &&
1505 ds.offset1 + (offset >> shifts) <= 255) {
1507 ds.offset0 += offset >> shifts;
1508 ds.offset1 += offset >> shifts;
1511 if (ds.offset0 + offset <= 65535) {
1513 ds.offset0 += offset;
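Two-address DS instructions (e.g. ds_read2_b32) encode two 8-bit offsets in element-size units, while single-address forms take one 16-bit byte offset; the fold above therefore requires the byte offset to be element-aligned and both scaled fields to stay within 0..255, falling back to the 16-bit limit otherwise. A minimal sketch of the two-offset case with an assumed struct:

// Minimal sketch (assumed struct, not ACO code) of the two-offset DS fold.
#include <cstdint>
#include <cstdio>

struct DsLike {
   uint16_t offset0; /* also holds the 16-bit offset of single-address forms */
   uint8_t offset1;
};

static bool fold_ds2(DsLike& ds, uint32_t offset, unsigned shifts /* log2(element size) */)
{
   uint32_t mask = (1u << shifts) - 1;
   if ((offset & mask) == 0 && ds.offset0 + (offset >> shifts) <= 255 &&
       ds.offset1 + (offset >> shifts) <= 255) {
      ds.offset0 += offset >> shifts;
      ds.offset1 += offset >> shifts;
      return true;
   }
   return false;
}

int main()
{
   DsLike ds = {0, 1};  /* e.g. ds_read2_b32 reading dwords 0 and 1 */
   fold_ds2(ds, 64, 2); /* 64 bytes = 16 dwords: offset0 = 16, offset1 = 17 */
   printf("%u %u\n", (unsigned)ds.offset0, (unsigned)ds.offset1);
   return 0;
}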
1570 unsigned offset = 0;
1573 bool aligned = offset % 4 == 0 || op.bytes() < 4;
1574 offset += op.bytes();
1673 unsigned offset = 0;
1676 if (offset < dst_offset) {
1677 offset += op.bytes();
1679 } else if (offset != dst_offset || op.bytes() != instr->definitions[0].bytes()) {
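Both loops above keep a running byte offset while walking the operands of a vector-building instruction: the first to check 4-byte alignment of each component, the second to find the operand that covers exactly the requested byte range. A standalone sketch of the second pattern (struct and helper are assumed):

// Standalone sketch (assumed types, not ACO code): scan a create_vector's
// operands with a running byte offset to find the one that lines up exactly
// with the requested byte range.
#include <vector>

struct Op {
   unsigned bytes;
   int id;
};

/* Returns the id of the operand placed exactly at dst_offset with size
 * dst_bytes, or -1 if no operand lines up. */
static int find_component(const std::vector<Op>& ops, unsigned dst_offset, unsigned dst_bytes)
{
   unsigned offset = 0;
   for (const Op& op : ops) {
      if (offset < dst_offset) {
         offset += op.bytes;
         continue;
      }
      if (offset != dst_offset || op.bytes != dst_bytes)
         return -1;
      return op.id;
   }
   return -1;
}

int main()
{
   std::vector<Op> ops = {{4, 0}, {2, 1}, {2, 2}};
   return find_component(ops, 4, 2) == 1 ? 0 : 1; /* the 2-byte operand at byte 4 */
}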
3642 vop3p->opsel_lo |= (instr->isSDWA() && instr->sdwa().sel[i].offset()) << (is_add + i);
3743 if (conv->isSDWA() && conv->sdwa().sel[0].offset() == 2)
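An SDWA selection with byte offset 2 reads the high 16-bit half of a 32-bit source; when such an instruction is turned into a packed VOP3P form, that half-word choice is carried over as an opsel bit, which is what the opsel_lo update above encodes. A small sketch under that reading (helper and names are assumptions):

// Sketch (assumed mapping, not ACO code): an SDWA byte offset of 2 selects
// the high 16-bit half; in a packed form this becomes an opsel bit.
#include <cassert>
#include <cstdint>

static uint16_t read_half(uint32_t reg, unsigned byte_offset)
{
   return (uint16_t)(reg >> (8 * byte_offset)); /* offset 0 = low half, 2 = high half */
}

int main()
{
   uint32_t reg = 0xBEEF1234u;
   unsigned sel_offset = 2;                 /* SDWA sel picking the high half */
   unsigned opsel_bit = sel_offset ? 1 : 0; /* carried over as an opsel bit */
   assert(read_half(reg, sel_offset) == 0xBEEF);
   assert(opsel_bit == 1);
   return 0;
}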
4311 for (unsigned i = 0, offset = 0; i < instr->definitions.size();
4312 offset += instr->definitions[i++].bytes()) {
4316 split_offset = offset;