Lines Matching defs:block
68 /* We want to evaluate each block from the position of each of its
69 * predecessor blocks, so that the flags set are the union of
74 * that we can't simply recursively process each predecessor block
75 * before legalizing the current block.
78 * results converge. If the output state of a given block changes
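
The comment above describes a fixed-point dataflow iteration. Below is a compilable sketch of that driver loop under simplified stand-in types; the real pass keeps its in/out flag state in struct ir3_legalize_block_data, and legalize_block() does the per-block work.

#include <stdbool.h>

struct fp_blk {
   unsigned local_flags;            /* flags this block sets itself */
   unsigned out_state;              /* flags live-out, recomputed below */
   struct fp_blk *preds[4];
   unsigned num_preds;
};

/* recompute one block from the union of its predecessors' output
 * states; returns true if this block's output state changed */
static bool
legalize_one(struct fp_blk *b)
{
   unsigned in_state = 0;
   for (unsigned i = 0; i < b->num_preds; i++)
      in_state |= b->preds[i]->out_state;

   unsigned new_out = in_state | b->local_flags;
   bool changed = new_out != b->out_state;
   b->out_state = new_out;
   return changed;
}

static void
legalize_all(struct fp_blk *blocks, unsigned n)
{
   bool progress;
   do {
      progress = false;
      for (unsigned i = 0; i < n; i++)
         progress |= legalize_one(&blocks[i]);
   } while (progress);              /* iterate until the states converge */
}

Because the per-block state only ever grows (a union over a finite flag set), the loop is guaranteed to terminate.
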
84 legalize_block(struct ir3_legalize_ctx *ctx, struct ir3_block *block)
86 struct ir3_legalize_block_data *bd = block->data;
100 for (unsigned i = 0; i < block->predecessors_count; i++) {
101 struct ir3_block *predecessor = block->predecessors[i];
117 for (unsigned i = 0; i < block->physical_predecessors_count; i++) {
118 struct ir3_block *predecessor = block->physical_predecessors[i];
127 foreach_instr (n, &block->instr_list) {
135 /* Either inputs are in the first block or we expect inputs to be released
139 block == ir3_after_preamble(block->shader));
144 list_replace(&block->instr_list, &instr_list);
145 list_inithead(&block->instr_list);
227 nop = ir3_NOP(block);
233 if (list_is_empty(&block->instr_list) && (opc_cat(n->opc) >= 5))
234 ir3_NOP(block);
251 list_addtail(&n->node, &block->instr_list);
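
Several passes in this file use the same list-rebuild idiom visible above: steal the block's instruction list, re-initialize it, then re-append each instruction so that nops can be interleaved. A self-contained sketch, with a minimal singly-linked list standing in for util/list.h:

#include <stdbool.h>
#include <stddef.h>

struct li_instr { struct li_instr *next; bool needs_nop_before; };
struct li_blk   { struct li_instr *head, *tail; };

static void
li_append(struct li_blk *b, struct li_instr *n)
{
   n->next = NULL;
   if (b->tail)
      b->tail->next = n;
   else
      b->head = n;
   b->tail = n;
}

/* steal the old list, re-init the block's list, then re-append each
 * instruction, interleaving nops where needed (like ir3_NOP() above) */
static void
li_rebuild(struct li_blk *b, struct li_instr *(*make_nop)(void))
{
   struct li_instr *old = b->head;
   b->head = b->tail = NULL;

   for (struct li_instr *n = old, *next; n; n = next) {
      next = n->next;
      if (n->needs_nop_before)
         li_append(b, make_nop());
      li_append(b, n);
   }
}
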
268 ir3_NOP(block)->flags |= IR3_INSTR_SS;
317 baryf = ir3_instr_create(block, OPC_BARY_F, 1, 2);
347 baryf = ir3_instr_create(block, OPC_BARY_F, 1, 2);
354 list_add(&baryf->node, &block->instr_list);
363 for (unsigned i = 0; i < ARRAY_SIZE(block->successors); i++) {
364 if (!block->successors[i])
366 struct ir3_legalize_block_data *pbd = block->successors[i]->data;
384 apply_fine_deriv_macro(struct ir3_legalize_ctx *ctx, struct ir3_block *block)
391 list_replace(&block->instr_list, &instr_list);
392 list_inithead(&block->instr_list);
395 list_addtail(&n->node, &block->instr_list);
411 * in the block. We take advantage of this as we resolve the
433 * that the first instruction in the target block is itself
446 resolve_dest_block(struct ir3_block *block)
448 /* special case for last block: */
449 if (!block->successors[0])
450 return block;
453 * in the target block yet, so conditions to resolve
454 * the dest to the dest block's successor are:
457 * (2) (block-is-empty || only-instr-is-jump)
459 if (block->successors[1] == NULL) {
460 if (list_is_empty(&block->instr_list)) {
461 return block->successors[0];
462 } else if (list_length(&block->instr_list) == 1) {
464 list_first_entry(&block->instr_list, struct ir3_instruction, node);
472 if (block->successors[0]->index <= block->index)
473 return block;
474 return block->successors[0];
478 return block;
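
Read together, the fragments above reconstruct the shape of resolve_dest_block(). A compilable sketch under simplified stand-in types (the real code inspects struct ir3_instruction for the jump opcode):

#include <stddef.h>

enum rd_opc { RD_OPC_JUMP, RD_OPC_OTHER };

struct rd_blk {
   struct rd_blk *successors[2];
   unsigned index;               /* position in final block order */
   unsigned num_instrs;
   enum rd_opc first_opc;        /* opcode of the sole instruction, if any */
};

static struct rd_blk *
resolve_dest(struct rd_blk *b)
{
   /* special case for last block: nothing to forward to */
   if (!b->successors[0])
      return b;

   if (b->successors[1] == NULL) {
      /* (1) single successor, and (2) empty or a lone jump: */
      if (b->num_instrs == 0)
         return b->successors[0];
      if (b->num_instrs == 1 && b->first_opc == RD_OPC_JUMP) {
         /* don't follow backward edges, which could recurse forever */
         if (b->successors[0]->index <= b->index)
            return b;
         return b->successors[0];
      }
   }
   return b;
}
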
487 * fall through to any of the physical successors of this block. But we can
544 struct ir3_block *cur_block = instr->block;
586 foreach_block (block, &ir->block_list)
587 block->index = index++;
589 foreach_block (block, &ir->block_list) {
590 foreach_instr (instr, &block->instr_list) {
598 /* Exit early if we deleted a block to avoid iterator
606 /* Detect the case where the block ends either with:
607 * - A single unconditional jump to the next block.
609 * them jumps to the next block.
610 * We can remove the one that jumps to the next block in either case.
612 if (list_is_empty(&block->instr_list))
617 list_last_entry(&block->instr_list, struct ir3_instruction, node);
618 if (!list_is_singular(&block->instr_list))
632 if (&tblock->node == block->node.next) {
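
A simplified, compilable sketch of the removable-jump test the comment above describes; the real pass first retargets each jump through resolve_dest_block(), and the "next block" test corresponds to the node comparison visible above:

#include <stdbool.h>

enum oj_opc { OJ_OPC_JUMP /* unconditional */, OJ_OPC_B /* conditional */ };

struct oj_blk;
struct oj_instr { enum oj_opc opc; struct oj_blk *target; };
struct oj_blk   { struct oj_blk *layout_next; };

/* a trailing unconditional jump whose target is the block laid out
 * immediately after this one is a fall-through and can be deleted;
 * per the comment above, the same holds when the block ends in two
 * branches and one of them targets the next block */
static bool
jump_is_redundant(const struct oj_instr *last, const struct oj_blk *block)
{
   return last->opc == OJ_OPC_JUMP && last->target == block->layout_next;
}
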
646 foreach_block (block, &ir->block_list)
647 foreach_instr (instr, &block->instr_list)
657 mark_jp(struct ir3_block *block)
659 /* We only call this on the end block (in kill_sched) or after retargeting
663 assert(!list_is_empty(&block->instr_list));
666 list_first_entry(&block->instr_list, struct ir3_instruction, node);
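
mark_jp() itself is small; a sketch with stand-in types (IR3_INSTR_JP becomes a local flag here), matching the assert-then-flag-first-instruction shape above:

#include <assert.h>

#define MJ_FLAG_JP (1u << 0)   /* stand-in for IR3_INSTR_JP */

struct mj_instr { unsigned flags; };
struct mj_blk   { struct mj_instr *first; unsigned num_instrs; };

static void
mark_jp_sketch(struct mj_blk *block)
{
   /* callers guarantee the block is non-empty, so the (jp) flag can
    * always land on a real first instruction */
   assert(block->num_instrs > 0);
   block->first->flags |= MJ_FLAG_JP;
}
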
680 foreach_block (block, &ir->block_list) {
682 * our block. This happens if there is a predecessor to our block that may
683 * fall through to an earlier block in the physical CFG, either because it
685 * fallthrough for a block in-between that also starts with (jp) and was
688 for (unsigned i = 0; i < block->predecessors_count; i++) {
689 struct ir3_block *pred = block->predecessors[i];
693 pred->physical_successors[j]->start_ip < block->start_ip)
694 mark_jp(block);
696 /* If the predecessor just falls through to this block, we still
697 * need to check if it "falls through" by jumping to the block. This
698 * can happen if opt_jump fails and the block ends in two branches,
700 * with binning shaders after dead-code elimination) and the block
701 * before ends with a conditional branch directly to this block.
703 if (pred->physical_successors[j] == block) {
707 if (instr->cat0.target == block) {
708 mark_jp(block);
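
A compilable sketch of the predecessor scan above, under simplified types; start_ip orders blocks in the final layout, and a predecessor's trailing branch targets stand in for the cat0.target check:

#include <stdbool.h>
#include <stddef.h>

struct rc_blk {
   unsigned start_ip;
   struct rc_blk *phys_succ[2];        /* physical successors */
   struct rc_blk *branch_targets[2];   /* targets of trailing branches */
   unsigned num_branch_targets;
};

/* true if reaching "block" from "pred" may involve a jump, meaning the
 * first instruction of "block" needs the (jp) flag */
static bool
pred_forces_jp(const struct rc_blk *block, const struct rc_blk *pred)
{
   for (unsigned j = 0; j < 2 && pred->phys_succ[j]; j++) {
      /* pred may continue at an earlier block in the physical CFG, so
       * control can only come back to us via a jump: */
      if (pred->phys_succ[j]->start_ip < block->start_ip)
         return true;

      /* pred "falls through" to us by branching directly at us: */
      if (pred->phys_succ[j] == block) {
         for (unsigned t = 0; t < pred->num_branch_targets; t++) {
            if (pred->branch_targets[t] == block)
               return true;
         }
      }
   }
   return false;
}
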
720 * block immediately follows the current block (i.e. no jump required),
723 * TODO what ensures that the last write to p0.x in a block is the
729 foreach_block (block, &ir->block_list) {
730 if (block->successors[1]) {
734 if (block->brtype == IR3_BRANCH_GETONE ||
735 block->brtype == IR3_BRANCH_SHPS) {
740 assert(!block->condition);
741 if (block->brtype == IR3_BRANCH_GETONE)
742 br1 = ir3_GETONE(block);
744 br1 = ir3_SHPS(block);
745 br1->cat0.target = block->successors[1];
747 br2 = ir3_JUMP(block);
748 br2->cat0.target = block->successors[0];
750 assert(block->condition);
752 /* create "else" branch first (since "then" block should
755 br1 = ir3_instr_create(block, OPC_B, 0, 1);
757 block->condition->dsts[0];
759 br1->cat0.target = block->successors[1];
762 br2 = ir3_instr_create(block, OPC_B, 0, 1);
764 block->condition->dsts[0];
765 br2->cat0.target = block->successors[0];
767 switch (block->brtype) {
784 } else if (block->successors[0]) {
785 /* otherwise unconditional jump to next block: */
788 jmp = ir3_JUMP(block);
789 jmp->cat0.target = block->successors[0];
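
A sketch of the branch emission above with stand-in types: two successors get a conditional branch to the "else" successor followed by an unconditional jump to the "then" successor (the getone/shps variants visible above follow the same two-branch pattern), while a single successor gets just the jump. emit() here is a trivial pool allocator standing in for ir3_instr_create():

#include <stddef.h>

enum bs_opc { BS_OPC_B /* conditional */, BS_OPC_JUMP };

struct bs_blk;
struct bs_instr { enum bs_opc opc; struct bs_blk *target; };
struct bs_blk   { struct bs_blk *successors[2]; };

static struct bs_instr bs_pool[16];   /* sketch: no overflow check */
static unsigned bs_used;

static struct bs_instr *
emit(struct bs_blk *b, enum bs_opc opc)
{
   (void)b;                   /* the real code appends to b's instr list */
   struct bs_instr *in = &bs_pool[bs_used++];
   in->opc = opc;
   in->target = NULL;
   return in;
}

static void
sched_block_branches(struct bs_blk *block)
{
   if (block->successors[1]) {
      /* "else" branch first, then the unconditional jump to the "then"
       * successor: */
      emit(block, BS_OPC_B)->target = block->successors[1];
      emit(block, BS_OPC_JUMP)->target = block->successors[0];
   } else if (block->successors[0]) {
      /* otherwise just an unconditional jump to the next block: */
      emit(block, BS_OPC_JUMP)->target = block->successors[0];
   }
}

Emitting the unconditional jump even when the target ends up laid out immediately after keeps this pass simple; opt_jump deletes the redundant ones afterwards.
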
804 * block.
814 /* True if we know that this block will always eventually lead to the end
815 * block:
822 foreach_block_rev (block, &ir->block_list) {
823 for (unsigned i = 0; i < 2 && block->successors[i]; i++) {
824 if (block->successors[i]->start_ip <= block->end_ip)
831 foreach_instr_safe (instr, &block->instr_list) {
835 struct ir3_instruction *br = ir3_instr_create(block, OPC_B, 0, 1);
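
The start_ip/end_ip comparison above is a backward-edge test: with blocks numbered by instruction position in layout order, a successor that does not start strictly after this block's end is a loop edge, so the block cannot be assumed to always reach the end block. A sketch:

#include <stdbool.h>

struct be_blk {
   unsigned start_ip, end_ip;    /* instruction positions in layout order */
   struct be_blk *successors[2];
};

static bool
has_back_edge(const struct be_blk *block)
{
   for (unsigned i = 0; i < 2 && block->successors[i]; i++) {
      if (block->successors[i]->start_ip <= block->end_ip)
         return true;            /* successor at or before us: a loop */
   }
   return false;
}
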
866 foreach_block (block, &ir->block_list) {
873 list_replace(&block->instr_list, &instr_list);
874 list_inithead(&block->instr_list);
877 unsigned delay = ir3_delay_calc(block, instr, so->mergedregs);
902 ir3_NOP(block)->repeat = delay - 1;
905 list_addtail(&instr->node, &block->instr_list);
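
A sketch of the delay fill above: an instruction that must wait delay cycles gets a single preceding nop with repeat = delay - 1, since a nop with (rptN) occupies N+1 cycles. The delay itself comes from ir3_delay_calc(); the types below are stand-ins:

struct nd_instr { unsigned repeat; };

static struct nd_instr nd_nop;

static struct nd_instr *
make_nop(void)
{
   return &nd_nop;   /* the real ir3_NOP() appends to the block */
}

static void
fill_delay(unsigned delay)
{
   if (delay > 0) {
      /* one nop with repeat R covers R+1 cycles of delay */
      make_nop()->repeat = delay - 1;
   }
}
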
923 /* allocate per-block data: */
924 foreach_block (block, &ir->block_list) {
932 block->data = bd;
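
A small sketch of the per-block allocation above, assuming a simplified stand-in for struct ir3_legalize_block_data (the real struct carries the legalize in/out state that the fixed-point loop at the top of the file unions over predecessors):

#include <stdlib.h>

struct pb_data { unsigned out_state; };
struct pb_blk  { void *data; struct pb_blk *next; };

static void
alloc_block_data(struct pb_blk *head)
{
   for (struct pb_blk *b = head; b; b = b->next) {
      struct pb_data *bd = calloc(1, sizeof(*bd));
      b->data = bd;          /* freed when the pass tears down */
   }
}
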
935 /* We may have failed to pull all input loads into the first block.
942 foreach_block (block, &ir->block_list) {
943 foreach_instr (instr, &block->instr_list) {
946 if (block != start_block) {
956 /* process each block: */
959 foreach_block (block, &ir->block_list) {
960 progress |= legalize_block(ctx, block);
970 foreach_block (block, &ir->block_list) {
971 progress |= apply_fine_deriv_macro(ctx, block);