1 /**
2 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
3 * Licensed under the Apache License, Version 2.0 (the "License");
4 * you may not use this file except in compliance with the License.
5 * You may obtain a copy of the License at
6 *
7 * http://www.apache.org/licenses/LICENSE-2.0
8 *
9 * Unless required by applicable law or agreed to in writing, software
10 * distributed under the License is distributed on an "AS IS" BASIS,
11 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 * See the License for the specific language governing permissions and
13 * limitations under the License.
14 */
15
16 #ifndef COMPILER_OPTIMIZER_IR_INST_H
17 #define COMPILER_OPTIMIZER_IR_INST_H
18
19 #include <array>
20 #include <vector>
21 #include <iostream>
22 #include "constants.h"
23 #include "datatype.h"
24 #include "ir-dyn-base-types.h"
25 #include "marker.h"
26 #include "utils/arena_containers.h"
27 #include "utils/span.h"
28 #include "utils/bit_field.h"
29 #include "utils/bit_utils.h"
30 #include "utils/bit_vector.h"
31 #include "macros.h"
32 #include "mem/arena_allocator.h"
33 #include "opcodes.h"
34 #include "compiler_options.h"
35 #include "runtime_interface.h"
36 #include "spill_fill_data.h"
37
38 namespace panda::compiler {
39 class Inst;
40 class BasicBlock;
41 class Graph;
42 class GraphVisitor;
43 class VnObject;
44 class SaveStateItem;
45 class LocationsInfo;
46 using InstVector = ArenaVector<Inst *>;
47
48 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
49 #define INST_DEF(opcode, base, ...) class base;
50 // NOLINTNEXTLINE(fuchsia-multiple-inheritance)
51 OPCODE_LIST(INST_DEF)
52 #undef INST_DEF
53
54 /*
55 * Condition code, used in Compare, If[Imm] and Select[Imm] instructions.
56 *
57 * N.B. BranchElimination and Peephole rely on the order of these codes. Change carefully.
58 */
59 enum ConditionCode {
60 // All types.
61 CC_EQ = 0, // ==
62 CC_NE, // !=
63 // Signed integers and floating-point numbers.
64 CC_LT, // <
65 CC_LE, // <=
66 CC_GT, // >
67 CC_GE, // >=
68 // Unsigned integers.
69 CC_B, // <
70 CC_BE, // <=
71 CC_A, // >
72 CC_AE, // >=
73 // Compare result of bitwise AND with zero
74 CC_TST_EQ, // (lhs AND rhs) == 0
75 CC_TST_NE, // (lhs AND rhs) != 0
76 // First and last aliases.
77 CC_FIRST = CC_EQ,
78 CC_LAST = CC_TST_NE,
79 };
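// Illustrative note (not part of the original header): the helper functions below map between
// these codes. For example, switching a signed comparison to its unsigned counterpart gives
//
//     InverseSignednessConditionCode(ConditionCode::CC_LT) == ConditionCode::CC_B
//     GetInverseConditionCode(ConditionCode::CC_EQ) == ConditionCode::CC_NE
//
// both of which follow directly from the switch statements below.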
80
81 inline ConditionCode GetInverseConditionCode(ConditionCode code)
82 {
83 switch (code) {
84 case ConditionCode::CC_EQ:
85 return ConditionCode::CC_NE;
86 case ConditionCode::CC_NE:
87 return ConditionCode::CC_EQ;
88 default:
89 UNREACHABLE();
90 }
91 }
92
93 inline ConditionCode InverseSignednessConditionCode(ConditionCode code)
94 {
95 switch (code) {
96 case ConditionCode::CC_EQ:
97 return ConditionCode::CC_EQ;
98 case ConditionCode::CC_NE:
99 return ConditionCode::CC_NE;
100
101 case ConditionCode::CC_LT:
102 return ConditionCode::CC_B;
103 case ConditionCode::CC_LE:
104 return ConditionCode::CC_BE;
105 case ConditionCode::CC_GT:
106 return ConditionCode::CC_A;
107 case ConditionCode::CC_GE:
108 return ConditionCode::CC_AE;
109
110 case ConditionCode::CC_B:
111 return ConditionCode::CC_LT;
112 case ConditionCode::CC_BE:
113 return ConditionCode::CC_LE;
114 case ConditionCode::CC_A:
115 return ConditionCode::CC_GT;
116 case ConditionCode::CC_AE:
117 return ConditionCode::CC_GE;
118
119 case ConditionCode::CC_TST_EQ:
120 return ConditionCode::CC_TST_EQ;
121 case ConditionCode::CC_TST_NE:
122 return ConditionCode::CC_TST_NE;
123
124 default:
125 UNREACHABLE();
126 }
127 }
128
129 inline bool IsSignedConditionCode(ConditionCode code)
130 {
131 switch (code) {
132 case ConditionCode::CC_LT:
133 case ConditionCode::CC_LE:
134 case ConditionCode::CC_GT:
135 case ConditionCode::CC_GE:
136 return true;
137
138 case ConditionCode::CC_EQ:
139 case ConditionCode::CC_NE:
140 case ConditionCode::CC_B:
141 case ConditionCode::CC_BE:
142 case ConditionCode::CC_A:
143 case ConditionCode::CC_AE:
144 case ConditionCode::CC_TST_EQ:
145 case ConditionCode::CC_TST_NE:
146 return false;
147
148 default:
149 UNREACHABLE();
150 }
151 }
152
153 inline ConditionCode SwapOperandsConditionCode(ConditionCode code)
154 {
155 switch (code) {
156 case ConditionCode::CC_EQ:
157 case ConditionCode::CC_NE:
158 return code;
159 default:
160 UNREACHABLE();
161 }
162 }
163
164 enum class Opcode {
165 INVALID = -1,
166 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
167 #define INST_DEF(opcode, ...) opcode,
168 OPCODE_LIST(INST_DEF)
169
170 #undef INST_DEF
171 NUM_OPCODES
172 };
173
174 /**
175 * Convert opcode to its string representation
176 */
177 constexpr std::array<const char *const, static_cast<size_t>(Opcode::NUM_OPCODES)> OPCODE_NAMES = {
178 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
179 #define INST_DEF(opcode, ...) #opcode,
180 OPCODE_LIST(INST_DEF)
181 #undef INST_DEF
182 };
183
184 constexpr const char *GetOpcodeString(Opcode opc)
185 {
186 ASSERT(static_cast<int>(opc) < static_cast<int>(Opcode::NUM_OPCODES));
187 return OPCODE_NAMES[static_cast<int>(opc)];
188 }
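// Illustrative example (not from the original source): OPCODE_NAMES is built by stringizing the
// OPCODE_LIST entries, so GetOpcodeString(Opcode::Constant) yields "Constant", assuming Constant
// is an entry of OPCODE_LIST (it is referenced as Opcode::Constant further below).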
189
190 /**
191 * Instruction flags. See `instructions.yaml` section `flags` for more information.
192 */
193 namespace inst_flags {
194 namespace internal {
195 enum FlagsIndex {
196 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
197 #define FLAG_DEF(flag) flag##_INDEX,
198 FLAGS_LIST(FLAG_DEF)
199 #undef FLAG_DEF
200 FLAGS_COUNT
201 };
202 } // namespace internal
203
204 enum Flags : uint32_t {
205 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
206 #define FLAG_DEF(flag) flag = (1U << internal::flag##_INDEX),
207 FLAGS_LIST(FLAG_DEF)
208 #undef FLAG_DEF
209 FLAGS_COUNT = internal::FLAGS_COUNT,
210 NONE = 0
211 };
212
213 inline constexpr uintptr_t GetFlagsMask(Opcode opcode)
214 {
215 #define INST_DEF(OPCODE, BASE, FLAGS) FLAGS, // NOLINT(cppcoreguidelines-macro-usage)
216 // NOLINTNEXTLINE(hicpp-signed-bitwise)
217 constexpr std::array<uintptr_t, static_cast<int>(Opcode::NUM_OPCODES)> INST_FLAGS_TABLE = {OPCODE_LIST(INST_DEF)};
218 #undef INST_DEF
219 return INST_FLAGS_TABLE[static_cast<size_t>(opcode)];
220 }
221 } // namespace inst_flags
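// Illustrative sketch (an assumption-labelled example, not original code): a pass can test a
// per-opcode property directly from the generated flag mask, e.g.
//
//     bool may_throw = (inst_flags::GetFlagsMask(op) & inst_flags::CAN_THROW) != 0;
//
// CAN_THROW is assumed to be one of the flags generated from FLAGS_LIST; Inst::CanThrow() below
// performs the same check on the per-instruction copy of the mask.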
222
223 #ifndef NDEBUG
224 namespace inst_modes {
225 namespace internal {
226 enum ModeIndex {
227 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
228 #define MODE_DEF(mode) mode##_INDEX,
229 MODES_LIST(MODE_DEF)
230 #undef MODE_DEF
231 MODES_COUNT
232 };
233 } // namespace internal
234
235 enum Mode : uint8_t {
236 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
237 #define MODE_DEF(mode) mode = (1U << internal::mode##_INDEX),
238 MODES_LIST(MODE_DEF)
239 #undef MODE_DEF
240 MODES_COUNT = internal::MODES_COUNT,
241 };
242
243 inline constexpr uint8_t GetModesMask(Opcode opcode)
244 {
245 // NOLINTNEXTLINE(hicpp-signed-bitwise)
246 constexpr std::array<uint8_t, static_cast<int>(Opcode::NUM_OPCODES)> INST_MODES_TABLE = {INST_MODES_LIST};
247 return INST_MODES_TABLE[static_cast<size_t>(opcode)];
248 }
249 } // namespace inst_modes
250 #endif
251
252 namespace internal {
253 inline constexpr std::array<const char *, ShiftType::INVALID_SHIFT + 1> SHIFT_TYPE_NAMES = {"LSL", "LSR", "ASR", "ROR",
254 "INVALID"};
255 } // namespace internal
256
257 inline const char *GetShiftTypeStr(ShiftType type)
258 {
259 ASSERT(type <= INVALID_SHIFT);
260 return internal::SHIFT_TYPE_NAMES[type];
261 }
262
263 /**
264 * Describes the type of the object produced by an instruction.
265 */
266 class ObjectTypeInfo {
267 public:
268 using ClassType = RuntimeInterface::ClassPtr;
269
270 ObjectTypeInfo() = default;
271 explicit ObjectTypeInfo(ClassType v) : class_(v) {}
272
273 // NOLINTNEXTLINE(google-explicit-constructor)
274 operator bool() const
275 {
276 return class_ != ClassType();
277 }
278
279 ClassType GetClass() const
280 {
281 return class_;
282 }
283
284 bool IsValid() const
285 {
286 return class_ != ClassType {};
287 }
288
289 private:
290 ClassType class_ {};
291 };
292
293 /**
294 * Class for storing panda bytecode's virtual register
295 */
296 class VirtualRegister final {
297 public:
298 using ValueType = uint16_t;
299 static constexpr unsigned BITS_FOR_VREG = (sizeof(ValueType) * BITS_PER_BYTE) - 1;
300 static constexpr ValueType INVALID = std::numeric_limits<ValueType>::max();
301
302 VirtualRegister() = default;
303 explicit VirtualRegister(uint16_t v, bool is_acc) : value_(v)
304 {
305 IsAccFlag::Set(is_acc, &value_);
306 }
307
308 explicit operator uint16_t() const
309 {
310 return value_;
311 }
312
313 uint16_t Value() const
314 {
315 return ValueField::Get(value_);
316 }
317
318 bool IsAccumulator() const
319 {
320 return IsAccFlag::Get(value_);
321 }
322
323 private:
324 uint16_t value_ {INVALID};
325
326 using ValueField = BitField<unsigned, 0, BITS_FOR_VREG>;
327 using IsAccFlag = ValueField::NextFlag;
328 };
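// Illustrative example (not part of the original header): the low 15 bits hold the register
// number and the top bit marks the accumulator, so
//
//     VirtualRegister r(5U, false);
//     r.Value();          // == 5
//     r.IsAccumulator();  // == false
//
// holds by construction of ValueField/IsAccFlag above.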
329
330 // How many bits will be used in Inst's bit fields for number of inputs.
331 constexpr size_t BITS_PER_INPUTS_NUM = 3;
332 // Maximum number of static inputs
333 constexpr size_t MAX_STATIC_INPUTS = (1U << BITS_PER_INPUTS_NUM) - 1;
334
335 /**
336 * Currently the Input class is just a wrapper for the Inst class.
337 */
338 class Input final {
339 public:
340 Input() = default;
341 explicit Input(Inst *inst) : inst_(inst) {}
342
343 Inst *GetInst()
344 {
345 return inst_;
346 }
347 const Inst *GetInst() const
348 {
349 return inst_;
350 }
351
352 static inline uint8_t GetPadding(Arch arch, uint32_t inputs_count)
353 {
354 return static_cast<uint8_t>(!Is64BitsArch(arch) && inputs_count % 2U == 1U);
355 }
356
357 private:
358 Inst *inst_ {nullptr};
359 };
360
361 /**
362 * User is an intrusive list node, so it stores pointers to the next and previous users.
363 * A user also holds a properties value that identifies the owner instruction and the corresponding input index.
364 */
365 class User final {
366 public:
367 User() = default;
368 User(bool is_static, unsigned index, unsigned size)
369 : properties_(IsStaticFlag::Encode(is_static) | IndexField::Encode(index) | SizeField::Encode(size) |
370 BbNumField::Encode(BbNumField::MaxValue()))
371 {
372 ASSERT(index < 1U << (BITS_FOR_INDEX - 1U));
373 ASSERT(size < 1U << (BITS_FOR_SIZE - 1U));
374 }
375 ~User() = default;
376
377 // Copy/move semantics are disabled because we use tricky pointer arithmetic based on the 'this' value
378 NO_COPY_SEMANTIC(User);
379 NO_MOVE_SEMANTIC(User);
380
381 Inst *GetInst();
382 const Inst *GetInst() const
383 {
384 return const_cast<User *>(this)->GetInst();
385 }
386
387 Inst *GetInput();
388 const Inst *GetInput() const;
389
390 bool IsDynamic() const
391 {
392 return !IsStaticFlag::Decode(properties_);
393 }
394 unsigned GetIndex() const
395 {
396 return IndexField::Decode(properties_);
397 }
398 unsigned GetSize() const
399 {
400 return SizeField::Decode(properties_);
401 }
402
403 VirtualRegister GetVirtualRegister() const
404 {
405 ASSERT(IsDynamic());
406 return VirtualRegister(VregField::Decode(properties_), IsAccFlag::Decode(properties_));
407 }
408
409 void SetVirtualRegister(VirtualRegister reg)
410 {
411 static_assert(sizeof(reg) <= sizeof(uintptr_t), "Consider passing the register by reference");
412 ASSERT(IsDynamic());
413 VregField::Set(reg.Value(), &properties_);
414 IsAccFlag::Set(reg.IsAccumulator(), &properties_);
415 }
416
417 uint32_t GetBbNum() const
418 {
419 ASSERT(IsDynamic());
420 return BbNumField::Decode(properties_);
421 }
422
423 void SetBbNum(uint32_t bb_num)
424 {
425 ASSERT(IsDynamic());
426 BbNumField::Set(bb_num, &properties_);
427 }
428
429 auto GetNext() const
430 {
431 return next_;
432 }
433
434 auto GetPrev() const
435 {
436 return prev_;
437 }
438
439 void SetNext(User *next)
440 {
441 next_ = next;
442 }
443
444 void SetPrev(User *prev)
445 {
446 prev_ = prev;
447 }
448
449 void Remove()
450 {
451 if (prev_ != nullptr) {
452 prev_->next_ = next_;
453 }
454 if (next_ != nullptr) {
455 next_->prev_ = prev_;
456 }
457 }
458
459 private:
460 static constexpr unsigned BITS_FOR_INDEX = 21;
461 static constexpr unsigned BITS_FOR_SIZE = BITS_FOR_INDEX;
462 static constexpr unsigned BITS_FOR_BB_NUM = 20;
463 using IndexField = BitField<unsigned, 0, BITS_FOR_INDEX>;
464 using SizeField = IndexField::NextField<unsigned, BITS_FOR_SIZE>;
465 using IsStaticFlag = SizeField::NextFlag;
466
467 using BbNumField = IsStaticFlag::NextField<uint32_t, BITS_FOR_BB_NUM>;
468
469 using VregField = IsStaticFlag::NextField<unsigned, VirtualRegister::BITS_FOR_VREG>;
470 using IsAccFlag = VregField::NextFlag;
471
472 uint64_t properties_ {0};
473 User *next_ {nullptr};
474 User *prev_ {nullptr};
475 };
476
477 /**
478 * List of users. Intended for range-based for loops.
479 * @tparam T should be User or const User
480 */
481 template <typename T>
482 class UserList {
483 template <typename U>
484 struct UserIterator {
485 UserIterator() = default;
486 explicit UserIterator(U *u) : user_(u) {}
487
488 UserIterator &operator++()
489 {
490 user_ = user_->GetNext();
491 return *this;
492 }
493 bool operator!=(const UserIterator &other)
494 {
495 return user_ != other.user_;
496 }
497 U &operator*()
498 {
499 return *user_;
500 }
501 U *operator->()
502 {
503 return user_;
504 }
505
506 private:
507 U *user_ {nullptr};
508 };
509
510 public:
511 using Iterator = UserIterator<T>;
512 using ConstIterator = UserIterator<const T>;
513 using PointerType = std::conditional_t<std::is_const_v<T>, T *const *, T **>;
514
515 explicit UserList(PointerType head) : head_(head) {}
516
517 // NOLINTNEXTLINE(readability-identifier-naming)
518 Iterator begin()
519 {
520 return Iterator(*head_);
521 }
522 // NOLINTNEXTLINE(readability-identifier-naming)
523 Iterator end()
524 {
525 return Iterator(nullptr);
526 }
527 // NOLINTNEXTLINE(readability-identifier-naming)
528 ConstIterator begin() const
529 {
530 return ConstIterator(*head_);
531 }
532 // NOLINTNEXTLINE(readability-identifier-naming)
533 ConstIterator end() const
534 {
535 return ConstIterator(nullptr);
536 }
537 bool Empty() const
538 {
539 return *head_ == nullptr;
540 }
541 T &Front()
542 {
543 return **head_;
544 }
545 const T &Front() const
546 {
547 return **head_;
548 }
549
550 private:
551 PointerType head_ {nullptr};
552 };
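// Illustrative usage sketch (not original code): UserList is what Inst::GetUsers() returns, so
// iterating the def-use chain of an instruction is a plain range-based for loop:
//
//     for (auto &user : inst->GetUsers()) {
//         Inst *owner = user.GetInst();    // instruction that consumes `inst`
//         unsigned idx = user.GetIndex();  // which of the owner's inputs it is
//     }
//
// `inst` here is assumed to be any valid Inst pointer.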
553
554 inline bool operator==(const User &lhs, const User &rhs)
555 {
556 return lhs.GetInst() == rhs.GetInst();
557 }
558
559 /**
560 * Operands class for instructions with a fixed number of inputs.
561 * This class does nothing by itself; it only exists so that its sizeof can be used when allocating memory.
562 */
563 template <int N>
564 struct Operands {
565 static_assert(N < MAX_STATIC_INPUTS, "Invalid inputs number");
566
567 std::array<User, N> users;
568 std::array<Input, N> inputs;
569 };
570
571 /**
572 * Specialized version for instructions with a variable number of inputs.
573 * Users and inputs are stored outside of this class.
574 */
575 class DynamicOperands {
576 public:
577 explicit DynamicOperands(ArenaAllocator *allocator) : allocator_(allocator) {}
578
579 User *Users()
580 {
581 return users_;
582 }
583
584 Input *Inputs()
585 {
586 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
587 return reinterpret_cast<Input *>(users_ + capacity_) + 1;
588 }
589
590 /// Append new input (and user accordingly)
591 unsigned Append(Inst *inst);
592
593 /// Remove input and user with index `index`.
594 void Remove(unsigned index);
595
596 /// Reallocate inputs/users storage to a new one with specified capacity.
597 void Reallocate(size_t new_capacity = 0);
598
599 /// Get the instruction to which these operands belong.
600 Inst *GetOwnerInst() const
601 {
602 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
603 return reinterpret_cast<Inst *>(const_cast<DynamicOperands *>(this) + 1);
604 }
605
606 User *GetUser(unsigned index)
607 {
608 CHECK_GE(capacity_ - 1, index);
609 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
610 return &users_[capacity_ - index - 1];
611 }
612
613 Input *GetInput(unsigned index)
614 {
615 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
616 return &Inputs()[index];
617 }
618
619 void SetInput(unsigned index, Input input)
620 {
621 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
622 Inputs()[index] = input;
623 }
624
625 size_t Size() const
626 {
627 return size_;
628 }
629
630 private:
631 User *users_ {nullptr};
632 size_t size_ {0};
633 size_t capacity_ {0};
634 ArenaAllocator *allocator_ {nullptr};
635 };
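// Note added for clarity (not stated explicitly in the original header): GetOwnerInst() above
// relies on the allocation layout produced by Inst::New, where the DynamicOperands block is
// expected to sit immediately before its owning Inst object; hence `this + 1` points at the
// instruction, and Inst::GetDynamicOperands() below computes the inverse offset.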
636
637 /**
638 * Base class for all instructions, should not be instantiated directly
639 */
640 class InstBase {
641 NO_COPY_SEMANTIC(InstBase);
642 NO_MOVE_SEMANTIC(InstBase);
643
644 public:
645 virtual ~InstBase() = default;
646
647 public:
648 ALWAYS_INLINE void operator delete([[maybe_unused]] void *unused, [[maybe_unused]] size_t size)
649 {
650 UNREACHABLE();
651 }
652 ALWAYS_INLINE void *operator new([[maybe_unused]] size_t size, void *ptr) noexcept
653 {
654 return ptr;
655 }
656 ALWAYS_INLINE void operator delete([[maybe_unused]] void *unused1, [[maybe_unused]] void *unused2) noexcept {}
657
658 void *operator new([[maybe_unused]] size_t size) = delete;
659
660 protected:
661 InstBase() = default;
662 };
663
664 /**
665 * Base instruction class
666 */
667 class Inst : public MarkerSet, public InstBase {
668 public:
669 /**
670 * Create new instruction. All instructions must be created with this method.
671 * It allocates additional space before the Inst object for the def-use structures.
672 *
673 * @tparam InstType - concrete type of instruction, shall be derived from Inst
674 * @tparam Args - constructor arguments types
675 * @param allocator - allocator used for memory allocation
676 * @param args - constructor arguments
677 * @return - new instruction
678 */
679 template <typename InstType, typename... Args>
680 [[nodiscard]] static InstType *New(ArenaAllocator *allocator, Args &&... args);
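// Illustrative sketch of the intended usage (the surrounding graph/allocator names are
// assumptions, not taken from this header):
//
//     auto *cmp = Inst::New<CompareInst>(graph->GetAllocator(), Opcode::Compare,
//                                        DataType::Type::BOOL, pc, ConditionCode::CC_EQ);
//
// CompareInst provides such a constructor further below; the allocator is whatever
// ArenaAllocator the graph owns.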
681
682 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
683 #define INST_DEF(opcode, base, ...) inline const base *CastTo##opcode() const;
684 OPCODE_LIST(INST_DEF)
685 #undef INST_DEF
686
687 // NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
688 #define INST_DEF(opcode, base, ...) inline base *CastTo##opcode();
689 OPCODE_LIST(INST_DEF)
690 #undef INST_DEF
691
692 // Methods for instruction chaining inside basic blocks.
693 Inst *GetNext()
694 {
695 return next_;
696 }
697 const Inst *GetNext() const
698 {
699 return next_;
700 }
701 Inst *GetPrev()
702 {
703 return prev_;
704 }
705 const Inst *GetPrev() const
706 {
707 return prev_;
708 }
709 void SetNext(Inst *next)
710 {
711 next_ = next;
712 }
713 void SetPrev(Inst *prev)
714 {
715 prev_ = prev;
716 }
717
718 // Id accessors
719 auto GetId() const
720 {
721 return id_;
722 }
723 void SetId(int id)
724 {
725 id_ = id;
726 }
727
728 auto GetLinearNumber() const
729 {
730 return linear_number_;
731 }
732 void SetLinearNumber(LinearNumber number)
733 {
734 linear_number_ = number;
735 }
736
737 auto GetCloneNumber() const
738 {
739 return clone_number_;
740 }
741 void SetCloneNumber(int32_t number)
742 {
743 clone_number_ = number;
744 }
745
746 // Opcode accessors
747 Opcode GetOpcode() const
748 {
749 return opcode_;
750 }
751 void SetOpcode(Opcode opcode)
752 {
753 opcode_ = opcode;
754 SetField<FieldFlags>(inst_flags::GetFlagsMask(opcode));
755 }
756 const char *GetOpcodeStr() const
757 {
758 return GetOpcodeString(GetOpcode());
759 }
760
761 // Bytecode PC accessors
762 uint32_t GetPc() const
763 {
764 return pc_;
765 }
766 void SetPc(uint32_t pc)
767 {
768 pc_ = pc;
769 }
770
771 // Type accessors
772 DataType::Type GetType() const
773 {
774 return FieldType::Get(bit_fields_);
775 }
776 void SetType(DataType::Type type)
777 {
778 FieldType::Set(type, &bit_fields_);
779 }
780 bool HasType() const
781 {
782 return GetType() != DataType::Type::NO_TYPE;
783 }
784
785 // Parent basic block accessors
786 BasicBlock *GetBasicBlock()
787 {
788 return bb_;
789 }
790 const BasicBlock *GetBasicBlock() const
791 {
792 return bb_;
793 }
794 void SetBasicBlock(BasicBlock *bb)
795 {
796 bb_ = bb;
797 }
798
799 // Instruction properties getters
800 bool IsControlFlow() const
801 {
802 return GetFlag(inst_flags::CF);
803 }
804
805 bool IsIntrinsic() const
806 {
807 return GetOpcode() == Opcode::Intrinsic;
808 }
809
810 bool IsCall() const
811 {
812 return GetFlag(inst_flags::CALL);
813 }
814
815 bool IsSpillFill() const
816 {
817 return GetOpcode() == Opcode::SpillFill;
818 }
819
820 bool IsAccRead() const;
821 bool IsAccWrite() const;
822 bool CanThrow() const
823 {
824 return GetFlag(inst_flags::CAN_THROW);
825 }
826 bool RequireState() const
827 {
828 return GetFlag(inst_flags::REQUIRE_STATE);
829 }
830 // Returns true if the instruction is not removable in DCE
831 bool IsNotRemovable() const
832 {
833 return GetFlag(inst_flags::NO_DCE);
834 }
835
836 // Returns true if the instruction doesn't have a destination register
837 bool NoDest() const
838 {
839 return GetFlag(inst_flags::PSEUDO_DST) || GetFlag(inst_flags::NO_DST) || GetType() == DataType::VOID;
840 }
841
842 bool HasPseudoDestination() const
843 {
844 return GetFlag(inst_flags::PSEUDO_DST);
845 }
846
847 bool HasImplicitRuntimeCall() const
848 {
849 return GetFlag(inst_flags::IMPLICIT_RUNTIME_CALL);
850 }
851
852 bool CanDeoptimize() const
853 {
854 return GetFlag(inst_flags::CAN_DEOPTIMIZE);
855 }
856
857 // Returns true if the instruction is low-level
858 bool IsLowLevel() const
859 {
860 return GetFlag(inst_flags::LOW_LEVEL);
861 }
862
863 // Returns true if the instruction is not hoistable
864 bool IsNotHoistable() const
865 {
866 return GetFlag(inst_flags::NO_HOIST);
867 }
868
869 // Returns true if CSE can't be applied to the instruction
870 bool IsNotCseApplicable() const
871 {
872 return GetFlag(inst_flags::NO_CSE);
873 }
874
875 // Returns true if the opcode cannot be moved through runtime calls (REFERENCE type only)
876 bool IsRefSpecial() const
877 {
878 bool result = GetFlag(inst_flags::REF_SPECIAL);
879 ASSERT(!result || GetType() == DataType::Type::REFERENCE);
880 return result;
881 }
882
883 // Returns true if the instruction is commutative
884 bool IsCommutative() const
885 {
886 return GetFlag(inst_flags::COMMUTATIVE);
887 }
888
889 // Returns true if the instruction can be used in if-conversion
890 bool IsIfConvertable() const
891 {
892 return GetFlag(inst_flags::IFCVT);
893 }
894
895 virtual bool IsPropagateLiveness() const;
896
897 bool RequireRegMap() const;
898
899 ObjectTypeInfo GetObjectTypeInfo() const
900 {
901 return object_type_info_;
902 }
903
904 bool HasObjectTypeInfo() const
905 {
906 return object_type_info_.IsValid();
907 }
908
909 Inst *GetDataFlowInput(int index) const
910 {
911 return GetDataFlowInput(GetInput(index).GetInst());
912 }
913 Inst *GetDataFlowInput(Inst *input_inst) const;
914
915 bool IsPrecedingInSameBlock(const Inst *other) const;
916
917 bool IsDominate(const Inst *other) const;
918
919 bool InSameBlockOrDominate(const Inst *other) const;
920
921 const SaveStateInst *GetSaveState() const
922 {
923 return const_cast<Inst *>(this)->GetSaveState();
924 }
925
926 SaveStateInst *GetSaveState()
927 {
928 if (!RequireState()) {
929 return nullptr;
930 }
931 if (GetInputsCount() == 0) {
932 return nullptr;
933 }
934 auto ss = GetInput(GetInputsCount() - 1).GetInst();
935 if (ss->GetOpcode() != Opcode::SaveState) {
936 return nullptr;
937 }
938 return ss->CastToSaveState();
939 }
940
941 void SetSaveState(Inst *inst)
942 {
943 ASSERT(RequireState());
944 SetInput(GetInputsCount() - 1, inst);
945 }
946
947 bool IsZeroRegInst() const;
948
949 /**
950 * Return instruction clone
951 */
952 virtual Inst *Clone(const Graph *targetGraph) const;
953
954 uintptr_t GetFlagsMask() const
955 {
956 return GetField<FieldFlags>();
957 }
958
959 bool GetFlag(inst_flags::Flags flag) const
960 {
961 return (GetFlagsMask() & flag) != 0;
962 }
963
964 void SetFlag(inst_flags::Flags flag)
965 {
966 SetField<FieldFlags>(GetFlagsMask() | flag);
967 }
968
969 void ClearFlag(inst_flags::Flags flag)
970 {
971 SetField<FieldFlags>(GetFlagsMask() & ~static_cast<uintptr_t>(flag));
972 }
973
974 #ifndef NDEBUG
975 uint8_t GetModesMask() const
976 {
977 return inst_modes::GetModesMask(opcode_);
978 }
979
980 bool SupportsMode(inst_modes::Mode mode) const
981 {
982 return (GetModesMask() & mode) != 0;
983 }
984 #endif
985
986 void SetTerminator()
987 {
988 SetFlag(inst_flags::Flags::TERMINATOR);
989 }
990
991 void InsertBefore(Inst *inst);
992 void InsertAfter(Inst *inst);
993
994 /**
995 * Return true if instruction has dynamic operands storage.
996 */
997 bool IsOperandsDynamic() const
998 {
999 return GetField<InputsCount>() == MAX_STATIC_INPUTS;
1000 }
1001
1002 /**
1003 * Add user to the instruction.
1004 * @param user - pointer to User object
1005 */
1006 void AddUser(User *user)
1007 {
1008 ASSERT(user && user->GetInst());
1009 user->SetNext(first_user_);
1010 user->SetPrev(nullptr);
1011 if (first_user_ != nullptr) {
1012 ASSERT(first_user_->GetPrev() == nullptr);
1013 first_user_->SetPrev(user);
1014 }
1015 first_user_ = user;
1016 }
1017
1018 /**
1019 * Remove instruction from users.
1020 * @param user - pointer to User object
1021 */
1022 void RemoveUser(User *user)
1023 {
1024 ASSERT(user);
1025 ASSERT(HasUsers());
1026 if (user == first_user_) {
1027 first_user_ = user->GetNext();
1028 }
1029 user->Remove();
1030 }
1031
1032 /**
1033 * Set input instruction in specified index.
1034 * Old input will be removed.
1035 * @param index - index of input to be set
1036 * @param inst - new input instruction TODO sherstennikov: currently it can be nullptr, is it correct?
1037 */
1038 void SetInput(unsigned index, Inst *inst)
1039 {
1040 CHECK_LT(index, GetInputsCount());
1041 auto &input = GetInputs()[index];
1042 auto user = GetUser(index);
1043 if (input.GetInst() != nullptr && input.GetInst()->HasUsers()) {
1044 input.GetInst()->RemoveUser(user);
1045 }
1046 if (inst != nullptr) {
1047 inst->AddUser(user);
1048 }
1049 input = Input(inst);
1050 }
1051
1052 /**
1053 * Replace all inputs that point to the specified instruction with the new one.
1054 * @param old_input - instruction that should be replaced
1055 * @param new_input - new input instruction
1056 */
1057 void ReplaceInput(Inst *old_input, Inst *new_input)
1058 {
1059 unsigned index = 0;
1060 for (auto input : GetInputs()) {
1061 if (input.GetInst() == old_input) {
1062 SetInput(index, new_input);
1063 }
1064 index++;
1065 }
1066 }
1067
1068 /**
1069 * Replace inputs that point to this instruction with the given instruction.
1070 * @param inst - new input instruction
1071 */
1072 void ReplaceUsers(Inst *inst)
1073 {
1074 ASSERT(inst != this);
1075 ASSERT(inst != nullptr);
1076 for (auto it = GetUsers().begin(); it != GetUsers().end(); it = GetUsers().begin()) {
1077 it->GetInst()->SetInput(it->GetIndex(), inst);
1078 }
1079 }
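// Illustrative sketch (not original code) of the usual replace-and-detach pattern built on the
// two helpers above, e.g. when an optimization substitutes `new_inst` for `old_inst`:
//
//     old_inst->ReplaceUsers(new_inst);  // every user now reads new_inst
//     old_inst->RemoveInputs();          // drop old_inst from its inputs' user lists
//
// after which old_inst can be unlinked from its basic block by the caller.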
1080
1081 /**
1082 * Append input instruction.
1083 * Available only for variadic inputs instructions, such as PHI.
1084 * @param input - input instruction
1085 * @return index in inputs container where new input is placed
1086 */
1087 unsigned AppendInput(Inst *input)
1088 {
1089 CHECK_NOT_NULL(input);
1090 ASSERT(IsOperandsDynamic());
1091 DynamicOperands *operands = GetDynamicOperands();
1092 return operands->Append(input);
1093 }
1094
1095 unsigned AppendInput(Input input)
1096 {
1097 static_assert(sizeof(Input) <= sizeof(uintptr_t)); // If Input becomes larger, pass it by reference instead
1098 return AppendInput(input.GetInst());
1099 }
1100
1101 /**
1102 * Remove input from inputs container
1103 * Available only for variadic inputs instructions, such as PHI.
1104 * @param index - index of input in inputs container
1105 */
1106 virtual void RemoveInput(unsigned index)
1107 {
1108 ASSERT(IsOperandsDynamic());
1109 DynamicOperands *operands = GetDynamicOperands();
1110 ASSERT(index < operands->Size());
1111 operands->Remove(index);
1112 }
1113
1114 /**
1115 * Remove all inputs
1116 */
1117 void RemoveInputs()
1118 {
1119 if (UNLIKELY(IsOperandsDynamic())) {
1120 for (auto inputs_count = GetInputsCount(); inputs_count != 0; --inputs_count) {
1121 RemoveInput(inputs_count - 1);
1122 }
1123 } else {
1124 for (size_t i = 0; i < GetInputsCount(); ++i) {
1125 SetInput(i, nullptr);
1126 }
1127 }
1128 }
1129
1130 /**
1131 * Remove all users
1132 */
1133 template <bool with_inputs = false>
1134 void RemoveUsers()
1135 {
1136 auto users = GetUsers();
1137 while (!users.Empty()) {
1138 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
1139 if constexpr (with_inputs) {
1140 auto &user = users.Front();
1141 user.GetInst()->RemoveInput(user.GetIndex());
1142 // NOLINTNEXTLINE(readability-misleading-indentation)
1143 } else {
1144 RemoveUser(&users.Front());
1145 }
1146 }
1147 }
1148
1149 /**
1150 * Get input by index
1151 * @param index - index of input
1152 * @return input instruction
1153 */
1154 Input GetInput(unsigned index)
1155 {
1156 ASSERT(index < GetInputsCount());
1157 return GetInputs()[index];
1158 }
1159
1160 Input GetInput(unsigned index) const
1161 {
1162 ASSERT(index < GetInputsCount());
1163 return GetInputs()[index];
1164 }
1165
1166 Span<Input> GetInputs()
1167 {
1168 if (UNLIKELY(IsOperandsDynamic())) {
1169 DynamicOperands *operands = GetDynamicOperands();
1170 return Span<Input>(operands->Inputs(), operands->Size());
1171 }
1172
1173 auto inputs_count {GetField<InputsCount>()};
1174 return Span<Input>(
1175 reinterpret_cast<Input *>(reinterpret_cast<uintptr_t>(this) -
1176 (inputs_count + Input::GetPadding(RUNTIME_ARCH, inputs_count)) * sizeof(Input)),
1177 inputs_count);
1178 }
1179 Span<const Input> GetInputs() const
1180 {
1181 return Span<const Input>(const_cast<Inst *>(this)->GetInputs());
1182 }
1183
1184 virtual DataType::Type GetInputType([[maybe_unused]] size_t index) const
1185 {
1186 ASSERT(index < GetInputsCount());
1187 return GetType();
1188 }
1189
1190 UserList<User> GetUsers()
1191 {
1192 return UserList<User>(&first_user_);
1193 }
1194 UserList<const User> GetUsers() const
1195 {
1196 return UserList<const User>(&first_user_);
1197 }
1198
1199 size_t GetInputsCount() const
1200 {
1201 if (UNLIKELY(IsOperandsDynamic())) {
1202 return GetDynamicOperands()->Size();
1203 }
1204 return GetInputs().Size();
1205 }
1206
1207 bool HasUsers() const
1208 {
1209 return first_user_ != nullptr;
1210 };
1211
1212 bool HasSingleUser() const
1213 {
1214 return first_user_ != nullptr && first_user_->GetNext() == nullptr;
1215 }
1216
1217 /// Reserve space in dataflow storage for specified inputs count
1218 void ReserveInputs(size_t capacity);
1219
1220 virtual void SetLocation([[maybe_unused]] size_t index, [[maybe_unused]] Location location) {}
1221
1222 virtual Location GetLocation([[maybe_unused]] size_t index) const
1223 {
1224 return Location::RequireRegister();
1225 }
1226
1227 virtual Location GetDstLocation() const
1228 {
1229 return Location::MakeRegister(GetDstReg(), GetType());
1230 }
1231
1232 template <typename Accessor>
1233 typename Accessor::ValueType GetField() const
1234 {
1235 return Accessor::Get(bit_fields_);
1236 }
1237
1238 template <typename Accessor>
1239 void SetField(typename Accessor::ValueType value)
1240 {
1241 Accessor::Set(value, &bit_fields_);
1242 }
1243
1244 uint64_t GetAllFields() const
1245 {
1246 return bit_fields_;
1247 }
1248
1249 bool IsPhi() const
1250 {
1251 return opcode_ == Opcode::Phi;
1252 }
1253
1254 bool IsCatchPhi() const
1255 {
1256 return opcode_ == Opcode::CatchPhi;
1257 }
1258
1259 bool IsConst() const
1260 {
1261 return opcode_ == Opcode::Constant;
1262 }
1263
1264 bool IsParameter() const
1265 {
1266 return opcode_ == Opcode::Parameter;
1267 }
1268
1269 virtual bool IsBoolConst() const
1270 {
1271 return false;
1272 }
1273
1274 bool IsSaveState() const
1275 {
1276 return opcode_ == Opcode::SaveState;
1277 }
1278
1279 bool IsTry() const
1280 {
1281 return opcode_ == Opcode::Try;
1282 }
1283
1284 virtual void SetVnObject([[maybe_unused]] VnObject *vn_obj) {}
1285
1286 Register GetDstReg() const
1287 {
1288 return dst_reg_;
1289 }
1290
1291 void SetDstReg(Register reg)
1292 {
1293 dst_reg_ = reg;
1294 }
1295
1296 uint32_t GetVN() const
1297 {
1298 return vn_;
1299 }
1300
1301 void SetVN(uint32_t vn)
1302 {
1303 vn_ = vn;
1304 }
1305 void Dump(std::ostream *out, bool new_line = true) const;
1306 virtual bool DumpInputs(std::ostream *out) const;
1307 virtual void DumpOpcode(std::ostream *out) const;
1308
1309 virtual void SetDstReg([[maybe_unused]] unsigned index, Register reg)
1310 {
1311 ASSERT(index == 0);
1312 SetDstReg(reg);
1313 }
1314
1315 virtual Register GetDstReg([[maybe_unused]] unsigned index) const
1316 {
1317 ASSERT(index == 0);
1318 return GetDstReg();
1319 }
1320
1321 virtual size_t GetDstCount() const
1322 {
1323 return 1;
1324 }
1325
1326 virtual uint32_t GetSrcRegIndex() const
1327 {
1328 return 0;
1329 }
1330
1331 virtual void SetSrcReg([[maybe_unused]] unsigned index, [[maybe_unused]] Register reg) {}
1332
1333 virtual Register GetSrcReg([[maybe_unused]] unsigned index) const
1334 {
1335 return INVALID_REG;
1336 }
1337
1338 User *GetFirstUser() const
1339 {
1340 return first_user_;
1341 }
1342
1343 protected:
1344 using InstBase::InstBase;
1345 static constexpr int INPUT_COUNT = 0;
1346
1347 Inst() = default;
1348
1349 explicit Inst(Opcode opcode) : Inst(opcode, DataType::Type::NO_TYPE, INVALID_PC) {}
1350
1351 explicit Inst(Opcode opcode, DataType::Type type, uint32_t pc) : pc_(pc), opcode_(opcode)
1352 {
1353 bit_fields_ = inst_flags::GetFlagsMask(opcode);
1354 SetField<FieldType>(type);
1355 }
1356
1357 protected:
1358 using FieldFlags = BitField<uint32_t, 0, MinimumBitsToStore(1U << inst_flags::FLAGS_COUNT)>;
1359 using FieldType = FieldFlags::NextField<DataType::Type, MinimumBitsToStore(DataType::LAST)>;
1360 using InputsCount = FieldType::NextField<uint32_t, BITS_PER_INPUTS_NUM>;
1361 using LastField = InputsCount;
1362
1363 DynamicOperands *GetDynamicOperands() const
1364 {
1365 return reinterpret_cast<DynamicOperands *>(reinterpret_cast<uintptr_t>(this) - sizeof(DynamicOperands));
1366 }
1367
1368 private:
1369 User *GetUser(unsigned index)
1370 {
1371 if (UNLIKELY(IsOperandsDynamic())) {
1372 return GetDynamicOperands()->GetUser(index);
1373 }
1374 auto inputs_count {GetField<InputsCount>()};
1375 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
1376 return reinterpret_cast<User *>(reinterpret_cast<Input *>(this) -
1377 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
1378 (inputs_count + Input::GetPadding(RUNTIME_ARCH, inputs_count))) -
1379 // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
1380 index - 1;
1381 }
1382
1383 size_t OperandsStorageSize() const
1384 {
1385 if (UNLIKELY(IsOperandsDynamic())) {
1386 return sizeof(DynamicOperands);
1387 }
1388
1389 auto inputs_count {GetField<InputsCount>()};
1390 return inputs_count * (sizeof(Input) + sizeof(User)) +
1391 Input::GetPadding(RUNTIME_ARCH, inputs_count) * sizeof(Input);
1392 }
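// Memory-layout note added for clarity (derived from GetUser/GetInputs/OperandsStorageSize above,
// not stated explicitly in the original header). For an instruction with a fixed inputs count the
// operand storage precedes the Inst object, roughly:
//
//     [ User[N-1] ... User[0] ][ Input[0] ... Input[N-1] ][ optional pad ][ Inst ]
//
// For dynamic operands only the DynamicOperands header sits right before the Inst; its users and
// inputs live in a separate arena block, presumably (re)allocated by DynamicOperands::Reallocate().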
1393
1394 private:
1395 /// Basic block this instruction belongs to
1396 BasicBlock *bb_ {nullptr};
1397
1398 /// Next instruction within basic block
1399 Inst *next_ {nullptr};
1400
1401 /// Previous instruction within basic block
1402 Inst *prev_ {nullptr};
1403
1404 /// First user in users chain
1405 User *first_user_ {nullptr};
1406
1407 /// This value holds the properties of the instruction. It is accessed via BitField types (e.g. FieldType).
1408 uint64_t bit_fields_ {0};
1409
1410 /// Unique id of instruction
1411 uint32_t id_ {INVALID_ID};
1412
1413 /// Value number of the instruction
1414 uint32_t vn_ {INVALID_VN};
1415
1416 /// Bytecode pc
1417 uint32_t pc_ {INVALID_PC};
1418
1419 /// Number used in cloning
1420 uint32_t clone_number_ {0};
1421
1422 /// Instruction number assigned while visiting the graph
1423 LinearNumber linear_number_ {INVALID_LINEAR_NUM};
1424
1425 ObjectTypeInfo object_type_info_ {};
1426
1427 /// Opcode, see opcodes.def
1428 Opcode opcode_ {Opcode::INVALID};
1429
1430 // Destination register (the instruction type is defined in FieldType)
1431 Register dst_reg_ {INVALID_REG};
1432 };
1433
1434 /**
1435 * Proxy class that injects a new field - the type of the source operands - into the property field of the instruction.
1436 * Should be used when the instruction's sources share the same type and the instruction's own type does not match
1437 * the type of the sources. Examples: Cmp, Compare
1438 * @tparam T Base instruction class after which this mixin is injected
1439 */
1440 template <typename T>
1441 class InstWithOperandsType : public T {
1442 public:
1443 using T::T;
1444
1445 void SetOperandsType(DataType::Type type)
1446 {
1447 T::template SetField<FieldOperandsType>(type);
1448 }
1449 virtual DataType::Type GetOperandsType() const
1450 {
1451 return T::template GetField<FieldOperandsType>();
1452 }
1453
1454 protected:
1455 using FieldOperandsType =
1456 typename T::LastField::template NextField<DataType::Type, MinimumBitsToStore(DataType::LAST)>;
1457 using LastField = FieldOperandsType;
1458 };
1459
1460 /**
1461 * Mixin for NeedBarrier flag.
1462 * @tparam T Base instruction class after which this mixin is injected
1463 */
1464 template <typename T>
1465 class NeedBarrierMixin : public T {
1466 public:
1467 using T::T;
1468
1469 void SetNeedBarrier(bool v)
1470 {
1471 T::template SetField<NeedBarrierFlag>(v);
1472 }
1473 bool GetNeedBarrier() const
1474 {
1475 return T::template GetField<NeedBarrierFlag>();
1476 }
1477
1478 protected:
1479 using NeedBarrierFlag = typename T::LastField::NextFlag;
1480 using LastField = NeedBarrierFlag;
1481 };
1482
1483 /**
1484 * This mixin aims to implement type id accessors.
1485 */
1486 class TypeIdMixin {
1487 public:
1488 TypeIdMixin() = default;
1489 NO_COPY_SEMANTIC(TypeIdMixin);
1490 NO_MOVE_SEMANTIC(TypeIdMixin);
1491 virtual ~TypeIdMixin() = default;
1492
1493 void SetTypeId(uint32_t id)
1494 {
1495 type_id_ = id;
1496 }
1497
1498 auto GetTypeId() const
1499 {
1500 return type_id_;
1501 }
1502
1503 void SetMethod(RuntimeInterface::MethodPtr method)
1504 {
1505 method_ = method;
1506 }
1507 auto GetMethod() const
1508 {
1509 return method_;
1510 }
1511
1512 private:
1513 uint32_t type_id_ {0};
1514 // Pointer to the method in which this instruction is executed (the inlined method)
1515 RuntimeInterface::MethodPtr method_ {nullptr};
1516 };
1517
1518 /**
1519 * Mixin for Inlined calls/returns.
1520 */
1521 template <typename T>
1522 class InlinedInstMixin : public T {
1523 public:
1524 using T::T;
1525
1526 void SetInlined(bool v)
1527 {
1528 T::template SetField<IsInlinedFlag>(v);
1529 }
1530 bool IsInlined() const
1531 {
1532 return T::template GetField<IsInlinedFlag>();
1533 }
1534
1535 protected:
1536 using IsInlinedFlag = typename T::LastField::NextFlag;
1537 using LastField = IsInlinedFlag;
1538 };
1539
1540 /**
1541 * Mixin for instructions with immediate constant value
1542 */
1543 class ImmediateMixin {
1544 public:
1545 explicit ImmediateMixin(uint64_t immediate) : immediate_(immediate) {}
1546
1547 NO_COPY_SEMANTIC(ImmediateMixin);
1548 NO_MOVE_SEMANTIC(ImmediateMixin);
1549 virtual ~ImmediateMixin() = default;
1550
1551 void SetImm(uint64_t immediate)
1552 {
1553 immediate_ = immediate;
1554 }
1555 auto GetImm() const
1556 {
1557 return immediate_;
1558 }
1559
1560 protected:
1561 ImmediateMixin() = default;
1562
1563 private:
1564 uint64_t immediate_ {0};
1565 };
1566
1567 /**
1568 * Mixin for instructions with ConditionCode
1569 */
1570 template <typename T>
1571 class ConditionMixin : public T {
1572 public:
1573 enum class Prediction { NONE, LIKELY, UNLIKELY, SIZE = UNLIKELY };
1574
1575 using T::T;
1576 explicit ConditionMixin(ConditionCode cc)
1577 {
1578 T::template SetField<CcFlag>(cc);
1579 }
1580 NO_COPY_SEMANTIC(ConditionMixin);
1581 NO_MOVE_SEMANTIC(ConditionMixin);
1582 ~ConditionMixin() override = default;
1583
1584 auto GetCc() const
1585 {
1586 return T::template GetField<CcFlag>();
1587 }
1588 void SetCc(ConditionCode cc)
1589 {
1590 T::template SetField<CcFlag>(cc);
1591 }
1592 void InverseConditionCode()
1593 {
1594 SetCc(GetInverseConditionCode(GetCc()));
1595 if (IsLikely()) {
1596 SetUnlikely();
1597 } else if (IsUnlikely()) {
1598 SetLikely();
1599 }
1600 }
1601
1602 bool IsLikely() const
1603 {
1604 return T::template GetField<PredictionFlag>() == Prediction::LIKELY;
1605 }
1606 bool IsUnlikely() const
1607 {
1608 return T::template GetField<PredictionFlag>() == Prediction::UNLIKELY;
1609 }
1610 void SetLikely()
1611 {
1612 T::template SetField<PredictionFlag>(Prediction::LIKELY);
1613 }
1614 void SetUnlikely()
1615 {
1616 T::template SetField<PredictionFlag>(Prediction::UNLIKELY);
1617 }
1618
1619 protected:
1620 ConditionMixin() = default;
1621
1622 using CcFlag = typename T::LastField::template NextField<ConditionCode, MinimumBitsToStore(ConditionCode::CC_LAST)>;
1623 using PredictionFlag = typename CcFlag::template NextField<Prediction, MinimumBitsToStore(Prediction::SIZE)>;
1624 using LastField = PredictionFlag;
1625 };
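// Illustrative example (not original code): InverseConditionCode() flips both the condition code
// and the branch prediction hint, so for an If-like instruction `inst` with CC_EQ and LIKELY set:
//
//     inst->InverseConditionCode();  // now CC_NE and UNLIKELY
//
// which follows from GetInverseConditionCode() and the IsLikely()/SetUnlikely() logic above.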
1626
1627 /**
1628 * Instruction with fixed number of inputs.
1629 * Shall not be instantiated directly, only through derived classes.
1630 */
1631 template <size_t N>
1632 class FixedInputsInst : public Inst {
1633 public:
1634 using Inst::Inst;
1635
1636 static constexpr int INPUT_COUNT = N;
1637
1638 void SetSrcReg(unsigned index, Register reg) override
1639 {
1640 ASSERT(index < N);
1641 src_regs_[index] = reg;
1642 }
1643
1644 Register GetSrcReg(unsigned index) const override
1645 {
1646 ASSERT(index < N);
1647 return src_regs_[index];
1648 }
1649
1650 Location GetLocation(size_t index) const override
1651 {
1652 return Location::MakeRegister(GetSrcReg(index), GetInputType(index));
1653 }
1654
1655 void SetLocation(size_t index, Location location) override
1656 {
1657 SetSrcReg(index, location.GetValue());
1658 }
1659
1660 void SetDstLocation(Location location)
1661 {
1662 SetDstReg(location.GetValue());
1663 }
1664
1665 Inst *Clone(const Graph *targetGraph) const override;
1666
1667 private:
1668 template <typename T, std::size_t... Is>
1669 constexpr auto CreateArray(T value, [[maybe_unused]] std::index_sequence<Is...> unused)
1670 {
1671 return std::array<T, sizeof...(Is)> {(static_cast<void>(Is), value)...};
1672 }
1673
1674 std::array<Register, N> src_regs_ = CreateArray(INVALID_REG, std::make_index_sequence<INPUT_COUNT>());
1675 };
1676
1677 template <size_t N>
1678 Inst *FixedInputsInst<N>::Clone(const Graph *targetGraph) const
1679 {
1680 auto clone = static_cast<FixedInputsInst *>(Inst::Clone(targetGraph));
1681 #ifndef NDEBUG
1682 for (size_t i = 0; i < INPUT_COUNT; ++i) {
1683 clone->SetSrcReg(i, GetSrcReg(i));
1684 }
1685 #endif
1686 return clone;
1687 }
1688
1689 /**
1690 * Instructions with fixed static inputs
1691 * We need to explicitly declare these proxy classes because some code can't work with the templated inst classes,
1692 * for example the DEFINE_INST macro.
1693 */
1694 class FixedInputsInst0 : public FixedInputsInst<0> {
1695 public:
1696 using FixedInputsInst::FixedInputsInst;
1697
1698 NO_COPY_SEMANTIC(FixedInputsInst0);
1699 NO_MOVE_SEMANTIC(FixedInputsInst0);
1700 ~FixedInputsInst0() override = default;
1701 };
1702
1703 class FixedInputsInst1 : public FixedInputsInst<1> {
1704 public:
1705 using FixedInputsInst::FixedInputsInst;
1706
1707 NO_COPY_SEMANTIC(FixedInputsInst1);
1708 NO_MOVE_SEMANTIC(FixedInputsInst1);
1709 ~FixedInputsInst1() override = default;
1710 };
1711
1712 class FixedInputsInst2 : public FixedInputsInst<2U> {
1713 public:
1714 using FixedInputsInst::FixedInputsInst;
1715
1716 NO_COPY_SEMANTIC(FixedInputsInst2);
1717 NO_MOVE_SEMANTIC(FixedInputsInst2);
1718 ~FixedInputsInst2() override = default;
1719 };
1720
1721 /**
1722 * Instruction with variable inputs count
1723 */
1724 class DynamicInputsInst : public Inst {
1725 public:
1726 using Inst::Inst;
1727
1728 static constexpr int INPUT_COUNT = MAX_STATIC_INPUTS;
1729
1730 Location GetLocation(size_t index) const override
1731 {
1732 if (locations_ == nullptr) {
1733 return Location::Invalid();
1734 }
1735 return locations_->GetLocation(index);
1736 }
1737
1738 Location GetDstLocation() const override
1739 {
1740 if (locations_ == nullptr) {
1741 return Location::Invalid();
1742 }
1743 return locations_->GetDstLocation();
1744 }
1745
1746 void SetLocation(size_t index, Location location) override
1747 {
1748 ASSERT(locations_ != nullptr);
1749 locations_->SetLocation(index, location);
1750 }
1751
1752 void SetDstLocation(Location location)
1753 {
1754 ASSERT(locations_ != nullptr);
1755 locations_->SetDstLocation(location);
1756 }
1757
1758 void SetLocationsInfo(LocationsInfo *info)
1759 {
1760 locations_ = info;
1761 }
1762
1763 Register GetSrcReg(unsigned index) const override
1764 {
1765 return GetLocation(index).GetValue();
1766 }
1767
1768 void SetSrcReg(unsigned index, Register reg) override
1769 {
1770 SetLocation(index, Location::MakeRegister(reg, GetInputType(index)));
1771 }
1772
1773 private:
1774 LocationsInfo *locations_ {nullptr};
1775 };
1776
1777 class SpillFillInst;
1778
1779 /**
1780 * Mixin to hold location data
1781 */
1782 class LocationDataMixin {
1783 public:
1784 void SetLocationData(SpillFillData location_data)
1785 {
1786 location_data_ = location_data;
1787 }
1788
1789 auto GetLocationData() const
1790 {
1791 return location_data_;
1792 }
1793
1794 auto &GetLocationData()
1795 {
1796 return location_data_;
1797 }
1798
1799 protected:
1800 LocationDataMixin() = default;
1801 NO_COPY_SEMANTIC(LocationDataMixin);
1802 NO_MOVE_SEMANTIC(LocationDataMixin);
1803 virtual ~LocationDataMixin() = default;
1804
1805 private:
1806 SpillFillData location_data_ {};
1807 };
1808
1809 /**
1810 * Mixin to hold input types of call instruction
1811 */
1812 class InputTypesMixin {
1813 public:
1814 InputTypesMixin() = default;
1815 NO_COPY_SEMANTIC(InputTypesMixin);
1816 NO_MOVE_SEMANTIC(InputTypesMixin);
1817 virtual ~InputTypesMixin() = default;
1818
1819 void AllocateInputTypes(ArenaAllocator *allocator, size_t capacity)
1820 {
1821 ASSERT(allocator != nullptr);
1822 ASSERT(input_types_ == nullptr);
1823 input_types_ = allocator->New<ArenaVector<DataType::Type>>(allocator->Adapter());
1824 ASSERT(input_types_ != nullptr);
1825 input_types_->reserve(capacity);
1826 ASSERT(input_types_->capacity() >= capacity);
1827 }
1828 void AddInputType(DataType::Type type)
1829 {
1830 ASSERT(input_types_ != nullptr);
1831 input_types_->push_back(type);
1832 }
1833 ArenaVector<DataType::Type> *GetInputTypes()
1834 {
1835 return input_types_;
1836 }
1837 void CloneTypes(ArenaAllocator *allocator, InputTypesMixin *target_inst) const
1838 {
1839 if (UNLIKELY(input_types_ == nullptr)) {
1840 return;
1841 }
1842 target_inst->AllocateInputTypes(allocator, input_types_->size());
1843 for (auto input_type : *input_types_) {
1844 target_inst->AddInputType(input_type);
1845 }
1846 }
1847
1848 protected:
1849 // NOLINTNEXTLINE(misc-non-private-member-variables-in-classes)
1850 ArenaVector<DataType::Type> *input_types_ {nullptr};
1851 };
1852
1853 /**
1854 * Compare instruction
1855 */
1856 // NOLINTNEXTLINE(fuchsia-multiple-inheritance)
1857 class CompareInst : public InstWithOperandsType<ConditionMixin<FixedInputsInst2>> {
1858 public:
1859 using BaseInst = InstWithOperandsType<ConditionMixin<FixedInputsInst2>>;
1860 using BaseInst::BaseInst;
1861
1862 CompareInst(Opcode opcode, DataType::Type type, uint32_t pc, ConditionCode cc) : BaseInst(opcode, type, pc)
1863 {
1864 SetCc(cc);
1865 }
1866
1867 DataType::Type GetInputType([[maybe_unused]] size_t index) const override
1868 {
1869 ASSERT(index < GetInputsCount());
1870 return GetOperandsType();
1871 }
1872 void DumpOpcode(std::ostream *out) const override;
1873
1874 void SetVnObject(VnObject *vn_obj) override;
1875
1876 Inst *Clone(const Graph *targetGraph) const override;
1877 };
1878
1879 /**
1880 * Mixin for instructions that carry an AnyBaseType
1881 */
1882 template <typename T>
1883 class AnyTypeMixin : public T {
1884 public:
1885 using T::T;
1886
1887 void SetAnyType(AnyBaseType any_type)
1888 {
1889 T::template SetField<AnyBaseTypeField>(any_type);
1890 }
1891
1892 AnyBaseType GetAnyType() const
1893 {
1894 return T::template GetField<AnyBaseTypeField>();
1895 }
1896
1897 protected:
1898 using AnyBaseTypeField =
1899 typename T::LastField::template NextField<AnyBaseType, MinimumBitsToStore(AnyBaseType::COUNT)>;
1900 using LastField = AnyBaseTypeField;
1901 };
1902
1903 /**
1904 * CompareAnyTypeInst instruction
1905 */
1906 class CompareAnyTypeInst : public AnyTypeMixin<FixedInputsInst1> {
1907 public:
1908 using BaseInst = AnyTypeMixin<FixedInputsInst1>;
1909 using BaseInst::BaseInst;
1910
1911 CompareAnyTypeInst(Opcode opcode, uint32_t pc, AnyBaseType any_type) : BaseInst(opcode, DataType::Type::BOOL, pc)
1912 {
1913 SetAnyType(any_type);
1914 }
1915
1916 DataType::Type GetInputType(size_t index) const override
1917 {
1918 ASSERT(index < GetInputsCount());
1919 return GetInput(index).GetInst()->GetType();
1920 }
1921
1922 void DumpOpcode(std::ostream *out) const override;
1923
1924 Inst *Clone(const Graph *targetGraph) const override
1925 {
1926 auto clone = FixedInputsInst::Clone(targetGraph);
1927 clone->CastToCompareAnyType()->SetAnyType(GetAnyType());
1928 return clone;
1929 }
1930 };
1931
1932 /**
1933 * CastAnyTypeValueInst instruction
1934 */
1935 class CastAnyTypeValueInst : public AnyTypeMixin<FixedInputsInst1> {
1936 public:
1937 using BaseInst = AnyTypeMixin<FixedInputsInst1>;
1938 using BaseInst::BaseInst;
1939
1940 CastAnyTypeValueInst(Opcode opcode, uint32_t pc, AnyBaseType any_type)
1941 : BaseInst(opcode, AnyBaseTypeToDataType(any_type), pc)
1942 {
1943 SetAnyType(any_type);
1944 }
1945
1946 DataType::Type GetInputType(size_t index) const override
1947 {
1948 ASSERT(index < GetInputsCount());
1949 return GetInput(index).GetInst()->GetType();
1950 }
1951
1952 DataType::Type GetDeducedType() const
1953 {
1954 return AnyBaseTypeToDataType(GetAnyType());
1955 }
1956
1957 void DumpOpcode(std::ostream *out) const override;
1958
1959 Inst *Clone(const Graph *targetGraph) const override
1960 {
1961 auto targetGraphClone = FixedInputsInst::Clone(targetGraph);
1962 CHECK_NOT_NULL(targetGraphClone);
1963 auto clone = targetGraphClone->CastToCastAnyTypeValue();
1964 AnyBaseType any_type = GetAnyType();
1965 clone->SetAnyType(any_type);
1966 clone->SetType(GetType());
1967 return clone;
1968 }
1969 };
1970
1971 /**
1972 * CastValueToAnyTypeInst instruction
1973 */
1974 class CastValueToAnyTypeInst : public AnyTypeMixin<FixedInputsInst1> {
1975 public:
1976 using BaseInst = AnyTypeMixin<FixedInputsInst1>;
1977 using BaseInst::BaseInst;
1978
1979 CastValueToAnyTypeInst(Opcode opcode, uint32_t pc) : BaseInst(opcode, DataType::ANY, pc) {}
1980
1981 DataType::Type GetInputType(size_t index) const override
1982 {
1983 ASSERT(index < GetInputsCount());
1984 return GetInput(index).GetInst()->GetType();
1985 }
1986
1987 void DumpOpcode(std::ostream *out) const override;
1988
1989 Inst *Clone(const Graph *targetGraph) const override
1990 {
1991 auto clone = FixedInputsInst::Clone(targetGraph)->CastToCastValueToAnyType();
1992 auto any_type = GetAnyType();
1993 clone->SetAnyType(any_type);
1994 clone->SetType(GetType());
1995 return clone;
1996 }
1997 };
1998
1999 /**
2000 * ConstantInst represents a constant value.
2001 *
2002 * Available types: INT32 (when support_int32 is set), INT64, FLOAT32, FLOAT64, ANY. All integer types are stored as an INT64 value.
2003 * Once the type of a constant is set, it can't be changed anymore.
2004 */
2005 class ConstantInst : public Inst {
2006 public:
2007 using Inst::Inst;
2008
2009 template <typename T>
2010 explicit ConstantInst(Opcode /* unused */, T value, bool support_int32 = false) : Inst(Opcode::Constant)
2011 {
2012 ASSERT(GetTypeFromCType<T>() != DataType::NO_TYPE);
2013 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-branch-clone)
2014 if constexpr (GetTypeFromCType<T>() == DataType::FLOAT64) {
2015 value_ = bit_cast<uint64_t, double>(value);
2016 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
2017 } else if constexpr (GetTypeFromCType<T>() == DataType::FLOAT32) {
2018 value_ = bit_cast<uint32_t, float>(value);
2019 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
2020 } else if constexpr (GetTypeFromCType<T>() == DataType::ANY) {
2021 value_ = value.Raw();
2022 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
2023 } else if (GetTypeFromCType<T>(support_int32) == DataType::INT32) {
2024 value_ = static_cast<int32_t>(value);
2025 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
2026 } else {
2027 value_ = value;
2028 }
2029
2030 SetType(GetTypeFromCType<T>(support_int32));
2031 }
2032
2033 uint64_t GetRawValue() const
2034 {
2035 return value_;
2036 }
2037
2038 uint32_t GetInt32Value() const
2039 {
2040 ASSERT(GetType() == DataType::INT32);
2041 return static_cast<uint32_t>(value_);
2042 }
2043
2044 uint64_t GetInt64Value() const
2045 {
2046 ASSERT(GetType() == DataType::INT64);
2047 return value_;
2048 }
2049
2050 uint64_t GetIntValue() const
2051 {
2052 ASSERT(GetType() == DataType::INT64 || GetType() == DataType::INT32);
2053 return value_;
2054 }
2055
2056 float GetFloatValue() const
2057 {
2058 ASSERT(GetType() == DataType::FLOAT32);
2059 return bit_cast<float, uint32_t>(static_cast<uint32_t>(value_));
2060 }
2061
2062 double GetDoubleValue() const
2063 {
2064 ASSERT(GetType() == DataType::FLOAT64);
2065 return bit_cast<double, uint64_t>(value_);
2066 }
2067
2068 ConstantInst *GetNextConst()
2069 {
2070 return next_const_;
2071 }
2072 void SetNextConst(ConstantInst *next_const)
2073 {
2074 next_const_ = next_const;
2075 }
2076
2077 template <typename T>
GetTypeFromCType(bool support_int32 = false)2078 static constexpr DataType::Type GetTypeFromCType(bool support_int32 = false)
2079 {
2080 // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-branch-clone)
2081 if constexpr (std::is_integral_v<T>) {
2082 if (support_int32 && sizeof(T) == sizeof(uint32_t)) {
2083 return DataType::INT32;
2084 }
2085 return DataType::INT64;
2086 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
2087 } else if constexpr (std::is_same_v<T, float>) {
2088 return DataType::FLOAT32;
2089 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
2090 } else if constexpr (std::is_same_v<T, double>) {
2091 return DataType::FLOAT64;
2092 // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
2093 } else if constexpr (std::is_same_v<T, DataType::Any>) {
2094 return DataType::ANY;
2095 }
2096 return DataType::NO_TYPE;
2097 }
2098
IsEqualConst(double value, [[maybe_unused]] bool support_int32 = false)2099 inline bool IsEqualConst(double value, [[maybe_unused]] bool support_int32 = false)
2100 {
2101 return IsEqualConst(DataType::FLOAT64, bit_cast<uint64_t, double>(value));
2102 }
IsEqualConst(float value, [[maybe_unused]] bool support_int32 = false)2103 inline bool IsEqualConst(float value, [[maybe_unused]] bool support_int32 = false)
2104 {
2105 return IsEqualConst(DataType::FLOAT32, bit_cast<uint32_t, float>(value));
2106 }
IsEqualConst(DataType::Any value, [[maybe_unused]] bool support_int32 = false)2107 inline bool IsEqualConst(DataType::Any value, [[maybe_unused]] bool support_int32 = false)
2108 {
2109 return IsEqualConst(DataType::ANY, value.Raw());
2110 }
IsEqualConst(DataType::Type type, uint64_t value)2111 inline bool IsEqualConst(DataType::Type type, uint64_t value)
2112 {
2113 return GetType() == type && value_ == value;
2114 }
2115 template <typename T>
IsEqualConst(T value, bool support_int32 = false)2116 inline bool IsEqualConst(T value, bool support_int32 = false)
2117 {
2118 static_assert(GetTypeFromCType<T>() == DataType::INT64);
2119 if (support_int32 && sizeof(T) == sizeof(uint32_t)) {
2120 return (GetType() == DataType::INT32 && static_cast<int32_t>(value_) == static_cast<int32_t>(value));
2121 }
2122 return (GetType() == DataType::INT64 && value_ == static_cast<uint64_t>(value));
2123 }
2124
IsEqualConstAllTypes(int64_t value, bool support_int32 = false)2125 inline bool IsEqualConstAllTypes(int64_t value, bool support_int32 = false)
2126 {
2127 return IsEqualConst(value, support_int32) || IsEqualConst(static_cast<float>(value)) ||
2128 IsEqualConst(static_cast<double>(value));
2129 }
2130
2131 bool IsBoolConst() const override
2132 {
2133 ASSERT(IsConst());
2134 return GetType() == DataType::INT64 && (GetIntValue() == 0 || GetIntValue() == 1);
2135 }
2136
SetImmTableSlot(ImmTableSlot imm_slot)2137 void SetImmTableSlot(ImmTableSlot imm_slot)
2138 {
2139 imm_slot_ = imm_slot;
2140 }
2141
GetImmTableSlot() const2142 auto GetImmTableSlot() const
2143 {
2144 return imm_slot_;
2145 }
2146
2147 bool DumpInputs(std::ostream *out) const override;
2148
2149 Inst *Clone(const Graph *targetGraph) const override;
2150
2151 private:
2152 uint64_t value_ {0};
2153 ConstantInst *next_const_ {nullptr};
2154 ImmTableSlot imm_slot_ {INVALID_IMM_TABLE_SLOT};
2155 };
2156
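/*
 * Illustrative sketch (helper name and values are made up; constants are normally created and
 * deduplicated through the Graph, shown here directly via Inst::New for brevity): integral C++
 * types are widened to INT64 unless `support_int32` is passed, while floats keep their width.
 */
inline bool ExampleConstantTyping(ArenaAllocator *allocator)
{
    static_assert(ConstantInst::GetTypeFromCType<int16_t>() == DataType::INT64);
    static_assert(ConstantInst::GetTypeFromCType<float>() == DataType::FLOAT32);

    auto *wide = Inst::New<ConstantInst>(allocator, Opcode::Constant, uint64_t {7U});
    auto *narrow = Inst::New<ConstantInst>(allocator, Opcode::Constant, int32_t {7}, true);  // support_int32
    return wide->IsEqualConst(7U) && narrow->GetType() == DataType::INT32;
}
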
// Type describing the purpose of the SpillFillInst.
// RegAlloc may use this information to preserve the correct order of several SpillFillInst
// instructions placed next to each other in the graph.
enum SpillFillType {
    UNKNOWN,
    INPUT_FILL,
    CONNECT_SPLIT_SIBLINGS,
    SPLIT_MOVE,
};

class SpillFillInst : public FixedInputsInst0 {
public:
    explicit SpillFillInst(ArenaAllocator *allocator, Opcode opcode)
        : FixedInputsInst0(opcode), spill_fills_(allocator->Adapter())
    {
    }

    void AddMove(Register src, Register dst, DataType::Type type)
    {
        AddSpillFill(Location::MakeRegister(src, type), Location::MakeRegister(dst, type), type);
    }

    void AddSpill(Register src, StackSlot dst, DataType::Type type)
    {
        AddSpillFill(Location::MakeRegister(src, type), Location::MakeStackSlot(dst), type);
    }

    void AddFill(StackSlot src, Register dst, DataType::Type type)
    {
        AddSpillFill(Location::MakeStackSlot(src), Location::MakeRegister(dst, type), type);
    }

    void AddMemCopy(StackSlot src, StackSlot dst, DataType::Type type)
    {
        AddSpillFill(Location::MakeStackSlot(src), Location::MakeStackSlot(dst), type);
    }

    void AddSpillFill(const SpillFillData &spill_fill)
    {
        spill_fills_.emplace_back(spill_fill);
    }

    void AddSpillFill(const Location &src, const Location &dst, DataType::Type type)
    {
        spill_fills_.emplace_back(SpillFillData {src.GetKind(), dst.GetKind(), src.GetValue(), dst.GetValue(), type});
    }

    const ArenaVector<SpillFillData> &GetSpillFills() const
    {
        return spill_fills_;
    }

    ArenaVector<SpillFillData> &GetSpillFills()
    {
        return spill_fills_;
    }

    const SpillFillData &GetSpillFill(size_t n) const
    {
        ASSERT(n < spill_fills_.size());
        return spill_fills_[n];
    }

    SpillFillData &GetSpillFill(size_t n)
    {
        ASSERT(n < spill_fills_.size());
        return spill_fills_[n];
    }

    void RemoveSpillFill(size_t n)
    {
        ASSERT(n < spill_fills_.size());
        spill_fills_.erase(spill_fills_.begin() + n);
    }

    // Get the register number held by the n-th spill-fill
    Register GetInputReg(size_t n) const
    {
        ASSERT(n < spill_fills_.size());
        ASSERT(spill_fills_[n].SrcType() == LocationType::REGISTER);
        return spill_fills_[n].SrcValue();
    }

    void ClearSpillFills()
    {
        spill_fills_.clear();
    }

    SpillFillType GetSpillFillType() const
    {
        return sf_type_;
    }

    void SetSpillFillType(SpillFillType type)
    {
        sf_type_ = type;
    }

    bool DumpInputs(std::ostream *out) const override;

    Inst *Clone(const Graph *targetGraph) const override;

private:
    ArenaVector<SpillFillData> spill_fills_;
    SpillFillType sf_type_ {UNKNOWN};
};

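/*
 * Illustrative sketch (helper name and the register/slot numbers are made up): how the register
 * allocator typically fills a SpillFillInst with resolving moves and spills.
 */
inline SpillFillInst *ExampleBuildSpillFill(ArenaAllocator *allocator)
{
    auto *sf = Inst::New<SpillFillInst>(allocator, Opcode::SpillFill);
    sf->SetSpillFillType(SpillFillType::INPUT_FILL);
    sf->AddMove(0U, 1U, DataType::UINT64);   // r0 -> r1
    sf->AddSpill(2U, 0U, DataType::UINT64);  // r2 -> stack slot 0
    ASSERT(sf->GetInputReg(0U) == 0U);       // source of the first spill-fill is r0
    return sf;
}
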
// NOLINTNEXTLINE(fuchsia-multiple-inheritance)
class ParameterInst : public Inst, public LocationDataMixin {
public:
    using Inst::Inst;

    explicit ParameterInst(Opcode /* unused */, uint16_t arg_number) : Inst(Opcode::Parameter), arg_number_(arg_number)
    {
    }
    uint16_t GetArgNumber() const
    {
        return arg_number_;
    }

    void SetArgNumber(uint16_t arg_number)
    {
        arg_number_ = arg_number;
    }

    bool DumpInputs(std::ostream *out) const override;

    Inst *Clone(const Graph *targetGraph) const override;

private:
    uint16_t arg_number_ {0};
};

inline bool IsZeroConstant(const Inst *inst)
{
    return inst->IsConst() && inst->GetType() == DataType::INT64 && inst->CastToConstant()->GetIntValue() == 0;
}

inline bool IsZeroConstantOrNullPtr(const Inst *inst)
{
    return IsZeroConstant(inst);
}

/**
 * Phi instruction
 */
class PhiInst : public AnyTypeMixin<DynamicInputsInst> {
public:
    using BaseInst = AnyTypeMixin<DynamicInputsInst>;
    using BaseInst::BaseInst;
    /// Get the basic block corresponding to the given input index. The returned pointer cannot be nullptr.
    BasicBlock *GetPhiInputBb(unsigned index);
    const BasicBlock *GetPhiInputBb(unsigned index) const
    {
        return (const_cast<PhiInst *>(this))->GetPhiInputBb(index);
    }

    uint32_t GetPhiInputBbNum(unsigned index) const
    {
        ASSERT(index < GetInputsCount());
        return GetDynamicOperands()->GetUser(index)->GetBbNum();
    }

    void SetPhiInputBbNum(unsigned index, uint32_t bb_num)
    {
        ASSERT(index < GetInputsCount());
        GetDynamicOperands()->GetUser(index)->SetBbNum(bb_num);
    }

    Inst *Clone(const Graph *targetGraph) const override
    {
        auto clone = DynamicInputsInst::Clone(targetGraph);
        clone->CastToPhi()->SetAnyType(GetAnyType());
        return clone;
    }

    AnyBaseType GetAssumedAnyType()
    {
        return GetAnyType();
    }

    /// Get the input instruction corresponding to the given basic block; cannot be null.
    Inst *GetPhiInput(BasicBlock *bb);
    Inst *GetPhiDataflowInput(BasicBlock *bb);
    bool DumpInputs(std::ostream *out) const override;

    // Get the index of the given block in the phi inputs
    size_t GetPredBlockIndex(const BasicBlock *block) const;

protected:
    using FlagIsLive = LastField::NextFlag;
    using LastField = FlagIsLive;
};

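/*
 * Illustrative sketch (helper name is made up): the i-th data input of a Phi is paired with the
 * i-th predecessor block, so looking up the incoming value for a given block is a linear scan
 * over the inputs. PhiInst::GetPhiInput() provides the same lookup.
 */
inline Inst *ExampleIncomingValueFor(PhiInst *phi, const BasicBlock *pred)
{
    for (unsigned i = 0; i < phi->GetInputsCount(); ++i) {
        if (phi->GetPhiInputBb(i) == pred) {
            return phi->GetInput(i).GetInst();
        }
    }
    return nullptr;
}
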
/**
 * Immediate for SaveState:
 * value  - constant value to be stored
 * vreg   - virtual register number
 * type   - type of the stored value
 * is_acc - whether the value is held in the accumulator
 */
struct SaveStateImm {
    uint64_t value;
    uint16_t vreg;
    DataType::Type type;
    bool is_acc;
};

/**
 * Frame state saving instruction.
 * Saves pbc registers before calling anything that can raise an exception.
 */
// NOLINTNEXTLINE(fuchsia-multiple-inheritance)
class SaveStateInst : public DynamicInputsInst {
public:
    using DynamicInputsInst::DynamicInputsInst;

    bool DumpInputs(std::ostream *out) const override;

    void SetVirtualRegister(size_t index, VirtualRegister reg)
    {
        static_assert(sizeof(reg) <= sizeof(uintptr_t), "Consider passing the register by reference");
        ASSERT(index < GetInputsCount());
        GetDynamicOperands()->GetUser(index)->SetVirtualRegister(reg);
    }

    VirtualRegister GetVirtualRegister(size_t index) const
    {
        ASSERT(index < GetInputsCount());
        return GetDynamicOperands()->GetUser(index)->GetVirtualRegister();
    }

    bool Verify() const
    {
        for (size_t i {0}; i < GetInputsCount(); ++i) {
            if (static_cast<uint16_t>(GetVirtualRegister(i)) == VirtualRegister::INVALID) {
                return false;
            }
        }
        return true;
    }

    bool RemoveNumericInputs()
    {
        size_t idx = 0;
        size_t inputs_count = GetInputsCount();
        bool removed = false;
        while (idx < inputs_count) {
            auto input_inst = GetInput(idx).GetInst();
            if (DataType::IsTypeNumeric(input_inst->GetType())) {
                RemoveInput(idx);
                inputs_count--;
                removed = true;
            } else {
                idx++;
            }
        }
        return removed;
    }

    DataType::Type GetInputType([[maybe_unused]] size_t index) const override
    {
        ASSERT(index < GetInputsCount());
        return DataType::NO_TYPE;
    }
    auto GetMethod() const
    {
        return method_;
    }
    auto SetMethod(void *method)
    {
        method_ = method;
    }

    void AppendImmediate(uint64_t imm, uint16_t vreg, DataType::Type type, bool is_acc);

    const ArenaVector<SaveStateImm> *GetImmediates() const
    {
        return immediates_;
    }

    const SaveStateImm &GetImmediate(size_t index) const
    {
        ASSERT(immediates_ != nullptr && index < immediates_->size());
        return (*immediates_)[index];
    }

    void AllocateImmediates(ArenaAllocator *allocator, size_t size = 0);

    size_t GetImmediatesCount() const
    {
        if (immediates_ == nullptr) {
            return 0;
        }
        return immediates_->size();
    }

    void SetRootsRegMaskBit(size_t reg)
    {
        ASSERT(reg < roots_regs_mask_.size());
        roots_regs_mask_.set(reg);
    }

    void SetRootsStackMaskBit(size_t slot)
    {
        if (roots_stack_mask_ != nullptr) {
            roots_stack_mask_->SetBit(slot);
        }
    }

    ArenaBitVector *GetRootsStackMask()
    {
        return roots_stack_mask_;
    }

    auto &GetRootsRegsMask()
    {
        return roots_regs_mask_;
    }

    void CreateRootsStackMask(ArenaAllocator *allocator)
    {
        ASSERT(roots_stack_mask_ == nullptr);
        roots_stack_mask_ = allocator->New<ArenaBitVector>(allocator);
        CHECK_NOT_NULL(roots_stack_mask_);
        roots_stack_mask_->Reset();
    }

    Inst *Clone(const Graph *targetGraph) const override;
#ifndef NDEBUG
    void SetInputsWereDeleted()
    {
        SetField<FlagInputsWereDeleted>(true);
    }

    bool GetInputsWereDeleted()
    {
        return GetField<FlagInputsWereDeleted>();
    }
#endif

protected:
#ifndef NDEBUG
    using FlagInputsWereDeleted = LastField::NextFlag;
    using LastField = FlagInputsWereDeleted;
#endif

private:
    ArenaVector<SaveStateImm> *immediates_ {nullptr};
    void *method_ {nullptr};
    /// Mask of stack slots that hold GC roots at this save point (see SetRootsStackMaskBit).
    ArenaBitVector *roots_stack_mask_ {nullptr};
    std::bitset<BITS_PER_UINT32> roots_regs_mask_ {0};
};

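/*
 * Illustrative sketch (helper name and the vreg/constant values are made up): a virtual register
 * whose value is a known constant can be recorded as an immediate instead of a real data input,
 * keeping the SaveState input list shorter.
 */
inline size_t ExampleRecordConstantVreg(SaveStateInst *save_state, ArenaAllocator *allocator)
{
    if (save_state->GetImmediates() == nullptr) {
        save_state->AllocateImmediates(allocator);
    }
    save_state->AppendImmediate(42U, /* vreg */ 3U, DataType::INT64, /* is_acc */ false);
    return save_state->GetImmediatesCount();
}
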
// NOLINTNEXTLINE(fuchsia-multiple-inheritance)
class IntrinsicInst : public InlinedInstMixin<DynamicInputsInst>, public InputTypesMixin {
public:
    using Base = InlinedInstMixin<DynamicInputsInst>;
    using Base::Base;
    using IntrinsicId = RuntimeInterface::IntrinsicId;

    IntrinsicInst(Opcode opcode, IntrinsicId intrinsic_id) : Base(opcode), intrinsic_id_(intrinsic_id) {}

    IntrinsicInst(Opcode opcode, DataType::Type type, uint32_t pc, IntrinsicId intrinsic_id)
        : Base(opcode, type, pc), intrinsic_id_(intrinsic_id) {}

    IntrinsicId GetIntrinsicId() const
    {
        return intrinsic_id_;
    }

    void SetIntrinsicId(IntrinsicId intrinsic_id)
    {
        intrinsic_id_ = intrinsic_id;
    }

    DataType::Type GetInputType(size_t index) const override
    {
        ASSERT(input_types_ != nullptr);
        ASSERT(index < input_types_->size());
        ASSERT(index < GetInputsCount());
        return (*input_types_)[index];
    }

    const ArenaVector<uint32_t> &GetImms()
    {
        return *imms_;
    }

    const ArenaVector<uint32_t> &GetImms() const
    {
        return *imms_;
    }

    bool HasImms() const
    {
        return imms_ != nullptr;
    }

    void AddImm(ArenaAllocator *allocator, uint32_t imm)
    {
        if (imms_ == nullptr) {
            imms_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            CHECK_NOT_NULL(imms_);
        }
        imms_->push_back(imm);
    }

    bool IsNativeCall() const;

    bool HasArgumentsOnStack() const
    {
        return GetField<ArgumentsOnStack>();
    }

    void SetArgumentsOnStack()
    {
        SetField<ArgumentsOnStack>(true);
    }

    Inst *Clone(const Graph *targetGraph) const override;

    bool CanBeInlined()
    {
        return IsInlined();
    }

    void SetRelocate()
    {
        SetField<Relocate>(true);
    }

    bool GetRelocate() const
    {
        return GetField<Relocate>();
    }

    void DumpOpcode(std::ostream *out) const override;

protected:
    using ArgumentsOnStack = LastField::NextFlag;
    using Relocate = ArgumentsOnStack::NextFlag;
    using LastField = Relocate;

private:
    std::string GetIntrinsicOpcodeName() const;

    IntrinsicId intrinsic_id_ {RuntimeInterface::IntrinsicId::COUNT};
    ArenaVector<uint32_t> *imms_ {nullptr};  // immediate operands recorded for the intrinsic
};

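/*
 * Illustrative sketch (helper name is made up; `id` is whatever RuntimeInterface::IntrinsicId the
 * caller resolved): immediates encoded directly in an intrinsic call are kept in a lazily
 * allocated side vector rather than as IR inputs.
 */
inline IntrinsicInst *ExampleCreateIntrinsic(ArenaAllocator *allocator, RuntimeInterface::IntrinsicId id, uint32_t pc)
{
    auto *intrinsic = Inst::New<IntrinsicInst>(allocator, Opcode::Intrinsic, DataType::ANY, pc, id);
    intrinsic->AddImm(allocator, 1U);  // allocates the imms vector on first use
    ASSERT(intrinsic->HasImms() && intrinsic->GetImms().size() == 1U);
    return intrinsic;
}
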
#include <ecma_intrinsics_enum.inl>

/**
 * Cmp instruction
 */
class CmpInst : public InstWithOperandsType<FixedInputsInst2> {
public:
    using BaseInst = InstWithOperandsType<FixedInputsInst2>;
    using BaseInst::BaseInst;

    bool IsFcmpg() const
    {
        ASSERT(DataType::IsFloatType(GetOperandsType()));
        return GetField<Fcmpg>();
    }
    bool IsFcmpl() const
    {
        ASSERT(DataType::IsFloatType(GetOperandsType()));
        return !GetField<Fcmpg>();
    }
    void SetFcmpg()
    {
        ASSERT(DataType::IsFloatType(GetOperandsType()));
        SetField<Fcmpg>(true);
    }
    void SetFcmpg(bool v)
    {
        ASSERT(DataType::IsFloatType(GetOperandsType()));
        SetField<Fcmpg>(v);
    }
    void SetFcmpl()
    {
        ASSERT(DataType::IsFloatType(GetOperandsType()));
        SetField<Fcmpg>(false);
    }
    void SetFcmpl(bool v)
    {
        ASSERT(DataType::IsFloatType(GetOperandsType()));
        SetField<Fcmpg>(!v);
    }

    DataType::Type GetInputType([[maybe_unused]] size_t index) const override
    {
        ASSERT(index < GetInputsCount());
        return GetOperandsType();
    }

    void SetVnObject(VnObject *vn_obj) override;

    void DumpOpcode(std::ostream *out) const override;

    Inst *Clone(const Graph *targetGraph) const override;

protected:
    using Fcmpg = LastField::NextFlag;
    using LastField = Fcmpg;
};

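/*
 * Illustrative sketch (helper name is made up): the Fcmpg bit only records which NaN ordering the
 * source bytecode requested for a floating-point compare; conventionally, cmpg-style compares
 * yield +1 and cmpl-style compares yield -1 when an operand is NaN.
 */
inline void ExampleMarkFcmpl(CmpInst *cmp)
{
    cmp->SetOperandsType(DataType::FLOAT64);  // the flag is only meaningful for float operands
    cmp->SetFcmpl();                          // equivalent to SetFcmpg(false)
    ASSERT(cmp->IsFcmpl() && !cmp->IsFcmpg());
}
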
// NOLINTNEXTLINE(fuchsia-multiple-inheritance)
class LoadFromPool : public NeedBarrierMixin<FixedInputsInst1>, public TypeIdMixin {
public:
    using Base = NeedBarrierMixin<FixedInputsInst1>;
    using Base::Base;

    DataType::Type GetInputType([[maybe_unused]] size_t index) const override
    {
        ASSERT(index < GetInputsCount());
        return DataType::NO_TYPE;
    }

    void DumpOpcode(std::ostream *out) const override;

    Inst *Clone(const Graph *targetGraph) const override
    {
        auto clone = FixedInputsInst::Clone(targetGraph);
        static_cast<LoadFromPool *>(clone)->SetTypeId(GetTypeId());
        static_cast<LoadFromPool *>(clone)->SetMethod(GetMethod());
        return clone;
    }
};

/**
 * Conditional jump instruction
 */
// NOLINTNEXTLINE(fuchsia-multiple-inheritance)
class IfInst : public InstWithOperandsType<ConditionMixin<FixedInputsInst2>> {
public:
    using Base = InstWithOperandsType<ConditionMixin<FixedInputsInst2>>;
    using Base::Base;

    IfInst(Opcode opcode, DataType::Type type, uint32_t pc, ConditionCode cc) : Base(opcode, type, pc)
    {
        SetCc(cc);
    }

    DataType::Type GetInputType([[maybe_unused]] size_t index) const override
    {
        ASSERT(index < GetInputsCount());
        return GetOperandsType();
    }

    void DumpOpcode(std::ostream *out) const override;

    void SetVnObject(VnObject *vn_obj) override;

    Inst *Clone(const Graph *targetGraph) const override;

    void SetMethod(RuntimeInterface::MethodPtr method)
    {
        method_ = method;
    }

    RuntimeInterface::MethodPtr GetMethod() const
    {
        return method_;
    }

private:
    RuntimeInterface::MethodPtr method_ {nullptr};
};

/**
 * IfImm instruction: conditional jump that compares its single input with an immediate
 */
// NOLINTNEXTLINE(fuchsia-multiple-inheritance)
class IfImmInst : public InstWithOperandsType<ConditionMixin<FixedInputsInst1>>, public ImmediateMixin {
public:
    using Base = InstWithOperandsType<ConditionMixin<FixedInputsInst1>>;
    using Base::Base;

    IfImmInst(Opcode opcode, DataType::Type type, uint32_t pc, ConditionCode cc, uint64_t imm)
        : Base(opcode, type, pc), ImmediateMixin(imm)
    {
        SetCc(cc);
    }

    DataType::Type GetInputType([[maybe_unused]] size_t index) const override
    {
        ASSERT(index < GetInputsCount());
        return GetOperandsType();
    }

    void DumpOpcode(std::ostream *out) const override;
    bool DumpInputs(std::ostream *out) const override;
    void SetVnObject(VnObject *vn_obj) override;

    Inst *Clone(const Graph *targetGraph) const override
    {
        auto clone = FixedInputsInst::Clone(targetGraph);
        clone->CastToIfImm()->SetCc(GetCc());
        clone->CastToIfImm()->SetImm(GetImm());
        clone->CastToIfImm()->SetOperandsType(GetOperandsType());
        clone->CastToIfImm()->SetMethod(GetMethod());
        return clone;
    }

    BasicBlock *GetEdgeIfInputTrue();
    BasicBlock *GetEdgeIfInputFalse();

    void SetMethod(RuntimeInterface::MethodPtr method)
    {
        method_ = method;
    }

    RuntimeInterface::MethodPtr GetMethod() const
    {
        return method_;
    }

private:
    size_t GetTrueInputEdgeIdx();
    RuntimeInterface::MethodPtr method_ {nullptr};
};

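/*
 * Illustrative sketch (helper name is made up; Inst::SetInput() is assumed to wire the input as
 * elsewhere in the IR, and the result type passed here is a placeholder): a typical
 * "branch if condition != 0" terminator.
 */
inline IfImmInst *ExampleBuildBranchIfNotZero(ArenaAllocator *allocator, Inst *condition, uint32_t pc)
{
    auto *if_imm = Inst::New<IfImmInst>(allocator, Opcode::IfImm, DataType::NO_TYPE, pc, CC_NE, 0U);
    if_imm->SetOperandsType(condition->GetType());
    if_imm->SetInput(0, condition);
    // Once the instruction terminates a block with two successors,
    // GetEdgeIfInputTrue()/GetEdgeIfInputFalse() resolve the taken and fall-through targets.
    return if_imm;
}
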
/**
 * CatchPhiInst instruction
 */
class CatchPhiInst : public DynamicInputsInst {
public:
    using DynamicInputsInst::DynamicInputsInst;

    const ArenaVector<const Inst *> *GetThrowableInsts() const
    {
        return throw_insts_;
    }

    const Inst *GetThrowableInst(size_t i) const
    {
        ASSERT(throw_insts_ != nullptr && i < throw_insts_->size());
        return throw_insts_->at(i);
    }

    void AppendThrowableInst(const Inst *inst);
    void ReplaceThrowableInst(const Inst *old_inst, const Inst *new_inst);
    void RemoveInput(unsigned index) override;

    bool IsAcc() const
    {
        return GetField<IsAccFlag>();
    }

    void SetIsAcc()
    {
        SetField<IsAccFlag>(true);
    }

protected:
    using IsAccFlag = LastField::NextFlag;
    using LastField = IsAccFlag;

private:
    size_t GetThrowableInstIndex(const Inst *inst)
    {
        ASSERT(throw_insts_ != nullptr);
        auto it = std::find(throw_insts_->begin(), throw_insts_->end(), inst);
        ASSERT(it != throw_insts_->end());
        return std::distance(throw_insts_->begin(), it);
    }

private:
    ArenaVector<const Inst *> *throw_insts_ {nullptr};
};

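/*
 * Illustrative sketch (helper name is made up): the throwable-instruction list appears to be kept
 * index-aligned with the CatchPhi inputs, so the value flowing in through input i corresponds to
 * the instruction returned by GetThrowableInst(i) that may throw into this handler.
 */
inline const Inst *ExampleThrowSourceOfInput(const CatchPhiInst *catch_phi, size_t input_index)
{
    ASSERT(catch_phi->GetThrowableInsts() != nullptr);
    ASSERT(input_index < catch_phi->GetThrowableInsts()->size());
    return catch_phi->GetThrowableInst(input_index);
}
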
class TryInst : public FixedInputsInst0 {
public:
    using FixedInputsInst0::FixedInputsInst0;

    void AppendCatchTypeId(uint32_t id, uint32_t catch_edge_index);

    const ArenaVector<uint32_t> *GetCatchTypeIds() const
    {
        return catch_type_ids_;
    }

    const ArenaVector<uint32_t> *GetCatchEdgeIndexes() const
    {
        return catch_edge_indexes_;
    }

    size_t GetCatchTypeIdsCount() const
    {
        return (catch_type_ids_ == nullptr ? 0 : catch_type_ids_->size());
    }

    Inst *Clone(const Graph *targetGraph) const override;

    void SetTryEndBlock(BasicBlock *try_end_bb)
    {
        try_end_bb_ = try_end_bb;
    }

    BasicBlock *GetTryEndBlock() const
    {
        return try_end_bb_;
    }

private:
    ArenaVector<uint32_t> *catch_type_ids_ {nullptr};
    ArenaVector<uint32_t> *catch_edge_indexes_ {nullptr};
    BasicBlock *try_end_bb_ {nullptr};
};

TryInst *GetTryBeginInst(const BasicBlock *try_begin_bb);

template <typename InstType, typename... Args>
InstType *Inst::New(ArenaAllocator *allocator, Args &&... args)
{
    static_assert(alignof(InstType) >= alignof(uintptr_t));
    // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-branch-clone)
    if constexpr (std::is_same_v<InstType, SpillFillInst>) {
        auto data = reinterpret_cast<uintptr_t>(allocator->Alloc(sizeof(InstType), DEFAULT_ALIGNMENT));
        CHECK(data != 0);
        return new (reinterpret_cast<void *>(data)) InstType(allocator, std::forward<Args>(args)...);
        // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
    } else if constexpr (InstType::INPUT_COUNT == 0) {
        auto data = reinterpret_cast<uintptr_t>(allocator->Alloc(sizeof(InstType), DEFAULT_ALIGNMENT));
        CHECK(data != 0);
        return new (reinterpret_cast<void *>(data)) InstType(std::forward<Args>(args)...);
        // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
    } else if constexpr (InstType::INPUT_COUNT == MAX_STATIC_INPUTS) {
        constexpr size_t OPERANDS_SIZE = sizeof(DynamicOperands);
        static_assert((OPERANDS_SIZE % alignof(InstType)) == 0);
        auto data = reinterpret_cast<uintptr_t>(allocator->Alloc(OPERANDS_SIZE + sizeof(InstType), DEFAULT_ALIGNMENT));
        CHECK(data != 0);
        auto inst = new (reinterpret_cast<void *>(data + OPERANDS_SIZE)) InstType(std::forward<Args>(args)...);
        [[maybe_unused]] auto operands = new (reinterpret_cast<void *>(data)) DynamicOperands(allocator);
        static_cast<Inst *>(inst)->SetField<InputsCount>(InstType::INPUT_COUNT);
        return inst;
    } else {  // NOLINT(readability-misleading-indentation)
        constexpr size_t OPERANDS_SIZE = sizeof(Operands<InstType::INPUT_COUNT>);
        constexpr auto ALIGNMENT {GetLogAlignment(alignof(Operands<InstType::INPUT_COUNT>))};
        static_assert((OPERANDS_SIZE % alignof(InstType)) == 0);
        auto data = reinterpret_cast<uintptr_t>(allocator->Alloc(OPERANDS_SIZE + sizeof(InstType), ALIGNMENT));
        CHECK(data != 0);
        auto inst = new (reinterpret_cast<void *>(data + OPERANDS_SIZE)) InstType(std::forward<Args>(args)...);
        auto operands = new (reinterpret_cast<void *>(data)) Operands<InstType::INPUT_COUNT>;
        static_cast<Inst *>(inst)->SetField<InputsCount>(InstType::INPUT_COUNT);
        unsigned idx = InstType::INPUT_COUNT - 1;
        for (auto &user : operands->users) {
            new (&user) User(true, idx--, InstType::INPUT_COUNT);
        }
        return inst;
    }
}

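/*
 * Illustrative sketch (helper name and the opcode/type/pc values are made up): for a fixed-input
 * instruction, Inst::New performs a single arena allocation laid out as [ Operands<N> | InstType ],
 * i.e. the operand block sits immediately before the instruction object, with its User entries
 * pre-initialised by the loop above.
 */
inline CmpInst *ExampleCreateCmp(ArenaAllocator *allocator, uint32_t pc)
{
    // CmpInst is based on FixedInputsInst2, so this takes the last branch of Inst::New.
    return Inst::New<CmpInst>(allocator, Opcode::Cmp, DataType::INT32, pc);
}
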
inline Inst *User::GetInput()
{
    return GetInst()->GetInput(GetIndex()).GetInst();
}

inline const Inst *User::GetInput() const
{
    return GetInst()->GetInput(GetIndex()).GetInst();
}

inline std::ostream &operator<<(std::ostream &os, const Inst &inst)
{
    inst.Dump(&os, false);
    return os;
}

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(opcode, base, ...)                     \
    inline const base *Inst::CastTo##opcode() const     \
    {                                                   \
        ASSERT(GetOpcode() == Opcode::opcode);          \
        return static_cast<const base *>(this);         \
    }
OPCODE_LIST(INST_DEF)
#undef INST_DEF

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(opcode, base, ...)                     \
    inline base *Inst::CastTo##opcode()                 \
    {                                                   \
        ASSERT(GetOpcode() == Opcode::opcode);          \
        return static_cast<base *>(this);               \
    }
OPCODE_LIST(INST_DEF)
#undef INST_DEF
}  // namespace panda::compiler

#endif  // COMPILER_OPTIMIZER_IR_INST_H