/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef COMPILER_OPTIMIZER_IR_GRAPH_H
#define COMPILER_OPTIMIZER_IR_GRAPH_H

#include <algorithm>
#include <optional>
#include "compiler_events_gen.h"
#include "inst.h"
#include "marker.h"
#include "optimizer/pass_manager.h"
#include "utils/arena_containers.h"

namespace panda {
class Method;
class CodeAllocator;
} // namespace panda

namespace panda::compiler {
class BasicBlock;
class Graph;
class RuntimeInfo;
class PassManager;
class LivenessAnalyzer;
class DominatorsTree;
class Rpo;
class Loop;
class ParameterInfo;

/**
 * Specifies graph compilation mode.
 */
class GraphMode {
public:
    explicit GraphMode(uint32_t value) : value_(value) {}

// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define DECLARE_GRAPH_MODE(name)                    \
    static GraphMode name(bool set = true)          \
    {                                               \
        return GraphMode(Flag##name ::Encode(set)); \
    }                                               \
    void Set##name(bool v)                          \
    {                                               \
        Flag##name ::Set(v, &value_);               \
    }                                               \
    bool Is##name() const                           \
    {                                               \
        return Flag##name ::Get(value_);            \
    }

    DECLARE_GRAPH_MODE(Osr);
    // The graph is used in BytecodeOptimizer mode
    DECLARE_GRAPH_MODE(BytecodeOpt);
    // The method from dynamic language
    DECLARE_GRAPH_MODE(DynamicMethod);
    // Graph will be compiled with native calling convention
    DECLARE_GRAPH_MODE(Native);
    // FastPath from compiled code to runtime
    DECLARE_GRAPH_MODE(FastPath);
    // Boundary frame is used for compiled code
    DECLARE_GRAPH_MODE(Boundary);
    // Graph will be compiled for calling inside interpreter
    DECLARE_GRAPH_MODE(Interpreter);
    // Graph will be compiled for interpreter main loop
    DECLARE_GRAPH_MODE(InterpreterEntry);

#undef DECLARE_GRAPH_MODE

    bool SupportManagedCode() const
    {
        return !IsNative() && !IsFastPath() && !IsBoundary() && !IsInterpreter() && !IsInterpreterEntry();
    }

    void Dump(std::ostream &stm);

private:
    using FlagOsr = BitField<bool, 0, 1>;
    using FlagBytecodeOpt = FlagOsr::NextFlag;
    using FlagDynamicMethod = FlagBytecodeOpt::NextFlag;
    using FlagNative = FlagDynamicMethod::NextFlag;
    using FlagFastPath = FlagNative::NextFlag;
    using FlagBoundary = FlagFastPath::NextFlag;
    using FlagInterpreter = FlagBoundary::NextFlag;
    using FlagInterpreterEntry = FlagInterpreter::NextFlag;

    uint32_t value_ {0};

    friend GraphMode operator|(GraphMode a, GraphMode b);
};

inline GraphMode operator|(GraphMode a, GraphMode b)
{
    return GraphMode(a.value_ | b.value_);
}
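
// A short illustrative note (not part of the API surface): individual modes can be
// combined with operator| and queried through the generated Is<Name>()/Set<Name>()
// accessors, roughly as in this sketch:
//
//   GraphMode mode = GraphMode::Osr(true) | GraphMode::BytecodeOpt(false);
//   if (mode.IsOsr()) {
//       // OSR-specific handling
//   }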

using EncodeDataType = Span<uint8_t>;

class Graph final : public MarkerMgr {
public:
    explicit Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch)
        : Graph(allocator, local_allocator, arch, false)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, bool osr_mode)
        : Graph(allocator, local_allocator, arch, nullptr, GetDefaultRuntime(), osr_mode)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, bool dynamic_method, bool bytecode_opt)
        : Graph(allocator, local_allocator, arch, nullptr, GetDefaultRuntime(), false, nullptr, dynamic_method,
                bytecode_opt)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, bool osr_mode)
        : Graph(allocator, local_allocator, arch, method, runtime, osr_mode, nullptr)
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, bool osr_mode, Graph *parent, bool dynamic_method = false,
          bool bytecode_opt = false)
        : Graph(allocator, local_allocator, arch, method, runtime, parent,
                GraphMode::Osr(osr_mode) | GraphMode::BytecodeOpt(bytecode_opt) |
                    GraphMode::DynamicMethod(dynamic_method))
    {
    }

    Graph(ArenaAllocator *allocator, ArenaAllocator *local_allocator, Arch arch, RuntimeInterface::MethodPtr method,
          RuntimeInterface *runtime, Graph *parent, GraphMode mode)
        : ALLOCATOR(allocator),
          LOCAL_ALLOCATOR(local_allocator),
          arch_(arch),
          vector_bb_(allocator->Adapter()),
          throwable_insts_(allocator->Adapter()),
          runtime_(runtime),
          method_(method),
          pass_manager_(this, parent != nullptr ? parent->GetPassManager() : nullptr),
          event_writer_(runtime->GetClassNameFromMethod(method), runtime->GetMethodName(method)),
          mode_(mode),
          single_implementation_list_(allocator->Adapter()),
          try_begin_blocks_(allocator->Adapter()),
          spilled_constants_(allocator->Adapter()),
          parent_graph_(parent)
    {
        SetNeedCleanup(true);
    }

    ~Graph() override;

    Graph *CreateChildGraph(RuntimeInterface::MethodPtr method)
    {
        auto graph = GetAllocator()->New<Graph>(GetAllocator(), GetLocalAllocator(), GetArch(), method, GetRuntime(),
                                                this, mode_);
        return graph;
    }

    /// Get default runtime interface object
    static RuntimeInterface *GetDefaultRuntime()
    {
        static RuntimeInterface runtime_interface;
        return &runtime_interface;
    }

    Arch GetArch() const
    {
        return arch_;
    }

    void AddBlock(BasicBlock *block);
#ifndef NDEBUG
    void AddBlock(BasicBlock *block, uint32_t id);
#endif
    void DisconnectBlock(BasicBlock *block, bool remove_last_inst = true, bool fix_dom_tree = true);
    void DisconnectBlockRec(BasicBlock *block, bool remove_last_inst = true, bool fix_dom_tree = true);

    void EraseBlock(BasicBlock *block);
    void RestoreBlock(BasicBlock *block);
    // Remove empty block. Block must have one successor and no Phis.
    void RemoveEmptyBlock(BasicBlock *block);

    // Remove empty block. Block may have Phis and can't be a loop pre-header.
    void RemoveEmptyBlockWithPhis(BasicBlock *block, bool irr_loop = false);

    // Remove block predecessors.
    void RemovePredecessors(BasicBlock *block, bool remove_last_inst = true);

    // Remove block successors.
    void RemoveSuccessors(BasicBlock *block);

    // Remove unreachable blocks.
    void RemoveUnreachableBlocks();

    // get end block
    BasicBlock *GetEndBlock()
    {
        return end_block_;
    }

    BasicBlock *GetEndBlock() const
    {
        return end_block_;
    }
    // set end block
    void SetEndBlock(BasicBlock *end_block)
    {
        end_block_ = end_block;
    }
    bool HasEndBlock()
    {
        return end_block_ != nullptr;
    }
    // get start block
    BasicBlock *GetStartBlock()
    {
        return start_block_;
    }
    BasicBlock *GetStartBlock() const
    {
        return start_block_;
    }
    // set start block
    void SetStartBlock(BasicBlock *start_block)
    {
        start_block_ = start_block;
    }
    // get vector_bb_
    const ArenaVector<BasicBlock *> &GetVectorBlocks() const
    {
        return vector_bb_;
    }

    size_t GetAliveBlocksCount() const
    {
        return std::count_if(vector_bb_.begin(), vector_bb_.end(), [](BasicBlock *block) { return block != nullptr; });
    }

    PassManager *GetPassManager()
    {
        return &pass_manager_;
    }
    const PassManager *GetPassManager() const
    {
        return &pass_manager_;
    }

    const ArenaVector<BasicBlock *> &GetBlocksRPO() const;

    const ArenaVector<BasicBlock *> &GetBlocksLinearOrder() const;

    template <class Callback>
    void VisitAllInstructions(Callback callback);

    /// Main allocator for the graph; all Graph-related data should be allocated via this allocator.
    ArenaAllocator *GetAllocator() const
    {
        return ALLOCATOR;
    }
    /// Allocator for temporary data that is no longer needed after an optimization/analysis has finished.
    ArenaAllocator *GetLocalAllocator() const
    {
        return LOCAL_ALLOCATOR;
    }
    bool IsDFConstruct() const
    {
        return FlagDFConstruct::Get(bit_fields_);
    }
    void SetDFConstruct()
    {
        FlagDFConstruct::Set(true, &bit_fields_);
    }

    bool IsAotMode() const
    {
        return false;
    }

    bool IsOfflineCompilationMode() const
    {
        return IsAotMode() || GetMode().IsInterpreter();
    }

    bool IsDefaultLocationsInit() const
    {
        return FlagDefaultLocationsInit::Get(bit_fields_);
    }
    void SetDefaultLocationsInit()
    {
        FlagDefaultLocationsInit::Set(true, &bit_fields_);
    }
#ifndef NDEBUG
    bool IsRegAllocApplied() const
    {
        return FlagRegallocApplied::Get(bit_fields_);
    }
    void SetRegAllocApplied()
    {
        FlagRegallocApplied::Set(true, &bit_fields_);
    }
    bool IsRegAccAllocApplied() const
    {
        return FlagRegaccallocApplied::Get(bit_fields_);
    }
    void SetRegAccAllocApplied()
    {
        FlagRegaccallocApplied::Set(true, &bit_fields_);
    }
    bool IsInliningComplete() const
    {
        return FlagInliningComplete::Get(bit_fields_);
    }
    void SetInliningComplete()
    {
        FlagInliningComplete::Set(true, &bit_fields_);
    }
    bool IsSchedulerComplete() const
    {
        return FlagSchedulerComplete::Get(bit_fields_);
    }
    void SetSchedulerComplete()
    {
        FlagSchedulerComplete::Set(true, &bit_fields_);
    }
    bool IsLowLevelInstructionsEnabled() const
    {
        return FlagLowLevelInstnsEnabled::Get(bit_fields_);
    }
    void SetLowLevelInstructionsEnabled()
    {
        FlagLowLevelInstnsEnabled::Set(true, &bit_fields_);
    }
#else
    bool IsRegAllocApplied() const
    {
        return false;
    }
#endif // NDEBUG

    void SetData(EncodeDataType data)
    {
        data_ = data;
    }

    EncodeDataType GetData() const
    {
        return data_;
    }

    EncodeDataType GetData()
    {
        return data_;
    }

    void SetCodeInfo(Span<uint8_t> data)
    {
        code_info_data_ = data.SubSpan<const uint8_t>(0, data.size());
    }

    Span<const uint8_t> GetCodeInfoData() const
    {
        return code_info_data_;
    }

    void DumpUsedRegs(std::ostream &out = std::cerr, const char *prefix = nullptr) const
    {
        if (prefix != nullptr) {
            out << prefix;
        }
        out << "'\n used scalar regs: ";
        if (used_regs_ != nullptr) {
            for (unsigned i = 0; i < used_regs_->size(); ++i) {
                if (used_regs_->at(i)) {
                    out << i << " ";
                }
            }
        }
        out << "\n used float regs: ";
        if (used_vregs_ != nullptr) {
            for (unsigned i = 0; i < used_vregs_->size(); ++i) {
                if (used_vregs_->at(i)) {
                    out << i << " ";
                }
            }
        }
        out << std::endl;
    }

    // Get the mask of registers used in the graph
    template <DataType::Type reg_type>
    ArenaVector<bool> *GetUsedRegs() const
    {
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::INT64) {
            return used_regs_;
        }
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::FLOAT64) {
            return used_vregs_;
        }
        UNREACHABLE();
        return nullptr;
    }

    void SetRegUsage(Register reg, DataType::Type type)
    {
        ASSERT(reg != INVALID_REG);
        if (DataType::IsFloatType(type)) {
            SetUsedReg<DataType::FLOAT64>(reg);
        } else {
            SetUsedReg<DataType::INT64>(reg);
        }
    }

    template <DataType::Type reg_type>
    void SetUsedReg(Register reg)
    {
        ArenaVector<bool> *graph_regs = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::INT64) {
            graph_regs = used_regs_;
            // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
        } else if constexpr (reg_type == DataType::FLOAT64) {
            graph_regs = used_vregs_;
        } else {
            UNREACHABLE();
        }
        CHECK_NOT_NULL(graph_regs);
        ASSERT(reg < graph_regs->size());
        (*graph_regs)[reg] = true;
    }

    template <DataType::Type reg_type>
    void InitUsedRegs(const ArenaVector<bool> *used_regs)
    {
        if (used_regs == nullptr) {
            return;
        }
        ArenaVector<bool> *graph_regs = nullptr;
        // NOLINTNEXTLINE(readability-braces-around-statements, bugprone-suspicious-semicolon)
        if constexpr (reg_type == DataType::INT64) {
            used_regs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
            graph_regs = used_regs_;
            // NOLINTNEXTLINE(readability-braces-around-statements, readability-misleading-indentation)
        } else if constexpr (reg_type == DataType::FLOAT64) {
            used_vregs_ = GetAllocator()->New<ArenaVector<bool>>(GetAllocator()->Adapter());
            graph_regs = used_vregs_;
        } else {
            UNREACHABLE();
        }
        CHECK_NOT_NULL(graph_regs);
        graph_regs->resize(used_regs->size());
        std::copy(used_regs->begin(), used_regs->end(), graph_regs->begin());
    }

    uint32_t GetStackSlotsCount() const
    {
        return stack_slot_count_;
    }

    void SetStackSlotsCount(uint32_t stack_slot_count)
    {
        stack_slot_count_ = stack_slot_count;
    }

    void UpdateStackSlotsCount(uint32_t stack_slot_count)
    {
        stack_slot_count_ = std::max(stack_slot_count_, stack_slot_count);
    }

    uint32_t GetParametersSlotsCount() const;

    uint32_t GetExtSlotsStart() const
    {
        return ext_stack_slot_;
    }

    void SetExtSlotsStart(uint32_t ext_stack_slot)
    {
        ext_stack_slot_ = ext_stack_slot;
    }

    BasicBlock *CreateEmptyBlock(uint32_t guest_pc = INVALID_PC);
    BasicBlock *CreateEmptyBlock(BasicBlock *base_block);
#ifndef NDEBUG
    BasicBlock *CreateEmptyBlock(uint32_t id, uint32_t guest_pc);
#endif
    BasicBlock *CreateStartBlock();
    BasicBlock *CreateEndBlock(uint32_t guest_pc = INVALID_PC);
    ConstantInst *GetFirstConstInst()
    {
        return first_const_inst_;
    }
    void SetFirstConstInst(ConstantInst *const_inst)
    {
        first_const_inst_ = const_inst;
    }

    Inst *GetNullPtrInst() const
    {
        return nullptr_inst_;
    }
    bool HasNullPtrInst() const
    {
        return nullptr_inst_ != nullptr;
    }
    void UnsetNullPtrInst()
    {
        ASSERT(HasNullPtrInst());
        nullptr_inst_ = nullptr;
    }

    /// Find constant in the list, return nullptr if not found
    ConstantInst *FindConstant(DataType::Type type, uint64_t value);
    /// Find constant in the list or create new one and insert at the end
    template <typename T>
    ConstantInst *FindOrCreateConstant(T value);

    /**
     * Find constant that is equal to the given one specified by inst. If not found, add inst to the graph.
     * @param inst Constant instruction to be added
     * @return Found instruction or inst if not found
     */
    ConstantInst *FindOrAddConstant(ConstantInst *inst);

    ParameterInst *AddNewParameter(uint16_t arg_number);

    ParameterInst *AddNewParameter(uint16_t arg_number, DataType::Type type)
    {
        ParameterInst *param = AddNewParameter(arg_number);
        param->SetType(type);
        return param;
    }

    /*
     * This function removes the ConstantInst from the graph's constant list.
     * NOTE: the ConstantInst is not removed from the BasicBlock's instruction list.
     */
    void RemoveConstFromList(ConstantInst *const_inst);

    ConstantInst *GetSpilledConstant(ImmTableSlot slot)
    {
        ASSERT(static_cast<size_t>(slot) < spilled_constants_.size());
        return spilled_constants_[slot];
    }

    ImmTableSlot AddSpilledConstant(ConstantInst *const_inst)
    {
        // Constant already in the table
        auto current_slot = const_inst->GetImmTableSlot();
        if (current_slot != INVALID_IMM_TABLE_SLOT) {
            ASSERT(spilled_constants_[current_slot] == const_inst);
            return current_slot;
        }

        auto count = spilled_constants_.size();
        if (count >= MAX_NUM_IMM_SLOTS) {
            return INVALID_IMM_TABLE_SLOT;
        }
        spilled_constants_.push_back(const_inst);
        const_inst->SetImmTableSlot(count);
        return ImmTableSlot(count);
    }

    ImmTableSlot FindSpilledConstantSlot(ConstantInst *const_inst) const
    {
        auto slot = std::find(spilled_constants_.begin(), spilled_constants_.end(), const_inst);
        if (slot == spilled_constants_.end()) {
            return INVALID_IMM_TABLE_SLOT;
        }
        return std::distance(spilled_constants_.begin(), slot);
    }

    size_t GetSpilledConstantsCount() const
    {
        return spilled_constants_.size();
    }

    bool HasAvailableConstantSpillSlots() const
    {
        return GetSpilledConstantsCount() < MAX_NUM_IMM_SLOTS;
    }
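
    // A minimal usage sketch (illustrative, not a prescribed flow): a caller would
    // typically check slot availability before pinning a constant to an
    // immediate-table slot, falling back to the regular path otherwise:
    //
    //   if (graph->HasAvailableConstantSpillSlots()) {
    //       ImmTableSlot slot = graph->AddSpilledConstant(const_inst);
    //       ASSERT(slot != INVALID_IMM_TABLE_SLOT);
    //   }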

    auto begin() // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.begin();
    }
    auto begin() const // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.begin();
    }
    auto end() // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.end();
    }
    auto end() const // NOLINT(readability-identifier-naming)
    {
        return vector_bb_.end();
    }

    void Dump(std::ostream *out) const;

    Loop *GetRootLoop()
    {
        return root_loop_;
    }
    const Loop *GetRootLoop() const
    {
        return root_loop_;
    }

    void SetRootLoop(Loop *root_loop)
    {
        root_loop_ = root_loop;
    }

    void SetHasIrreducibleLoop(bool has_irr_loop)
    {
        FlagIrredicibleLoop::Set(has_irr_loop, &bit_fields_);
    }

    void SetHasInfiniteLoop(bool has_inf_loop)
    {
        FlagInfiniteLoop::Set(has_inf_loop, &bit_fields_);
    }

    void SetHasFloatRegs()
    {
        FlagFloatRegs::Set(true, &bit_fields_);
    }

    bool HasLoop() const;
    bool HasIrreducibleLoop() const;
    bool HasInfiniteLoop() const;
    bool HasFloatRegs() const;

    /**
     * Try-catch info
     * Vector of try-begin blocks in the order they were declared in the bytecode
     */
    void AppendTryBeginBlock(const BasicBlock *block)
    {
        try_begin_blocks_.push_back(block);
    }

    void EraseTryBeginBlock(const BasicBlock *block)
    {
        auto it = std::find(try_begin_blocks_.begin(), try_begin_blocks_.end(), block);
        if (it == try_begin_blocks_.end()) {
            ASSERT(false && "Trying to remove non try_begin block");
            return;
        }
        try_begin_blocks_.erase(it);
    }

    const auto &GetTryBeginBlocks() const
    {
        return try_begin_blocks_;
    }

    void AppendThrowableInst(const Inst *inst, BasicBlock *catch_handler)
    {
        auto it = throwable_insts_.emplace(inst, GetAllocator()->Adapter()).first;
        it->second.push_back(catch_handler);
    }

    bool IsInstThrowable(const Inst *inst) const
    {
        return throwable_insts_.count(inst) > 0;
    }

    void RemoveThrowableInst(const Inst *inst);
    void ReplaceThrowableInst(Inst *old_inst, Inst *new_inst);

    const auto &GetThrowableInstHandlers(const Inst *inst) const
    {
        ASSERT(IsInstThrowable(inst));
        return throwable_insts_.at(inst);
    }

    void ClearTryCatchInfo()
    {
        throwable_insts_.clear();
        try_begin_blocks_.clear();
    }

    void DumpThrowableInsts(std::ostream *out) const;

    /**
     * Run the pass specified by template argument T.
     * Optimization passes may take additional arguments that will be passed to the optimization's constructor.
     * Analyses can't take additional arguments.
     * @tparam T Type of pass
     * @param args Additional arguments for optimization passes
     * @return true if the pass was successful
     */
    template <typename T, typename... Args>
    bool RunPass(Args... args)
    {
        ASSERT(GetPassManager());
        return pass_manager_.RunPass<T>(std::forward<Args>(args)...);
    }
    template <typename T, typename... Args>
    bool RunPass(Args... args) const
    {
        ASSERT(GetPassManager());
        return pass_manager_.RunPass<T>(std::forward<Args>(args)...);
    }

    template <typename T>
    bool RunPass(T *pass)
    {
        ASSERT(GetPassManager());
        return pass_manager_.RunPass(pass, GetLocalAllocator()->GetAllocatedSize());
    }
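
    // A brief usage sketch (illustrative; `SomeOptimization` is a placeholder, any
    // optimization or analysis registered with the PassManager can be used):
    //
    //   if (!graph->RunPass<SomeOptimization>(/* extra ctor args */)) {
    //       // the pass reported failure or made no changes
    //   }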

    /**
     * Get analysis instance.
     * All analyses reside in the Graph object in a composition relationship.
     * @tparam T Type of analysis
     * @return Reference to analysis instance
     */
    template <typename T>
    T &GetAnalysis()
    {
        ASSERT(GetPassManager());
        return GetPassManager()->GetAnalysis<T>();
    }
    template <typename T>
    const T &GetAnalysis() const
    {
        ASSERT(GetPassManager());
        return pass_manager_.GetAnalysis<T>();
    }

    /**
     * Same as GetAnalysis, but additionally checks that the analysis is in a valid state.
     * @tparam T Type of analysis
     * @return Reference to analysis instance
     */
    template <typename T>
    T &GetValidAnalysis()
    {
        RunPass<T>();
        ASSERT(IsAnalysisValid<T>());
        return GetAnalysis<T>();
    }
    template <typename T>
    const T &GetValidAnalysis() const
    {
        RunPass<T>();
        ASSERT(IsAnalysisValid<T>());
        return GetAnalysis<T>();
    }

    /**
     * Return true if the analysis is valid, false otherwise
     * @tparam T Type of analysis
     */
    template <typename T>
    bool IsAnalysisValid() const
    {
        return GetAnalysis<T>().IsValid();
    }

    /**
     * Reset the valid state of the specified analysis
     * @tparam T Type of analysis
     */
    template <typename T>
    void InvalidateAnalysis()
    {
        ASSERT(GetPassManager());
        GetPassManager()->GetAnalysis<T>().SetValid(false);
    }
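
    // A typical flow (illustrative): request a valid analysis before using its results
    // and invalidate it after the graph has been mutated. LivenessAnalyzer is used here
    // only as an example of an analysis type:
    //
    //   auto &liveness = graph->GetValidAnalysis<LivenessAnalyzer>();
    //   // ... transform the graph ...
    //   graph->InvalidateAnalysis<LivenessAnalyzer>();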

    /// Accessors for the current instruction id counter.
    auto GetCurrentInstructionId() const
    {
        return instr_current_id_;
    }
    auto SetCurrentInstructionId(size_t v)
    {
        instr_current_id_ = v;
    }

    /// RuntimeInterface accessors
    RuntimeInterface *GetRuntime() const
    {
        return runtime_;
    }
    void SetRuntime(RuntimeInterface *runtime)
    {
        runtime_ = runtime;
    }
    auto GetMethod() const
    {
        return method_;
    }
    auto SetMethod(RuntimeInterface::MethodPtr method)
    {
        method_ = method;
    }

    void ResetParameterInfo();

    EventWriter &GetEventWriter()
    {
        return event_writer_;
    }

    // clang-format off

    /**
     * Create instruction by opcode
     */
    [[nodiscard]] Inst* CreateInst(Opcode opc) const
    {
        switch (opc) {
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(OPCODE, BASE, ...)                                     \
            case Opcode::OPCODE: {                                      \
                auto inst = Inst::New<BASE>(ALLOCATOR, Opcode::OPCODE); \
                inst->SetId(instr_current_id_++);                       \
                return inst;                                            \
            }
            OPCODE_LIST(INST_DEF)

#undef INST_DEF
            default:
                return nullptr;
        }
    }
    /**
     * Define creation methods for all opcodes
     */
// NOLINTNEXTLINE(cppcoreguidelines-macro-usage)
#define INST_DEF(OPCODE, BASE, ...)                                                          \
    template <typename... Args>                                                              \
    [[nodiscard]] BASE* CreateInst##OPCODE(Args&&... args) const {                           \
        auto inst = Inst::New<BASE>(ALLOCATOR, Opcode::OPCODE, std::forward<Args>(args)...); \
        inst->SetId(instr_current_id_++);                                                    \
        return inst;                                                                         \
    }
    OPCODE_LIST(INST_DEF)

#undef INST_DEF
    // clang-format on
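
    // Illustrative only: for every entry in OPCODE_LIST a typed factory method named
    // CreateInst<Opcode> is generated above. Assuming OPCODE_LIST contains an opcode
    // named Add and that the created instruction is then attached to a block, the
    // usage looks roughly like this (sketch; AppendInst is assumed to exist on
    // BasicBlock):
    //
    //   auto *add = graph->CreateInstAdd();
    //   block->AppendInst(add);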

    uint32_t GetBitFields()
    {
        return bit_fields_;
    }

    void SetBitFields(uint32_t bit_fields)
    {
        bit_fields_ = bit_fields;
    }

    bool NeedCleanup() const
    {
        return FlagNeedCleanup::Get(bit_fields_);
    }

    void SetNeedCleanup(bool v)
    {
        FlagNeedCleanup::Set(v, &bit_fields_);
    }

    bool IsOsrMode() const
    {
        return mode_.IsOsr();
    }

    bool IsBytecodeOptimizer() const
    {
        return mode_.IsBytecodeOpt();
    }

    bool IsDynamicMethod() const
    {
        return mode_.IsDynamicMethod();
    }

    bool SupportManagedCode() const
    {
        return mode_.SupportManagedCode();
    }

    GraphMode GetMode() const
    {
        return mode_;
    }

    void SetMode(GraphMode mode)
    {
        mode_ = mode;
    }

#ifndef NDEBUG
    compiler::inst_modes::Mode GetCompilerMode()
    {
        if (IsBytecodeOptimizer()) {
            return compiler::inst_modes::BYTECODE_OPT;
        }
        if (SupportManagedCode()) {
            return compiler::inst_modes::JIT_AOT;
        }
        return compiler::inst_modes::IRTOC;
    }
#endif

    void AddSingleImplementationMethod(RuntimeInterface::MethodPtr method)
    {
        single_implementation_list_.push_back(method);
    }

    void SetDynamicMethod()
    {
        mode_.SetDynamicMethod(true);
    }

    auto &GetSingleImplementationList()
    {
        return single_implementation_list_;
    }

    Graph *GetParentGraph()
    {
        return parent_graph_;
    }

    Graph *GetOutermostParentGraph()
    {
        auto graph = this;
        while (graph->GetParentGraph() != nullptr) {
            graph = graph->GetParentGraph();
        }
        return graph;
    }

    void SetVRegsCount(size_t count)
    {
        vregs_count_ = count;
    }

    size_t GetVRegsCount() const
    {
        return vregs_count_;
    }

    int64_t GetBranchCounter(const BasicBlock *block, bool true_succ);

    /**
     * This class provides support for a range-based `for` loop over all parameters in the graph.
     */
    class ParameterList {
    public:
        class Iterator {
        public:
            explicit Iterator(Inst *inst) : inst_(inst) {}

            Iterator &operator++()
            {
                for (inst_ = inst_->GetNext(); inst_ != nullptr && inst_->GetOpcode() != Opcode::Parameter;
                     inst_ = inst_->GetNext()) {
                }
                return *this;
            }
            bool operator!=(const Iterator &other)
            {
                return inst_ != other.inst_;
            }
            Inst *operator*()
            {
                return inst_;
            }
            Inst *operator->()
            {
                return inst_;
            }

        private:
            Inst *inst_ {nullptr};
        };

        explicit ParameterList(const Graph *graph) : graph_(graph) {}

        // NOLINTNEXTLINE(readability-identifier-naming)
        Iterator begin();
        // NOLINTNEXTLINE(readability-identifier-naming)
        static Iterator end()
        {
            return Iterator(nullptr);
        }

    private:
        const Graph *graph_ {nullptr};
    };

    /**
     * Get list of all parameters
     * @return instance of the ParameterList class
     */
    ParameterList GetParameters() const
    {
        return ParameterList(this);
    }
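
    // A minimal usage sketch: iterate over all Parameter instructions of the graph.
    //
    //   for (auto *param : graph->GetParameters()) {
    //       ASSERT(param->GetOpcode() == Opcode::Parameter);
    //   }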

    void InitDefaultLocations();

private:
    void AddConstInStartBlock(ConstantInst *const_inst);

    NO_MOVE_SEMANTIC(Graph);
    NO_COPY_SEMANTIC(Graph);

private:
    ArenaAllocator *const ALLOCATOR;
    ArenaAllocator *const LOCAL_ALLOCATOR;

    Arch arch_ {RUNTIME_ARCH};

    // List of blocks in insertion order.
    ArenaVector<BasicBlock *> vector_bb_;
    BasicBlock *start_block_ {nullptr};
    BasicBlock *end_block_ {nullptr};

    Loop *root_loop_ {nullptr};

    uint32_t bit_fields_ {0};
    using FlagDFConstruct = BitField<bool, 0, 1>;
    using FlagNeedCleanup = FlagDFConstruct::NextFlag;
    using FlagIrredicibleLoop = FlagNeedCleanup::NextFlag;
    using FlagInfiniteLoop = FlagIrredicibleLoop::NextFlag;
    using FlagFloatRegs = FlagInfiniteLoop::NextFlag;
    using FlagDefaultLocationsInit = FlagFloatRegs::NextFlag;
#ifndef NDEBUG
    using FlagRegallocApplied = FlagDefaultLocationsInit::NextFlag;
    using FlagRegaccallocApplied = FlagRegallocApplied::NextFlag;
    using FlagInliningComplete = FlagRegaccallocApplied::NextFlag;
    using FlagSchedulerComplete = FlagInliningComplete::NextFlag;
    using FlagLowLevelInstnsEnabled = FlagSchedulerComplete::NextFlag;
#endif // NDEBUG

    // codegen data
    EncodeDataType data_;
    Span<const uint8_t> code_info_data_;
    ArenaVector<bool> *used_regs_ {nullptr};
    ArenaVector<bool> *used_vregs_ {nullptr};

    // TODO (a.popov) Replace by ArenaMap from throwable_inst* to try_inst*
    ArenaMap<const Inst *, ArenaVector<BasicBlock *>> throwable_insts_;

    mutable size_t instr_current_id_ {0};
    // First constant instruction in the graph. TODO: rewrite this list into a hash map.
    ConstantInst *first_const_inst_ {nullptr};
    Inst *nullptr_inst_ {nullptr};

    RuntimeInterface *runtime_ {nullptr};
    RuntimeInterface::MethodPtr method_ {nullptr};

    ParameterInfo *param_info_ {nullptr};

    mutable PassManager pass_manager_;
    EventWriter event_writer_;

    GraphMode mode_;

    ArenaVector<RuntimeInterface::MethodPtr> single_implementation_list_;
    ArenaVector<const BasicBlock *> try_begin_blocks_;
    ArenaVector<ConstantInst *> spilled_constants_;
    // Graph that inlines this graph
    Graph *parent_graph_ {nullptr};
    // Number of used stack slots
    uint32_t stack_slot_count_ {0};
    // Number of used stack slots for parameters
    uint32_t param_slots_count_ {0};
    // First language extension slot
    uint32_t ext_stack_slot_ {0};
    // Number of the virtual registers used in the compiled method (inlined methods aren't included).
    uint32_t vregs_count_ {0};
};

class MarkerHolder {
public:
    NO_COPY_SEMANTIC(MarkerHolder);
    NO_MOVE_SEMANTIC(MarkerHolder);

    explicit MarkerHolder(const Graph *graph) : graph_(graph), marker_(graph->NewMarker())
    {
        ASSERT(marker_ != UNDEF_MARKER);
    }

    ~MarkerHolder()
    {
        graph_->EraseMarker(marker_);
    }

    Marker GetMarker()
    {
        return marker_;
    }

private:
    const Graph *graph_;
    Marker marker_ {UNDEF_MARKER};
};
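
// A brief RAII sketch (illustrative): MarkerHolder acquires a marker from the graph
// and releases it automatically at scope exit, so a pass does not have to call
// EraseMarker manually:
//
//   {
//       MarkerHolder holder(graph);
//       Marker visited = holder.GetMarker();
//       // ... mark blocks/instructions with `visited` ...
//   }  // marker is erased here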

template <typename T>
ConstantInst *Graph::FindOrCreateConstant(T value)
{
    bool is_support_int32 = IsBytecodeOptimizer();
    if (first_const_inst_ == nullptr) {
        first_const_inst_ = CreateInstConstant(value, is_support_int32);
        AddConstInStartBlock(first_const_inst_);
        return first_const_inst_;
    }
    ConstantInst *current_const = first_const_inst_;
    ConstantInst *prev_const = nullptr;
    while (current_const != nullptr) {
        if (current_const->IsEqualConst(value, is_support_int32)) {
            return current_const;
        }
        prev_const = current_const;
        current_const = current_const->GetNextConst();
    }
    ASSERT(prev_const != nullptr);
    auto *new_const = CreateInstConstant(value, is_support_int32);
    AddConstInStartBlock(new_const);

    prev_const->SetNextConst(new_const);
    return new_const;
}

void InvalidateBlocksOrderAnalyzes(Graph *graph);
void MarkLoopExits(const Graph *graph, Marker marker);
void RemovePredecessorUpdateDF(BasicBlock *block, BasicBlock *rm_pred);
std::string GetMethodFullName(const Graph *graph, RuntimeInterface::MethodPtr method);
} // namespace panda::compiler

#endif // COMPILER_OPTIMIZER_IR_GRAPH_H