/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "inst.h"
#include "graph.h"
#include "graph_visitor.h"
#include "optimizer/optimizations/vn.h"

namespace panda::compiler {

void Inst::ReserveInputs(size_t capacity)
{
    ASSERT(IsOperandsDynamic());
    GetDynamicOperands()->Reallocate(capacity);
}

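// A User lives in a block of memory laid out in front of its owning Inst. For fixed-operand instructions the
// layout is [User x N (reversed)][Input x N (+ padding)][Inst], so the owner is recovered by stepping over the
// remaining users and all inputs. For dynamic operands an explicit owner pointer is stored right after the
// users array (see DynamicOperands::Reallocate below), so stepping over the remaining users lands on it.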
Inst *User::GetInst()
{
    if (UNLIKELY(IsDynamic())) {
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        return *reinterpret_cast<Inst **>(this + GetIndex() + 1);
    }
    auto p = reinterpret_cast<uintptr_t>(this);
    p += (GetIndex() + 1) * sizeof(User);

    auto inputs_count {SizeField::Decode(properties_)};
    p += (inputs_count + Input::GetPadding(RUNTIME_ARCH, inputs_count)) * sizeof(Input);
    return reinterpret_cast<Inst *>(p);
}

void Inst::InsertBefore(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertBefore(inst, this);
}

void Inst::InsertAfter(Inst *inst)
{
    ASSERT(bb_ != nullptr);
    bb_->InsertAfter(inst, this);
}

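// Dynamic operands live in a single arena allocation with the following layout (users are stored in reverse
// order, so the user with index 0 sits at the end of the users array; this assumes sizeof(Input) equals
// sizeof(Inst *), which the size computation below also relies on):
//
//   [User x capacity (reversed)] [Inst *owner] [Input x capacity]
//
// Reallocate() grows this storage: it allocates a new block, re-creates every User at its new reversed
// position, re-links each input's user list to the new users, and copies the inputs over.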
void DynamicOperands::Reallocate(size_t new_capacity /* =0 */)
{
    if (new_capacity == 0) {
        constexpr auto IMM_2 = 2;
        new_capacity = (((capacity_ != 0U) ? capacity_ : 1U) << 1U) + IMM_2;
    } else if (new_capacity <= capacity_) {
        return;
    }
    auto size = new_capacity * (sizeof(User) + sizeof(Inst *)) + sizeof(Inst *);
    auto new_stor = reinterpret_cast<uintptr_t>(allocator_->Alloc(size));
    CHECK(new_stor != 0);

    auto owner_inst {GetOwnerInst()};
    // Store the pointer to the owner instruction in the new storage
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    *reinterpret_cast<Inst **>(reinterpret_cast<User *>(new_stor) + new_capacity) = owner_inst;

    if (users_ == nullptr) {
        users_ = reinterpret_cast<User *>(new_stor);
        capacity_ = new_capacity;
        return;
    }
    Input *old_inputs = Inputs();
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    auto *new_inputs = reinterpret_cast<Input *>(new_stor + sizeof(User) * new_capacity) + 1;

    for (size_t i = 0; i < size_; i++) {
        Inst *old_input = old_inputs[i].GetInst();  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        ASSERT(old_input);
        // Initialize the new User in the container. Since users are placed from the end of the array, i.e. the
        // zero-index element is at the end of the array, we need to add the capacity and subtract the index.
        // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
        User *new_user = new (reinterpret_cast<User *>(new_stor) + new_capacity - i - 1) User(false, i, new_capacity);
        auto old_user {GetUser(i)};
        if (owner_inst->IsSaveState()) {
            new_user->SetVirtualRegister(old_user->GetVirtualRegister());
        } else if (owner_inst->IsPhi()) {
            new_user->SetBbNum(old_user->GetBbNum());
        }
        old_input->RemoveUser(old_user);
        old_input->AddUser(new_user);
        new_inputs[i] = Input(old_input);  // NOLINT(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    }
    capacity_ = new_capacity;
    users_ = reinterpret_cast<User *>(new_stor);
}

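// Appends `inst` as the last input, growing the storage when it is full, and registers the corresponding
// User with the input instruction. Returns the index assigned to the new input.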
unsigned DynamicOperands::Append(Inst *inst)
{
    ASSERT(capacity_ >= size_);
    if (capacity_ == size_) {
        Reallocate();
    }
    SetInput(size_, Input(inst));
    // NOLINTNEXTLINE(cppcoreguidelines-pro-bounds-pointer-arithmetic)
    new (users_ + capacity_ - size_ - 1) User(false, size_, capacity_);
    auto user {GetUser(size_)};
    if (GetOwnerInst()->IsPhi()) {
        user->SetBbNum(size_);
    }
    inst->AddUser(user);
    return size_++;
}

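// Removes the input at `index` in O(1) by moving the last input into its slot, so the order of the remaining
// inputs is not preserved. For Phi owners the users' basic-block numbers are remapped afterwards so that the
// input-to-predecessor correspondence stays consistent.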
void DynamicOperands::Remove(unsigned index)
{
    size_--;
    auto *curr_input = GetInput(index)->GetInst();
    if (curr_input->GetBasicBlock() != nullptr && curr_input->HasUsers()) {
        curr_input->RemoveUser(GetUser(index));
    }

    auto bb_num {GetUser(index)->GetBbNum()};
    auto owner_inst {GetOwnerInst()};

    if (index != size_) {
        auto *last_input = GetInput(size_)->GetInst();
        if (last_input->HasUsers()) {
            last_input->RemoveUser(GetUser(size_));
            last_input->AddUser(GetUser(index));
        }
        SetInput(index, *GetInput(size_));
        if (owner_inst->IsSaveState()) {
            GetUser(index)->SetVirtualRegister(GetUser(size_)->GetVirtualRegister());
        } else if (owner_inst->IsPhi()) {
            GetUser(index)->SetBbNum(GetUser(size_)->GetBbNum());
        }
    }

    if (owner_inst->IsPhi()) {
        for (size_t i {0}; i < size_; ++i) {
            if (GetUser(i)->GetBbNum() == size_) {
                GetUser(i)->SetBbNum(bb_num);
                break;
            }
        }
    }
}

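// The SetVnObject() overrides below feed opcode-specific attributes (such as the condition code) into the
// value-numbering object, so that GVN does not treat, for example, two Compare instructions that differ only
// in condition code as equivalent expressions.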
void CompareInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void IfInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void IfImmInst::SetVnObject(VnObject *vn_obj)
{
    vn_obj->Add(static_cast<uint32_t>(GetCc()));
}

void CmpInst::SetVnObject(VnObject *vn_obj)
{
    if (DataType::IsFloatType(GetOperandsType())) {
        vn_obj->Add(static_cast<uint32_t>(IsFcmpg()));
    }
    vn_obj->Add(static_cast<uint32_t>(GetInputType(0)));
}

BasicBlock *PhiInst::GetPhiInputBb(unsigned index)
{
    ASSERT(index < GetInputsCount());

    auto bb_num {GetPhiInputBbNum(index)};
    ASSERT(bb_num < GetBasicBlock()->GetPredsBlocks().size());
    return GetBasicBlock()->GetPredsBlocks()[bb_num];
}

Inst *PhiInst::GetPhiInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetInput(index).GetInst();
}

Inst *PhiInst::GetPhiDataflowInput(BasicBlock *bb)
{
    auto index = GetPredBlockIndex(bb);
    ASSERT(index < GetInputs().size());
    return GetDataFlowInput(index);
}

size_t PhiInst::GetPredBlockIndex(const BasicBlock *block) const
{
    for (size_t i {0}; i < GetInputsCount(); ++i) {
        if (GetPhiInputBb(i) == block) {
            return i;
        }
    }
    UNREACHABLE();
}

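// Walks a chain of same-opcode instructions: starting from `input_inst`, follows input `input_idx` while the
// instruction's opcode equals `opc`, and returns the first instruction that breaks the chain.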
template <Opcode opc, size_t input_idx>
Inst *SkipInstructions(Inst *input_inst)
{
    // NOLINTNEXTLINE(readability-magic-numbers)
    for (Opcode opcode = input_inst->GetOpcode(); opcode == opc; opcode = input_inst->GetOpcode()) {
        input_inst = input_inst->GetInput(input_idx).GetInst();
    }
    return input_inst;
}

/*
 * For the instructions LoadArray, StoreArray, LoadArrayPair, StoreArrayPair, LoadArrayI, StoreArrayI,
 * LoadArrayPairI, StoreArrayPairI, LenArray, LoadObject, StoreObject, CallVirtual and Monitor with a NullCheck
 * input, the dataflow input is the object, which is the first input of the NullCheck instruction.
 * For the instructions LoadArray, StoreArray, LoadArrayPair and StoreArrayPair with a BoundsCheck input, the
 * dataflow input is the array index, which is the second input of the BoundsCheck instruction.
 * For the instructions Div and Mod with a ZeroCheck input, the dataflow input is the first input of the ZeroCheck.
 */
Inst *Inst::GetDataFlowInput(Inst *input_inst) const
{
    return input_inst;
}

bool Inst::IsPrecedingInSameBlock(const Inst *other) const
{
    ASSERT(other != nullptr && GetBasicBlock() == other->GetBasicBlock());
    if (this == other) {
        return true;
    }
    auto next = GetNext();
    while (next != nullptr) {
        if (next == other) {
            return true;
        }
        next = next->GetNext();
    }
    return false;
}

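// Dominance between instructions: within one block it reduces to linear order (an instruction dominates itself
// and everything after it), across blocks it delegates to basic-block dominance.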
bool Inst::IsDominate(const Inst *other) const
{
    ASSERT(other != nullptr);
    if (this == other) {
        return true;
    }
    auto this_bb = GetBasicBlock();
    auto other_bb = other->GetBasicBlock();
    return this_bb == other_bb ? IsPrecedingInSameBlock(other) : this_bb->IsDominate(other_bb);
}

bool Inst::InSameBlockOrDominate(const Inst *other) const
{
    return GetBasicBlock() == other->GetBasicBlock() || IsDominate(other);
}

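// Base cloning: creates an instruction with the same opcode in the target graph and copies the bit fields and
// bytecode pc. Inputs, users and basic-block membership are deliberately not copied. The subclass overrides
// below extend this with their extra state (condition codes, immediates, catch tables, and so on).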
Inst *Inst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto clone = targetGraph->CreateInst(GetOpcode());
    CHECK_NOT_NULL(clone);
    clone->bit_fields_ = GetAllFields();
    clone->pc_ = GetPc();
#ifndef NDEBUG
    clone->SetDstReg(GetDstReg());
#endif
    if (IsOperandsDynamic()) {
        clone->ReserveInputs(GetInputsCount());
    }
    return clone;
}

#if PANDA_TARGET_MACOS
template class FixedInputsInst<0>;
template class FixedInputsInst<1>;
template class FixedInputsInst<2U>;
#endif

Inst *SpillFillInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToSpillFill();
    for (auto spill_fill : spill_fills_) {
        clone->AddSpillFill(spill_fill);
    }
    return clone;
}

Inst *CompareInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph);
    clone->CastToCompare()->SetCc(GetCc());
    clone->CastToCompare()->SetOperandsType(GetOperandsType());
    return clone;
}

Inst *CmpInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph);
    clone->CastToCmp()->SetOperandsType(GetOperandsType());
    return clone;
}

Inst *IfInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph);
    static_cast<IfInst *>(clone)->SetCc(GetCc());
    static_cast<IfInst *>(clone)->SetOperandsType(GetOperandsType());
    static_cast<IfInst *>(clone)->SetMethod(GetMethod());
    return clone;
}

Inst *IntrinsicInst::Clone(const Graph *targetGraph) const
{
    ASSERT(targetGraph != nullptr);
    auto intrinsicClone = Inst::Clone(targetGraph)->CastToIntrinsic();
    intrinsicClone->SetIntrinsicId(GetIntrinsicId());
    CloneTypes(targetGraph->GetAllocator(), intrinsicClone);
    if (HasImms()) {
        for (auto imm : GetImms()) {
            intrinsicClone->AddImm(targetGraph->GetAllocator(), imm);
        }
    }
    return intrinsicClone;
}

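// Constants are recreated through CreateInstConstant rather than field-copied. The is_support_int32 flag
// mirrors Graph::IsBytecodeOptimizer(); in bytecode-optimizer mode INT32 constants are kept as 32-bit values
// rather than being widened to INT64 as in the regular compiler pipeline.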
Inst *ConstantInst::Clone(const Graph *targetGraph) const
{
    Inst *new_cnst = nullptr;
    bool is_support_int32 = GetBasicBlock()->GetGraph()->IsBytecodeOptimizer();
    switch (GetType()) {
        case DataType::INT32:
            new_cnst = targetGraph->CreateInstConstant(static_cast<int32_t>(GetIntValue()), is_support_int32);
            break;
        case DataType::INT64:
            new_cnst = targetGraph->CreateInstConstant(GetIntValue(), is_support_int32);
            break;
        case DataType::FLOAT32:
            new_cnst = targetGraph->CreateInstConstant(GetFloatValue(), is_support_int32);
            break;
        case DataType::FLOAT64:
            new_cnst = targetGraph->CreateInstConstant(GetDoubleValue(), is_support_int32);
            break;
        case DataType::ANY:
            new_cnst = targetGraph->CreateInstConstant(GetRawValue(), is_support_int32);
            new_cnst->SetType(DataType::ANY);
            break;
        default:
            UNREACHABLE();
    }
#ifndef NDEBUG
    new_cnst->SetDstReg(GetDstReg());
#endif
    return new_cnst;
}

Inst *ParameterInst::Clone(const Graph *targetGraph) const
{
    auto clone = Inst::Clone(targetGraph)->CastToParameter();
    clone->SetArgNumber(GetArgNumber());
    clone->SetLocationData(GetLocationData());
    return clone;
}

Inst *SaveStateInst::Clone(const Graph *targetGraph) const
{
    auto clone = static_cast<SaveStateInst *>(Inst::Clone(targetGraph));
    if (GetImmediatesCount() > 0) {
        clone->AllocateImmediates(targetGraph->GetAllocator(), GetImmediatesCount());
        std::copy(immediates_->begin(), immediates_->end(), clone->immediates_->begin());
    }
    clone->method_ = method_;
    return clone;
}

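// The immediates vector is allocated lazily: a SaveState may carry no immediate values at all, so the
// ArenaVector is only created on the first append.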
void SaveStateInst::AppendImmediate(uint64_t imm, uint16_t vreg, DataType::Type type, bool is_acc)
{
    if (immediates_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        AllocateImmediates(GetBasicBlock()->GetGraph()->GetAllocator(), 0);
    }
    immediates_->emplace_back(SaveStateImm {imm, vreg, type, is_acc});
}

void SaveStateInst::AllocateImmediates(ArenaAllocator *allocator, size_t size)
{
    immediates_ = allocator->New<ArenaVector<SaveStateImm>>(allocator->Adapter());
    CHECK_NOT_NULL(immediates_);
    immediates_->resize(size);
}

void TryInst::AppendCatchTypeId(uint32_t id, uint32_t catch_edge_index)
{
    if (catch_type_ids_ == nullptr) {
        ASSERT(catch_edge_indexes_ == nullptr);
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        catch_type_ids_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        catch_edge_indexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
        CHECK_NOT_NULL(catch_type_ids_);
        CHECK_NOT_NULL(catch_edge_indexes_);
    }
    catch_type_ids_->push_back(id);
    catch_edge_indexes_->push_back(catch_edge_index);
}

void CatchPhiInst::AppendThrowableInst(const Inst *inst)
{
    if (throw_insts_ == nullptr) {
        ASSERT(GetBasicBlock() != nullptr);
        auto allocator = GetBasicBlock()->GetGraph()->GetAllocator();
        throw_insts_ = allocator->New<ArenaVector<const Inst *>>(allocator->Adapter());
        CHECK_NOT_NULL(throw_insts_);
    }
    throw_insts_->push_back(inst);
}

void CatchPhiInst::ReplaceThrowableInst(const Inst *old_inst, const Inst *new_inst)
{
    auto index = GetThrowableInstIndex(old_inst);
    throw_insts_->at(index) = new_inst;
}

void CatchPhiInst::RemoveInput(unsigned index)
{
    Inst::RemoveInput(index);
    if (throw_insts_ != nullptr) {
        throw_insts_->at(index) = throw_insts_->back();
        throw_insts_->pop_back();
    }
}

Inst *TryInst::Clone(const Graph *targetGraph) const
{
    auto clone = FixedInputsInst::Clone(targetGraph)->CastToTry();
    if (auto ids_count = this->GetCatchTypeIdsCount(); ids_count > 0) {
        if (clone->catch_type_ids_ == nullptr) {
            auto allocator = targetGraph->GetAllocator();
            clone->catch_type_ids_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            clone->catch_edge_indexes_ = allocator->New<ArenaVector<uint32_t>>(allocator->Adapter());
            CHECK_NOT_NULL(clone->catch_type_ids_);
            CHECK_NOT_NULL(clone->catch_edge_indexes_);
        }
        clone->catch_type_ids_->resize(ids_count);
        clone->catch_edge_indexes_->resize(ids_count);
        std::copy(this->catch_type_ids_->begin(), this->catch_type_ids_->end(), clone->catch_type_ids_->begin());
        std::copy(this->catch_edge_indexes_->begin(), this->catch_edge_indexes_->end(),
                  clone->catch_edge_indexes_->begin());
    }
    return clone;
}

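// Helpers mapping the boolean value of the IfImm input to CFG successors. With the supported forms
// (cc == CC_NE or CC_EQ, imm == 0), a "true" (non-zero) input leads to successor 0 for CC_NE and to
// successor 1 for CC_EQ.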
BasicBlock *IfImmInst::GetEdgeIfInputTrue()
{
    return GetBasicBlock()->GetSuccessor(GetTrueInputEdgeIdx());
}

BasicBlock *IfImmInst::GetEdgeIfInputFalse()
{
    return GetBasicBlock()->GetSuccessor(1 - GetTrueInputEdgeIdx());
}

/**
 * NB! May be called only before the Lowering pass.
 * Returns the index of the if_imm block's successor that is taken when the input is true.
 */
size_t IfImmInst::GetTrueInputEdgeIdx()
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetSuccsBlocks().size() == MAX_SUCCS_NUM);
    ASSERT(GetCc() == ConditionCode::CC_NE || GetCc() == ConditionCode::CC_EQ);
    ASSERT(GetImm() == 0);
    return GetCc() == CC_NE ? 0 : 1;
}

bool Inst::IsPropagateLiveness() const
{
    return (CanThrow() && GetBasicBlock()->IsTry()) || CanDeoptimize();
}

bool Inst::RequireRegMap() const
{
    return CanThrow() && GetBasicBlock()->IsTry();
}

bool Inst::IsZeroRegInst() const
{
    ASSERT(GetBasicBlock() != nullptr);
    ASSERT(GetBasicBlock()->GetGraph() != nullptr);
    return false;
}

bool Inst::IsAccRead() const
{
    return GetFlag(inst_flags::ACC_READ);
}

bool Inst::IsAccWrite() const
{
    if (IsConst()) {
        return true;
    }
    return GetFlag(inst_flags::ACC_WRITE);
}

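// Looks up the Try instruction inside a try-begin block; such a block is expected to contain one, so falling
// through the loop is unreachable.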
TryInst *GetTryBeginInst(const BasicBlock *try_begin_bb)
{
    ASSERT(try_begin_bb != nullptr && try_begin_bb->IsTryBegin());
    for (auto inst : try_begin_bb->AllInsts()) {
        if (inst->GetOpcode() == Opcode::Try) {
            return inst->CastToTry();
        }
    }
    UNREACHABLE();
    return nullptr;
}

/**
 * Regalloc helper that checks whether the intrinsic's arguments must be located in registers according to the
 * calling convention.
 */
bool IntrinsicInst::IsNativeCall() const
{
    return false;
}

}  // namespace panda::compiler