/*
 * Copyright (c) 2023 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "lower.h"
#include <string>
#include <cinttypes>
#include <vector>
#include "mir_symbol.h"
#include "mir_function.h"
#include "cg_option.h"
#include "switch_lowerer.h"
#include "intrinsic_op.h"
#include "mir_builder.h"
#include "opcode_info.h"
#include "rt.h"
#include "securec.h"
#include "string_utils.h"

namespace maplebe {

using namespace maple;

#define TARGARM32 0

// The input node must be cvt, retype, zext or sext.
BaseNode *CGLowerer::LowerCastExpr(BaseNode &expr)
{
    return &expr;
}

#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
BlockNode *CGLowerer::LowerReturnStructUsingFakeParm(NaryStmtNode &retNode)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
        retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
    }
    BaseNode *opnd0 = retNode.Opnd(0);
    if (opnd0 == nullptr) {
        /* It is possible that the function never returns and has a dummy return const instead of a struct. */
        maple::LogInfo::MapleLogger(kLlWarn) << "return struct should have a kid" << std::endl;
    }

    MIRFunction *curFunc = GetCurrentFunc();
    MIRSymbol *retSt = curFunc->GetFormal(0);
    MIRPtrType *retTy = static_cast<MIRPtrType *>(retSt->GetType());
    IassignNode *iassign = mirModule.CurFuncCodeMemPool()->New<IassignNode>();
    iassign->SetTyIdx(retTy->GetTypeIndex());
    DEBUG_ASSERT(opnd0 != nullptr, "opnd0 should not be nullptr");
    iassign->SetFieldID(0);
    iassign->SetRHS(opnd0);
    if (retSt->IsPreg()) {
        RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(
            GetLoweredPtrType(), curFunc->GetPregTab()->GetPregIdxFromPregno(retSt->GetPreg()->GetPregNo()));
        iassign->SetOpnd(regNode, 0);
    } else {
        AddrofNode *dreadNode = mirModule.CurFuncCodeMemPool()->New<AddrofNode>(OP_dread);
        dreadNode->SetPrimType(GetLoweredPtrType());
        dreadNode->SetStIdx(retSt->GetStIdx());
        iassign->SetOpnd(dreadNode, 0);
    }
    blk->AddStatement(iassign);
    retNode.GetNopnd().clear();
    retNode.SetNumOpnds(0);
    blk->AddStatement(&retNode);
    return blk;
}
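
/*
 * Rough sketch of the transformation above (illustrative MIR, not the
 * compiler's verbatim output): for a function whose struct result is
 * returned through a hidden first formal, a return like
 *     return (dread agg %retval)
 * is rewritten into roughly
 *     iassign <* agg> 0 (dread ptr %first_formal, dread agg %retval)
 *     return ()
 * i.e. the struct is stored through the fake parameter and the return
 * statement itself is left with no operands.
 */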

#endif /* TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64 */

BlockNode *CGLowerer::LowerReturn(NaryStmtNode &retNode)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    if (retNode.NumOpnds() != 0) {
        BaseNode *expr = retNode.Opnd(0);
        Opcode opr = expr->GetOpCode();
        if (opr == OP_dread) {
            AddrofNode *retExpr = static_cast<AddrofNode *>(expr);
            MIRFunction *mirFunc = mirModule.CurFunction();
            MIRSymbol *sym = mirFunc->GetLocalOrGlobalSymbol(retExpr->GetStIdx());
            if (sym->GetAttr(ATTR_localrefvar)) {
                mirFunc->InsertMIRSymbol(sym);
            }
        }
    }
    for (size_t i = 0; i < retNode.GetNopndSize(); ++i) {
        retNode.SetOpnd(LowerExpr(retNode, *retNode.GetNopndAt(i), *blk), i);
    }
    blk->AddStatement(&retNode);
    return blk;
}

void CGLowerer::LowerIassign(IassignNode &iassign, BlockNode &newBlk)
{
    CHECK_FATAL(iassign.GetFieldID() == 0, "fieldID must be 0");
    LowerStmt(iassign, newBlk);
    newBlk.AddStatement(&iassign);
}

BaseNode *CGLowerer::NeedRetypeWhenLowerCallAssigned(PrimType pType)
{
    BaseNode *retNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
    if (IsPrimitiveInteger(pType) && GetPrimTypeBitSize(pType) <= k32BitSize) {
        auto newPty = IsPrimitiveUnsigned(pType) ? PTY_u64 : PTY_i64;
        retNode = mirModule.GetMIRBuilder()->CreateExprTypeCvt(OP_cvt, newPty, pType, *retNode);
    }
    return retNode;
}
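
/*
 * Illustrative example (MIR spelling is approximate): when the return value
 * is read as a sub-32-bit integer, e.g. u8, the retval read
 *     regread u8 %%retval0
 * is wrapped so that later phases always see a full register width:
 *     cvt u64 u8 (regread u8 %%retval0)
 */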

DassignNode *CGLowerer::SaveReturnValueInLocal(StIdx stIdx, uint16 fieldID)
{
    MIRSymbol *var;
    if (stIdx.IsGlobal()) {
        var = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    } else {
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        var = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
    }
    CHECK_FATAL(var != nullptr, "var should not be nullptr");
    PrimType pType;
    if (var->GetAttr(ATTR_oneelem_simd)) {
        pType = PTY_f64;
    } else {
        pType = GlobalTables::GetTypeTable().GetTypeTable().at(var->GetTyIdx())->GetPrimType();
    }
    auto *regRead = NeedRetypeWhenLowerCallAssigned(pType);
    return mirModule.GetMIRBuilder()->CreateStmtDassign(*var, fieldID, regRead);
}
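
/*
 * Rough shape of the generated statement (names are illustrative): saving
 * %%retval0 into a local `x` of type i32 yields something like
 *     dassign %x 0 (cvt i64 i32 (regread i32 %%retval0))
 * where the cvt comes from NeedRetypeWhenLowerCallAssigned above.
 */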

/* Lower call (including icall) and intrinsiccall statements. */
void CGLowerer::LowerCallStmt(StmtNode &stmt, StmtNode *&nextStmt, BlockNode &newBlk, MIRType *retty, bool uselvar,
                              bool isIntrinAssign)
{
    StmtNode *newStmt = nullptr;
    if (stmt.GetOpCode() == OP_intrinsiccall) {
        auto &intrnNode = static_cast<IntrinsiccallNode &>(stmt);
        newStmt = LowerIntrinsiccall(intrnNode, newBlk);
    } else {
        /* We note that the function has a user-defined (i.e., not an intrinsic) call. */
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        GetCurrentFunc()->SetHasCall();
        newStmt = &stmt;
    }

    if (newStmt == nullptr) {
        return;
    }

    if (newStmt->GetOpCode() == OP_call || newStmt->GetOpCode() == OP_icall || newStmt->GetOpCode() == OP_icallproto) {
        auto &callNode = static_cast<NaryStmtNode &>(*newStmt);
        for (size_t i = 0; i < callNode.GetNopndSize(); ++i) {
            BaseNode *newOpnd = LowerExpr(callNode, *callNode.GetNopndAt(i), newBlk);
            callNode.SetOpnd(newOpnd, i);
        }
        newStmt = &callNode;
    }
    newStmt->SetSrcPos(stmt.GetSrcPos());
    newBlk.AddStatement(newStmt);
}

StmtNode *CGLowerer::GenCallNode(const StmtNode &stmt, PUIdx &funcCalled, CallNode &origCall)
{
    CallNode *newCall = nullptr;
    if (stmt.GetOpCode() == OP_callassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtCall(origCall.GetPUIdx(), origCall.GetNopnd());
    }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetSrcPos(stmt.GetSrcPos());
    funcCalled = origCall.GetPUIdx();
    CHECK_FATAL(newCall->GetOpCode() == OP_call, "virtual call or super class call are not expected");
    newCall->SetStmtAttrs(stmt.GetStmtAttrs());
    return newCall;
}

StmtNode *CGLowerer::GenIntrinsiccallNode(const StmtNode &stmt, PUIdx &funcCalled, bool &handledAtLowerLevel,
                                          IntrinsiccallNode &origCall)
{
    StmtNode *newCall = nullptr;
    handledAtLowerLevel = IsIntrinsicCallHandledAtLowerLevel(origCall.GetIntrinsic());
    if (handledAtLowerLevel) {
        /* If the lower level can handle the intrinsic, just let it pass through. */
        newCall = &origCall;
    } else {
        PUIdx bFunc = GetBuiltinToUse(origCall.GetIntrinsic());
        if (bFunc != kFuncNotFound) {
            newCall = mirModule.GetMIRBuilder()->CreateStmtCall(bFunc, origCall.GetNopnd());
            CHECK_FATAL(newCall->GetOpCode() == OP_call, "intrinsic node other than OP_call is not expected");
        } else {
            if (stmt.GetOpCode() == OP_intrinsiccallassigned) {
                newCall =
                    mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(), origCall.GetNopnd());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccall,
                            "intrinsic node other than OP_intrinsiccall is not expected");
            } else {
                newCall = mirModule.GetMIRBuilder()->CreateStmtIntrinsicCall(origCall.GetIntrinsic(),
                                                                             origCall.GetNopnd(), origCall.GetTyIdx());
                CHECK_FATAL(newCall->GetOpCode() == OP_intrinsiccallwithtype,
                            "intrinsic node other than OP_intrinsiccallwithtype is not expected");
            }
        }
        newCall->SetSrcPos(stmt.GetSrcPos());
        funcCalled = bFunc;
    }
    return newCall;
}

StmtNode *CGLowerer::GenIcallNode(PUIdx &funcCalled, IcallNode &origCall)
{
    IcallNode *newCall = nullptr;
    if (origCall.GetOpCode() == OP_icallassigned) {
        newCall = mirModule.GetMIRBuilder()->CreateStmtIcall(origCall.GetNopnd());
    } else {
        newCall = mirModule.GetMIRBuilder()->CreateStmtIcallproto(origCall.GetNopnd(), origCall.GetRetTyIdx());
        newCall->SetRetTyIdx(origCall.GetRetTyIdx());
    }
    CHECK_FATAL(newCall != nullptr, "nullptr is not expected");
    newCall->SetDeoptBundleInfo(origCall.GetDeoptBundleInfo());
    newCall->SetStmtAttrs(origCall.GetStmtAttrs());
    newCall->SetSrcPos(origCall.GetSrcPos());
    funcCalled = kFuncNotFound;
    return newCall;
}

BlockNode *CGLowerer::GenBlockNode(StmtNode &newCall, const CallReturnVector &p2nRets, const Opcode &opcode,
                                   const PUIdx &funcCalled, bool handledAtLowerLevel, bool uselvar)
{
    BlockNode *blk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    blk->AddStatement(&newCall);
    if (!handledAtLowerLevel) {
        CHECK_FATAL(p2nRets.size() <= 1, "make sure p2nRets size <= 1");
        /* Create a dassign statement to save kSregRetval0. */
        StmtNode *dStmt = nullptr;
        MIRType *retType = nullptr;
        if (p2nRets.size() == 1) {
            MIRSymbol *sym = nullptr;
            StIdx stIdx = p2nRets[0].first;
            if (stIdx.IsGlobal()) {
                sym = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
            } else {
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                sym = GetCurrentFunc()->GetSymbolTabItem(stIdx.Idx());
            }
            bool sizeIs0 = false;
            if (sym != nullptr) {
                retType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(sym->GetTyIdx());
                if (beCommon.GetTypeSize(retType->GetTypeIndex().GetIdx()) == 0) {
                    sizeIs0 = true;
                }
            }
            if (!sizeIs0) {
                RegFieldPair regFieldPair = p2nRets[0].second;
                if (!regFieldPair.IsReg()) {
                    uint16 fieldID = static_cast<uint16>(regFieldPair.GetFieldID());
                    DassignNode *dn = SaveReturnValueInLocal(stIdx, fieldID);
                    CHECK_FATAL(dn->GetFieldID() == 0, "make sure dn's fieldID return 0");
                    LowerDassign(*dn, *blk);
                    CHECK_FATAL(&newCall == blk->GetLast() || newCall.GetNext() == blk->GetLast(),
                                "the lowered dassign must immediately follow the call");
                    dStmt = (&newCall == blk->GetLast()) ? nullptr : blk->GetLast();
                    CHECK_FATAL(newCall.GetNext() == dStmt, "make sure newCall's next equal dStmt");
                } else {
                    PregIdx pregIdx = static_cast<PregIdx>(regFieldPair.GetPregIdx());
                    DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                    MIRPreg *mirPreg = GetCurrentFunc()->GetPregTab()->PregFromPregIdx(pregIdx);
                    PrimType pType = mirPreg->GetPrimType();
                    RegreadNode *regNode = mirModule.GetMIRBuilder()->CreateExprRegread(pType, -kSregRetval0);
                    RegassignNode *regAssign =
                        mirModule.GetMIRBuilder()->CreateStmtRegassign(pType, regFieldPair.GetPregIdx(), regNode);
                    blk->AddStatement(regAssign);
                    dStmt = regAssign;
                }
            }
        }
        blk->ResetBlock();
        /* If VerboseCG, insert a comment. */
        if (ShouldAddAdditionalComment()) {
            CommentNode *cmnt = mirModule.CurFuncCodeMemPool()->New<CommentNode>(mirModule);
            cmnt->SetComment(kOpcodeInfo.GetName(opcode).c_str());
            if (funcCalled == kFuncNotFound) {
                cmnt->Append(" : unknown");
            } else {
                cmnt->Append(" : ");
                cmnt->Append(GlobalTables::GetFunctionTable().GetFunctionFromPuidx(funcCalled)->GetName());
            }
            blk->AddStatement(cmnt);
        }
        CHECK_FATAL(dStmt == nullptr || dStmt->GetNext() == nullptr, "make sure dStmt or dStmt's next is nullptr");
        LowerCallStmt(newCall, dStmt, *blk, retType, uselvar, opcode == OP_intrinsiccallassigned);
        if (!uselvar && dStmt != nullptr) {
            dStmt->SetSrcPos(newCall.GetSrcPos());
            blk->AddStatement(dStmt);
        }
    }
    return blk;
}
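
/*
 * Illustrative end-to-end shape (approximate MIR, not verbatim): a statement
 *     callassigned &foo () { dassign %x 0 }
 * is expanded by GenCallNode/GenBlockNode into roughly
 *     call &foo ()
 *     dassign %x 0 (regread i64 %%retval0)
 * with an optional comment statement in between under VerboseCG.
 */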

BlockNode *CGLowerer::LowerIntrinsiccallAassignedToAssignStmt(IntrinsiccallNode &intrinsicCall)
{
    auto *builder = mirModule.GetMIRBuilder();
    auto *block = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    auto intrinsicID = intrinsicCall.GetIntrinsic();
    auto &opndVector = intrinsicCall.GetNopnd();
    auto returnPair = intrinsicCall.GetReturnVec().begin();
    auto regFieldPair = returnPair->second;
    DEBUG_ASSERT(mirModule.CurFunction() != nullptr, "CurFunction should not be nullptr");
    if (regFieldPair.IsReg()) {
        auto regIdx = regFieldPair.GetPregIdx();
        auto primType = mirModule.CurFunction()->GetPregItem(static_cast<PregIdx>(regIdx))->GetPrimType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, primType, TyIdx(0), opndVector);
        auto regAssign = builder->CreateStmtRegassign(primType, regIdx, intrinsicOp);
        block->AddStatement(regAssign);
    } else {
        auto fieldID = regFieldPair.GetFieldID();
        auto stIdx = returnPair->first;
        DEBUG_ASSERT(mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx) != nullptr, "nullptr check");
        auto *type = mirModule.CurFunction()->GetLocalOrGlobalSymbol(stIdx)->GetType();
        auto intrinsicOp = builder->CreateExprIntrinsicop(intrinsicID, OP_intrinsicop, *type, opndVector);
        auto dAssign = builder->CreateStmtDassign(stIdx, fieldID, intrinsicOp);
        block->AddStatement(dAssign);
    }
    return LowerBlock(*block);
}
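
/*
 * Sketch of the rewrite (illustrative MIR): an assigned intrinsic call such as
 *     intrinsiccallassigned ID (a, b) { regassign %1 }
 * becomes a plain assignment whose right-hand side is an intrinsicop:
 *     regassign i32 %1 (intrinsicop i32 ID (a, b))
 * and the resulting block is then lowered again via LowerBlock.
 */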

BlockNode *CGLowerer::LowerCallAssignedStmt(StmtNode &stmt, bool uselvar)
{
    StmtNode *newCall = nullptr;
    CallReturnVector *p2nRets = nullptr;
    PUIdx funcCalled = kFuncNotFound;
    bool handledAtLowerLevel = false;
    switch (stmt.GetOpCode()) {
        case OP_callassigned: {
            auto &origCall = static_cast<CallNode &>(stmt);
            newCall = GenCallNode(stmt, funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<CallNode *>(newCall)->SetReturnVec(*p2nRets);
            MIRFunction *curFunc = mirModule.CurFunction();
            curFunc->SetLastFreqMap(newCall->GetStmtID(),
                                    static_cast<uint32>(curFunc->GetFreqFromLastStmt(stmt.GetStmtID())));
            break;
        }
        case OP_intrinsiccallassigned: {
            BlockNode *blockNode = LowerIntrinsiccallToIntrinsicop(stmt);
            if (blockNode != nullptr) {
                return blockNode;
            }
            IntrinsiccallNode &intrincall = static_cast<IntrinsiccallNode &>(stmt);
            newCall = GenIntrinsiccallNode(stmt, funcCalled, handledAtLowerLevel, intrincall);
            p2nRets = &intrincall.GetReturnVec();
            static_cast<IntrinsiccallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        case OP_icallprotoassigned:
        case OP_icallassigned: {
            auto &origCall = static_cast<IcallNode &>(stmt);
            newCall = GenIcallNode(funcCalled, origCall);
            p2nRets = &origCall.GetReturnVec();
            static_cast<IcallNode *>(newCall)->SetReturnVec(*p2nRets);
            break;
        }
        default:
            CHECK_FATAL(false, "NIY");
            return nullptr;
    }

    /* Transfer the srcPosition location info. */
    newCall->SetSrcPos(stmt.GetSrcPos());
    return GenBlockNode(*newCall, *p2nRets, stmt.GetOpCode(), funcCalled, handledAtLowerLevel, uselvar);
}

BlockNode *CGLowerer::LowerIntrinsiccallToIntrinsicop(StmtNode &stmt)
{
    return nullptr;
}

void CGLowerer::LowerStmt(StmtNode &stmt, BlockNode &newBlk)
{
    for (size_t i = 0; i < stmt.NumOpnds(); ++i) {
        DEBUG_ASSERT(stmt.Opnd(i) != nullptr, "null ptr check");
        stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(i), newBlk), i);
    }
}

void CGLowerer::LowerSwitchOpnd(StmtNode &stmt, BlockNode &newBlk)
{
    BaseNode *opnd = LowerExpr(stmt, *stmt.Opnd(0), newBlk);
    if (CGOptions::GetInstance().GetOptimizeLevel() >= CGOptions::kLevel2 && opnd->GetOpCode() != OP_regread) {
        PrimType ptyp = stmt.Opnd(0)->GetPrimType();
        DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
        PregIdx pIdx = GetCurrentFunc()->GetPregTab()->CreatePreg(ptyp);
        RegassignNode *regAss = mirBuilder->CreateStmtRegassign(ptyp, pIdx, opnd);
        newBlk.AddStatement(regAss);
        GetCurrentFunc()->SetLastFreqMap(regAss->GetStmtID(),
                                         static_cast<uint32>(GetCurrentFunc()->GetFreqFromLastStmt(stmt.GetStmtID())));
        stmt.SetOpnd(mirBuilder->CreateExprRegread(ptyp, pIdx), 0);
    } else {
        stmt.SetOpnd(LowerExpr(stmt, *stmt.Opnd(0), newBlk), 0);
    }
}
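
/*
 * Illustrative effect at -O2 (approximate MIR): the switch operand is cached
 * in a pseudo-register so it is evaluated only once, e.g.
 *     switch (add i32 (dread i32 $i, constval i32 1)) ...
 * becomes
 *     regassign i32 %1 (add i32 (dread i32 $i, constval i32 1))
 *     switch (regread i32 %1) ...
 */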

BlockNode *CGLowerer::LowerBlock(BlockNode &block)
{
    BlockNode *newBlk = mirModule.CurFuncCodeMemPool()->New<BlockNode>();
    BlockNode *tmpBlockNode = nullptr;
    std::vector<StmtNode *> abortNode;
    if (block.GetFirst() == nullptr) {
        return newBlk;
    }

    StmtNode *nextStmt = block.GetFirst();
    do {
        StmtNode *stmt = nextStmt;
        nextStmt = stmt->GetNext();
        stmt->SetNext(nullptr);
        currentBlock = newBlk;

        switch (stmt->GetOpCode()) {
            case OP_switch: {
                LowerSwitchOpnd(*stmt, *newBlk);
                auto switchMp = std::make_unique<ThreadLocalMemPool>(memPoolCtrler, "switchlowerer");
                MapleAllocator switchAllocator(switchMp.get());
                SwitchLowerer switchLowerer(mirModule, static_cast<SwitchNode &>(*stmt), switchAllocator);
                BlockNode *blk = switchLowerer.LowerSwitch();
                if (blk->GetFirst() != nullptr) {
                    newBlk->AppendStatementsFromBlock(*blk);
                }
                needBranchCleanup = true;
                break;
            }
            case OP_block:
                tmpBlockNode = LowerBlock(static_cast<BlockNode &>(*stmt));
                CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                break;
            case OP_dassign: {
                LowerDassign(static_cast<DassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_regassign: {
                LowerRegassign(static_cast<RegassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_iassign: {
                LowerIassign(static_cast<IassignNode &>(*stmt), *newBlk);
                break;
            }
            case OP_callassigned:
            case OP_icallassigned:
            case OP_icallprotoassigned: {
                // Pass the address of an lvar if this is a struct call assignment.
                bool lvar = false;
                newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt, lvar));
                break;
            }
            case OP_intrinsiccallassigned:
                newBlk->AppendStatementsFromBlock(*LowerCallAssignedStmt(*stmt));
                break;
            case OP_intrinsiccall:
            case OP_call:
            case OP_icall:
            case OP_icallproto:
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                // nextStmt could be changed by the call to LowerStructReturn.
                LowerCallStmt(*stmt, nextStmt, *newBlk);
#else
                LowerStmt(*stmt, *newBlk);
#endif
                break;
            case OP_return: {
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                DEBUG_ASSERT(GetCurrentFunc() != nullptr, "GetCurrentFunc should not be nullptr");
                if (GetCurrentFunc()->IsFirstArgReturn() && stmt->NumOpnds() > 0) {
                    newBlk->AppendStatementsFromBlock(
                        *LowerReturnStructUsingFakeParm(static_cast<NaryStmtNode &>(*stmt)));
                } else {
#endif
                    NaryStmtNode *retNode = static_cast<NaryStmtNode *>(stmt);
                    if (retNode->GetNopndSize() == 0) {
                        newBlk->AddStatement(stmt);
                    } else {
                        tmpBlockNode = LowerReturn(*retNode);
                        CHECK_FATAL(tmpBlockNode != nullptr, "nullptr is not expected");
                        newBlk->AppendStatementsFromBlock(*tmpBlockNode);
                    }
#if TARGARM32 || TARGAARCH64 || TARGRISCV64 || TARGX86_64
                }
#endif
                break;
            }
            case OP_comment:
                newBlk->AddStatement(stmt);
                break;
            default:
                LowerStmt(*stmt, *newBlk);
                newBlk->AddStatement(stmt);
                break;
        }
        CHECK_FATAL(beCommon.GetSizeOfTypeSizeTable() == GlobalTables::GetTypeTable().GetTypeTableSize(),
                    "type size table is out of sync with the global type table");
    } while (nextStmt != nullptr);
    for (auto node : abortNode) {
        newBlk->AddStatement(node);
    }
    return newBlk;
}

void CGLowerer::LowerTypePtr(BaseNode &node) const
{
    if ((node.GetPrimType() == PTY_ptr) || (node.GetPrimType() == PTY_ref)) {
        node.SetPrimType(GetLoweredPtrType());
    }

    if (kOpcodeInfo.IsTypeCvt(node.GetOpCode())) {
        auto &cvt = static_cast<TypeCvtNode &>(node);
        if ((cvt.FromType() == PTY_ptr) || (cvt.FromType() == PTY_ref)) {
            cvt.SetFromType(GetLoweredPtrType());
        }
    } else if (kOpcodeInfo.IsCompare(node.GetOpCode())) {
        auto &cmp = static_cast<CompareNode &>(node);
        if ((cmp.GetOpndType() == PTY_ptr) || (cmp.GetOpndType() == PTY_ref)) {
            cmp.SetOpndType(GetLoweredPtrType());
        }
    }
}
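
/*
 * Illustrative example (assuming GetLoweredPtrType() yields a64 on a 64-bit
 * target): a compare such as
 *     eq u1 ptr (dread ptr %p, constval ptr 0)
 * has its operand type rewritten to
 *     eq u1 a64 (dread a64 %p, constval a64 0)
 * so later phases never see the abstract ptr/ref types.
 */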

void CGLowerer::LowerEntry(MIRFunction &func)
{
    if (func.IsFirstArgReturn() && func.GetReturnType()->GetPrimType() != PTY_void) {
        MIRSymbol *retSt = func.GetSymTab()->CreateSymbol(kScopeLocal);
        retSt->SetStorageClass(kScFormal);
        retSt->SetSKind(kStVar);
        std::string retName(".return.");
        MIRSymbol *funcSt = GlobalTables::GetGsymTable().GetSymbolFromStidx(func.GetStIdx().Idx());
        DEBUG_ASSERT(funcSt != nullptr, "null ptr check");
        retName += funcSt->GetName();
        retSt->SetNameStrIdx(retName);
        MIRType *pointType = beCommon.BeGetOrCreatePointerType(*func.GetReturnType());

        retSt->SetTyIdx(pointType->GetTypeIndex());
        std::vector<MIRSymbol *> formals;
        formals.emplace_back(retSt);
        for (uint32 i = 0; i < func.GetFormalCount(); ++i) {
            auto formal = func.GetFormal(i);
            formals.emplace_back(formal);
        }
        func.SetFirstArgReturn();

        beCommon.AddElementToFuncReturnType(func, func.GetReturnTyIdx());

        func.UpdateFuncTypeAndFormalsAndReturnType(formals, TyIdx(PTY_void), true);
        auto *funcType = func.GetMIRFuncType();
        DEBUG_ASSERT(funcType != nullptr, "null ptr check");
        funcType->SetFirstArgReturn();
        beCommon.AddTypeSizeAndAlign(funcType->GetTypeIndex(), GetPrimTypeSize(funcType->GetPrimType()));
    }
}
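
/*
 * Illustrative signature rewrite (names are hypothetical): a function
 *     struct S foo(int a)
 * whose struct result is returned through the first argument becomes, roughly,
 *     void foo(S *.return.foo, int a)
 * with the new ".return.<name>" formal prepended to the formal list.
 */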

void CGLowerer::CleanupBranches(MIRFunction &func) const
{
    BlockNode *block = func.GetBody();
    StmtNode *prev = nullptr;
    StmtNode *next = nullptr;
    for (StmtNode *curr = block->GetFirst(); curr != nullptr; curr = next) {
        next = curr->GetNext();
        if (next != nullptr) {
            CHECK_FATAL(curr == next->GetPrev(), "unexpected node");
        }
        if ((next != nullptr) && (prev != nullptr) && (curr->GetOpCode() == OP_goto)) {
            /*
             * Skip until we find a label.
             * Note that the current 'goto' statement may be the last statement
             * when discounting comment statements.
             * Make sure we don't lose any comments.
             */
            StmtNode *cmtB = nullptr;
            StmtNode *cmtE = nullptr;
            next = curr->GetNext();

            while ((next != nullptr) && (next->GetOpCode() != OP_label)) {
                if (next->GetOpCode() == OP_comment) {
                    if (cmtB == nullptr) {
                        cmtB = next;
                        cmtE = next;
                    } else {
                        CHECK_FATAL(cmtE != nullptr, "cmtE is null in CGLowerer::CleanupBranches");
                        cmtE->SetNext(next);
                        next->SetPrev(cmtE);
                        cmtE = next;
                    }
                }
                next = next->GetNext();
            }

            curr->SetNext(next);

            if (next != nullptr) {
                next->SetPrev(curr);
            }

            StmtNode *insertAfter = nullptr;

            if ((next != nullptr) &&
                ((static_cast<GotoNode *>(curr))->GetOffset() == (static_cast<LabelNode *>(next))->GetLabelIdx())) {
                insertAfter = prev;
                prev->SetNext(next); /* Skip the goto statement (pointed to by curr). */
                next->SetPrev(prev);
                curr = next;            /* Make curr point to the label statement. */
                next = next->GetNext(); /* Advance next to the statement after the label. */
            } else {
                insertAfter = curr;
            }

            /* Insert comments before 'curr'. */
            if (cmtB != nullptr) {
                CHECK_FATAL(cmtE != nullptr, "nullptr is not expected");
                StmtNode *iaNext = insertAfter->GetNext();
                if (iaNext != nullptr) {
                    iaNext->SetPrev(cmtE);
                }
                cmtE->SetNext(iaNext);

                insertAfter->SetNext(cmtB);
                cmtB->SetPrev(insertAfter);

                if (insertAfter == curr) {
                    curr = cmtE;
                }
            }
            if (next == nullptr) {
                func.GetBody()->SetLast(curr);
            }
        }
        prev = curr;
    }
    CHECK_FATAL(func.GetBody()->GetLast() == prev, "make sure the return value of GetLast equal prev");
}
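
/*
 * Illustrative cleanup (approximate MIR): a jump to the label that
 * immediately follows it (ignoring comments), such as
 *     goto @L1
 *     comment "tail"
 *     @L1:
 * is reduced to
 *     comment "tail"
 *     @L1:
 * i.e. the redundant goto is unlinked while all comments are preserved.
 */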

inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const RegreadNode &rRead,
                                             const CGLowerer &cgLowerer)
{
    StIdx stIdx = cgLowerer.GetSymbolReferredToByPseudoRegister(rRead.GetRegIdx());
    return ((dassign.GetStIdx() == stIdx) && (dassign.GetFieldID() == 0));
}

inline bool IsAccessingTheSameMemoryLocation(const DassignNode &dassign, const DreadNode &dread)
{
    return ((dassign.GetStIdx() == dread.GetStIdx()) && (dassign.GetFieldID() == dread.GetFieldID()));
}

inline bool IsDassignNOP(const DassignNode &dassign)
{
    if (dassign.GetRHS()->GetOpCode() == OP_dread) {
        return IsAccessingTheSameMemoryLocation(dassign, static_cast<DreadNode &>(*dassign.GetRHS()));
    }
    return false;
}

inline bool IsConstvalZero(const BaseNode &n)
{
    return ((n.GetOpCode() == OP_constval) && static_cast<const ConstvalNode &>(n).GetConstVal()->IsZero());
}

#define NEXT_ID(x) ((x) + 1)
#define INTRN_FIRST_SYNC_ENTER NEXT_ID(INTRN_LAST)
#define INTRN_SECOND_SYNC_ENTER NEXT_ID(INTRN_FIRST_SYNC_ENTER)
#define INTRN_THIRD_SYNC_ENTER NEXT_ID(INTRN_SECOND_SYNC_ENTER)
#define INTRN_FOURTH_SYNC_ENTER NEXT_ID(INTRN_THIRD_SYNC_ENTER)
#define INTRN_SYNC_EXIT NEXT_ID(INTRN_FOURTH_SYNC_ENTER)
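
/*
 * These macros assign synthetic builtin IDs past the end of the regular
 * intrinsic ID space: INTRN_FIRST_SYNC_ENTER == INTRN_LAST + 1,
 * INTRN_SECOND_SYNC_ENTER == INTRN_LAST + 2, and so on, so the sync
 * helpers can share the builtinFuncIDs table below without clashing.
 */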

std::vector<std::pair<CGLowerer::BuiltinFunctionID, PUIdx>> CGLowerer::builtinFuncIDs;

LabelIdx CGLowerer::GetLabelIdx(MIRFunction &curFunc) const
{
    std::string suffix = std::to_string(curFunc.GetLabelTab()->GetLabelTableSize());
    GStrIdx labelStrIdx = GlobalTables::GetStrTable().GetOrCreateStrIdxFromName("__label_BC_" + suffix);
    LabelIdx labIdx = curFunc.GetLabelTab()->AddLabel(labelStrIdx);
    return labIdx;
}

BaseNode *CGLowerer::LowerExpr(BaseNode &parent, BaseNode &expr, BlockNode &blkNode)
{
    bool isCvtU1Expr = (expr.GetOpCode() == OP_cvt && expr.GetPrimType() == PTY_u1 &&
                        static_cast<TypeCvtNode &>(expr).FromType() != PTY_u1);
    if (expr.GetPrimType() == PTY_u1) {
        expr.SetPrimType(PTY_u8);
    }

    for (size_t i = 0; i < expr.NumOpnds(); ++i) {
        expr.SetOpnd(LowerExpr(expr, *expr.Opnd(i), blkNode), i);
    }

    // Convert `cvt u1 xx <expr>` to `ne u8 xx (<expr>, constval xx 0)`.
    // There is no need to convert `cvt u1 u1 <expr>`.
    if (isCvtU1Expr) {
        auto &cvtExpr = static_cast<TypeCvtNode &>(expr);
        PrimType fromType = cvtExpr.FromType();
        auto *fromMIRType = GlobalTables::GetTypeTable().GetTypeFromTyIdx(TyIdx(fromType));
        // We use u8 instead of u1 because codegen can't recognize u1.
        auto *toMIRType = GlobalTables::GetTypeTable().GetUInt8();
        auto *zero = GlobalTables::GetIntConstTable().GetOrCreateIntConst(0, *fromMIRType);
        auto *converted = mirBuilder->CreateExprCompare(OP_ne, *toMIRType, *fromMIRType, cvtExpr.Opnd(0),
                                                        mirBuilder->CreateConstval(zero));
        return converted;
    }
    switch (expr.GetOpCode()) {
        case OP_dread:
            return LowerDread(static_cast<DreadNode &>(expr), blkNode);

        case OP_addrof:
            return LowerAddrof(static_cast<AddrofNode &>(expr));

        case OP_iread:
            return LowerIread(static_cast<IreadNode &>(expr));

        case OP_cvt:
        case OP_retype:
        case OP_zext:
        case OP_sext:
            return LowerCastExpr(expr);
        default:
            return &expr;
    }
}

BaseNode *CGLowerer::LowerDread(DreadNode &dread, const BlockNode &block)
{
    /* Use PTY_u8 for the boolean type in dread/iread. */
    if (dread.GetPrimType() == PTY_u1) {
        dread.SetPrimType(PTY_u8);
    }
    CHECK_FATAL(dread.GetFieldID() == 0, "fieldID must be 0");
    return LowerDreadToThreadLocal(dread, block);
}

void CGLowerer::LowerRegassign(RegassignNode &regNode, BlockNode &newBlk)
{
    BaseNode *rhsOpnd = regNode.Opnd(0);
    regNode.SetOpnd(LowerExpr(regNode, *rhsOpnd, newBlk), 0);
    newBlk.AddStatement(&regNode);
}

BaseNode *CGLowerer::ExtractSymbolAddress(const StIdx &stIdx)
{
    auto builder = mirModule.GetMIRBuilder();
    return builder->CreateExprAddrof(0, stIdx);
}

BaseNode *CGLowerer::LowerDreadToThreadLocal(BaseNode &expr, const BlockNode &block)
{
    auto *result = &expr;
    if (expr.GetOpCode() != maple::OP_dread) {
        return result;
    }
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    auto &dread = static_cast<DreadNode &>(expr);
    StIdx stIdx = dread.GetStIdx();
    if (!stIdx.IsGlobal()) {
        return result;
    }
    MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    CHECK_FATAL(symbol != nullptr, "symbol should not be nullptr");

    if (symbol->IsThreadLocal()) {
        // iread <* u32> 0 (regread u64 %addr)
        auto addr = ExtractSymbolAddress(stIdx);
        auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
        auto iread = mirModule.GetMIRBuilder()->CreateExprIread(*symbol->GetType(), *ptrType, dread.GetFieldID(), addr);
        result = iread;
    }
    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
    return result;
}

StmtNode *CGLowerer::LowerDassignToThreadLocal(StmtNode &stmt, const BlockNode &block)
{
    StmtNode *result = &stmt;
    if (stmt.GetOpCode() != maple::OP_dassign) {
        return result;
    }
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    auto &dAssign = static_cast<DassignNode &>(stmt);
    StIdx stIdx = dAssign.GetStIdx();
    if (!stIdx.IsGlobal()) {
        return result;
    }
    MIRSymbol *symbol = GlobalTables::GetGsymTable().GetSymbolFromStidx(stIdx.Idx());
    DEBUG_ASSERT(symbol != nullptr, "symbol should not be nullptr");
    if (symbol->IsThreadLocal()) {
        // iassign <* u32> 0 (regread u64 %addr, dread u32 $x)
        auto addr = ExtractSymbolAddress(stIdx);
        auto ptrType = GlobalTables::GetTypeTable().GetOrCreatePointerType(*symbol->GetType());
        auto iassign =
            mirModule.GetMIRBuilder()->CreateStmtIassign(*ptrType, dAssign.GetFieldID(), addr, dAssign.GetRHS());
        result = iassign;
    }
    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
    return result;
}

void CGLowerer::LowerDassign(DassignNode &dsNode, BlockNode &newBlk)
{
    StmtNode *newStmt = nullptr;
    BaseNode *rhs = nullptr;
    Opcode op = dsNode.GetRHS()->GetOpCode();
    CHECK_FATAL(dsNode.GetFieldID() == 0, "fieldID must be 0");
    if (op == OP_intrinsicop) {
        IntrinsicopNode *intrinNode = static_cast<IntrinsicopNode *>(dsNode.GetRHS());
        MIRType *retType = IntrinDesc::intrinTable[intrinNode->GetIntrinsic()].GetReturnType();
        CHECK_FATAL(retType != nullptr, "retType should not be nullptr");
        rhs = LowerExpr(dsNode, *intrinNode, newBlk);
        dsNode.SetRHS(rhs);
        CHECK_FATAL(dsNode.GetRHS() != nullptr, "dsNode->rhs is null in CGLowerer::LowerDassign");
        if (!IsDassignNOP(dsNode)) {
            newStmt = &dsNode;
        }
    } else {
        rhs = LowerExpr(dsNode, *dsNode.GetRHS(), newBlk);
        dsNode.SetRHS(rhs);
        newStmt = &dsNode;
    }

    if (newStmt != nullptr) {
        newBlk.AddStatement(LowerDassignToThreadLocal(*newStmt, newBlk));
    }
}

StmtNode *CGLowerer::LowerDefaultIntrinsicCall(IntrinsiccallNode &intrincall, MIRSymbol &st, MIRFunction &fn)
{
    MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
    IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
    std::vector<TyIdx> funcTyVec;
    std::vector<TypeAttrs> fnTaVec;
    MapleVector<BaseNode *> &nOpnds = intrincall.GetNopnd();
    MIRType *retTy = intrinDesc->GetReturnType();
    CHECK_FATAL(retTy != nullptr, "retTy should not be nullptr");
    for (uint32 i = 0; i < nOpnds.size(); ++i) {
        MIRType *argTy = intrinDesc->GetArgType(i);
        CHECK_FATAL(argTy != nullptr, "argTy should not be nullptr");
        funcTyVec.emplace_back(argTy->GetTypeIndex());
        fnTaVec.emplace_back(TypeAttrs());
    }
    MIRType *funcType = beCommon.BeGetOrCreateFunctionType(retTy->GetTypeIndex(), funcTyVec, fnTaVec);
    st.SetTyIdx(funcType->GetTypeIndex());
    fn.SetMIRFuncType(static_cast<MIRFuncType *>(funcType));
    fn.SetReturnTyIdx(retTy->GetTypeIndex());
    return static_cast<CallNode *>(mirBuilder->CreateStmtCall(fn.GetPuidx(), nOpnds));
}

StmtNode *CGLowerer::LowerIntrinsiccall(IntrinsiccallNode &intrincall, BlockNode &newBlk)
{
    MIRIntrinsicID intrnID = intrincall.GetIntrinsic();
    for (size_t i = 0; i < intrincall.GetNumOpnds(); ++i) {
        intrincall.SetOpnd(LowerExpr(intrincall, *intrincall.Opnd(i), newBlk), i);
    }
    IntrinDesc *intrinDesc = &IntrinDesc::intrinTable[intrnID];
    /* By default, lower an intrinsic call to a real function call. */
    MIRSymbol *st = GlobalTables::GetGsymTable().CreateSymbol(kScopeGlobal);
    CHECK_FATAL(intrinDesc->name != nullptr, "intrinsic's name should not be nullptr");
    const std::string name = intrinDesc->name;
    st->SetNameStrIdx(name);
    st->SetStorageClass(kScText);
    st->SetSKind(kStFunc);
    MIRFunction *fn = mirBuilder->GetOrCreateFunction(intrinDesc->name, TyIdx(0));
    beCommon.UpdateTypeTable(*fn->GetMIRFuncType());
    fn->AllocSymTab();
    st->SetFunction(fn);
    st->SetAppearsInCode(true);
    return LowerDefaultIntrinsicCall(intrincall, *st, *fn);
}
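
/*
 * Illustrative default lowering (the runtime symbol name comes from the
 * intrinsic descriptor table): an intrinsic with no dedicated handling, e.g.
 *     intrinsiccall SOME_ID (a, b)
 * turns into an ordinary call to the like-named runtime routine, roughly
 *     call &some_runtime_name (a, b)
 * after a function symbol and prototype have been synthesized for it.
 */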

PUIdx CGLowerer::GetBuiltinToUse(BuiltinFunctionID id) const
{
    /*
     * Use std::vector and linear search, as the number of entries is small.
     * We may revisit this if the number of entries gets larger.
     */
    for (const auto &funcID : builtinFuncIDs) {
        if (funcID.first == id) {
            return funcID.second;
        }
    }
    return kFuncNotFound;
}

bool CGLowerer::IsIntrinsicCallHandledAtLowerLevel(MIRIntrinsicID intrinsic) const
{
    switch (intrinsic) {
        // js
        case INTRN_ADD_WITH_OVERFLOW:
        case INTRN_SUB_WITH_OVERFLOW:
        case INTRN_MUL_WITH_OVERFLOW:
            return true;
        default: {
            return false;
        }
    }
}

void CGLowerer::LowerFunc(MIRFunction &func)
{
    labelIdx = 0;
    SetCurrentFunc(&func);
    LowerEntry(func);
    BlockNode *origBody = func.GetBody();
    CHECK_FATAL(origBody != nullptr, "origBody should not be nullptr");

    /* Record the type table size before lowering so types created during lowering are registered afterwards. */
    uint32 oldTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    BlockNode *newBody = LowerBlock(*origBody);
    func.SetBody(newBody);
    if (needBranchCleanup) {
        CleanupBranches(func);
    }

    uint32 newTypeTableSize = GlobalTables::GetTypeTable().GetTypeTableSize();
    if (newTypeTableSize != oldTypeTableSize) {
        beCommon.AddNewTypeAfterBecommon(oldTypeTableSize, newTypeTableSize);
    }
}
} /* namespace maplebe */