FlowGraph.h 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. class BasicBlock;
  7. class FlowEdge;
  8. class Loop;
  9. class Region;
  10. class Func;
  11. class AddPropertyCacheBucket
  12. {
  13. private:
  14. JITTypeHolder initialType;
  15. JITTypeHolder finalType;
  16. public:
  17. AddPropertyCacheBucket() : initialType(nullptr), finalType(nullptr)
  18. #if DBG
  19. , deadStoreUnavailableInitialType(nullptr), deadStoreUnavailableFinalType(nullptr)
  20. #endif
  21. {
  22. }
  23. AddPropertyCacheBucket(const AddPropertyCacheBucket& bucket) :
  24. initialType(bucket.initialType), finalType(bucket.finalType)
  25. #if DBG
  26. , deadStoreUnavailableInitialType(bucket.deadStoreUnavailableInitialType)
  27. , deadStoreUnavailableFinalType(bucket.deadStoreUnavailableFinalType)
  28. #endif
  29. {
  30. }
  31. bool operator!=(const AddPropertyCacheBucket& bucket) const
  32. {
  33. return this->initialType != bucket.initialType || this->finalType != bucket.finalType;
  34. }
  35. bool operator==(const AddPropertyCacheBucket& bucket) const
  36. {
  37. return this->initialType == bucket.initialType && this->finalType == bucket.finalType;
  38. }
  39. void Copy(AddPropertyCacheBucket *pNew) const
  40. {
  41. pNew->initialType = this->initialType;
  42. pNew->finalType = this->finalType;
  43. #if DBG
  44. pNew->deadStoreUnavailableInitialType = this->deadStoreUnavailableInitialType;
  45. pNew->deadStoreUnavailableFinalType = this->deadStoreUnavailableFinalType;
  46. #endif
  47. }
  48. JITTypeHolder GetInitialType() const { return this->initialType; }
  49. JITTypeHolder GetFinalType() const { return this->finalType; }
  50. void SetInitialType(JITTypeHolder type) { this->initialType = type; }
  51. void SetFinalType(JITTypeHolder type) { this->finalType = type; }
  52. #if DBG_DUMP
  53. void Dump() const;
  54. #endif
  55. #ifdef DBG
  56. JITTypeHolder deadStoreUnavailableInitialType;
  57. JITTypeHolder deadStoreUnavailableFinalType;
  58. #endif
  59. };
  60. class ObjTypeGuardBucket
  61. {
  62. private:
  63. BVSparse<JitArenaAllocator>* guardedPropertyOps;
  64. JITTypeHolder monoGuardType;
  65. public:
  66. ObjTypeGuardBucket() : guardedPropertyOps(nullptr), monoGuardType(nullptr) {}
  67. ObjTypeGuardBucket(BVSparse<JitArenaAllocator>* guardedPropertyOps) : monoGuardType(nullptr)
  68. {
  69. this->guardedPropertyOps = (guardedPropertyOps != nullptr ? guardedPropertyOps->CopyNew() : nullptr);
  70. }
  71. void Copy(ObjTypeGuardBucket *pNew) const
  72. {
  73. pNew->guardedPropertyOps = this->guardedPropertyOps ? this->guardedPropertyOps->CopyNew() : nullptr;
  74. pNew->monoGuardType = this->monoGuardType;
  75. }
  76. BVSparse<JitArenaAllocator> *GetGuardedPropertyOps() const { return this->guardedPropertyOps; }
  77. void SetGuardedPropertyOps(BVSparse<JitArenaAllocator> *guardedPropertyOps) { this->guardedPropertyOps = guardedPropertyOps; }
  78. void AddToGuardedPropertyOps(uint propertyOpId) { Assert(this->guardedPropertyOps != nullptr); this->guardedPropertyOps->Set(propertyOpId); }
  79. bool NeedsMonoCheck() const { return this->monoGuardType != nullptr; }
  80. void SetMonoGuardType(JITTypeHolder type) { this->monoGuardType = type; }
  81. JITTypeHolder GetMonoGuardType() const { return this->monoGuardType; }
  82. #if DBG_DUMP
  83. void Dump() const;
  84. #endif
  85. };
  86. class ObjWriteGuardBucket
  87. {
  88. private:
  89. BVSparse<JitArenaAllocator>* writeGuards;
  90. public:
  91. ObjWriteGuardBucket() : writeGuards(nullptr) {}
  92. ObjWriteGuardBucket(BVSparse<JitArenaAllocator>* writeGuards) { this->writeGuards = (writeGuards != nullptr ? writeGuards->CopyNew() : nullptr); }
  93. void Copy(ObjWriteGuardBucket *pNew) const
  94. {
  95. pNew->writeGuards = this->writeGuards ? this->writeGuards->CopyNew() : nullptr;
  96. }
  97. BVSparse<JitArenaAllocator> *GetWriteGuards() const { return this->writeGuards; }
  98. void SetWriteGuards(BVSparse<JitArenaAllocator> *writeGuards) { this->writeGuards = writeGuards; }
  99. void AddToWriteGuards(uint writeGuardId) { Assert(this->writeGuards != nullptr); this->writeGuards->Set(writeGuardId); }
  100. #if DBG_DUMP
  101. void Dump() const;
  102. #endif
  103. };
// FlowGraph: the control-flow graph built over a function's IR.
// Owns the doubly-linked list of BasicBlocks (blockList..tailBlock), the list
// of natural loops, and the EH bookkeeping (try/catch/finally label stacks and
// region maps) needed while building regions.
class FlowGraph
{
    friend Loop;

public:
    // Factory: allocates and builds the graph for func out of alloc.
    static FlowGraph * New(Func *func, JitArenaAllocator *alloc);

    FlowGraph(Func *func, JitArenaAllocator *fgAlloc) :
        func(func),
        alloc(fgAlloc),
        blockList(nullptr),
        blockCount(0),
        tailBlock(nullptr),
        loopList(nullptr),
        catchLabelStack(nullptr),
        finallyLabelStack(nullptr),
        leaveNullLabelStack(nullptr),
        regToFinallyEndMap(nullptr),
        leaveNullLabelToFinallyLabelMap(nullptr),
        hasBackwardPassInfo(false),
        hasLoop(false),
        implicitCallFlags(Js::ImplicitCall_HasNoInfo)
    {
    }

    // Construct the graph from the IR; Destroy tears it back down.
    void Build(void);
    void Destroy(void);

    // Peephole optimizations run on the graph (branch folding etc.).
    void RunPeeps();

    // Block/edge construction and lookup.
    BasicBlock * AddBlock(IR::Instr * firstInstr, IR::Instr * lastInstr, BasicBlock * nextBlock, BasicBlock *prevBlock = nullptr);
    FlowEdge * AddEdge(BasicBlock * predBlock, BasicBlock * succBlock);

    // Insert compensation code on an edge when blocks are moved (e.g. break-block removal).
    BasicBlock * InsertCompensationCodeForBlockMove(FlowEdge * edge, // Edge where compensation code needs to be inserted
        bool insertCompensationBlockToLoopList = false,
        bool sinkBlockLoop = false, // If true, the compensation block belongs to the sink block's loop
        bool afterForward = false // Inserting compensation code after the forward (globopt) pass
        );
    BasicBlock * InsertAirlockBlock(FlowEdge * edge, bool afterForward = false);
    void InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget);

    // Dead-block removal (optionally cooperating with the global optimizer).
    void RemoveUnreachableBlocks();
    bool RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt = nullptr);
    IR::Instr * RemoveInstr(IR::Instr *instr, GlobOpt * globOpt);
    void RemoveBlock(BasicBlock *block, GlobOpt * globOpt = nullptr, bool tailDuping = false);

    BasicBlock * SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr);
    Func* GetFunc() { return func;};
    static void SafeRemoveInstr(IR::Instr *instr);
    void SortLoopLists();
    FlowEdge * FindEdge(BasicBlock *predBlock, BasicBlock *succBlock);

    // EH (try/finally) early-exit handling.
    IR::LabelInstr * DeleteLeaveChainBlocks(IR::BranchInstr *leaveInstr, IR::Instr * &instrPrev);
    bool CheckIfEarlyExitAndAddEdgeToFinally(IR::BranchInstr *leaveInstr, Region *currentRegion, Region *branchTargetRegion, IR::Instr *&instrPrev, IR::LabelInstr *&exitLabel);
    bool Dominates(Region *finallyRegion, Region *exitLabelRegion);
    bool DoesExitLabelDominate(IR::BranchInstr *leaveInstr);
    void InsertEdgeFromFinallyToEarlyExit(BasicBlock * finallyEndBlock, IR::LabelInstr * exitLabel);

#if DBG_DUMP
    void Dump();
    void Dump(bool verbose, const char16 *form);
#endif

    JitArenaAllocator * alloc;
    BasicBlock * blockList;       // head of the block list (function entry side)
    BasicBlock * tailBlock;       // last block in the list
    Loop * loopList;              // head of the loop list (sorted by SortLoopLists)
    SList<IR::LabelInstr*> * catchLabelStack;
    SList<IR::LabelInstr*> * finallyLabelStack;
    SList<IR::LabelInstr*> * leaveNullLabelStack;
    typedef JsUtil::BaseDictionary<Region *, BasicBlock *, JitArenaAllocator> RegionToFinallyEndMapType;
    RegionToFinallyEndMapType * regToFinallyEndMap;
    typedef JsUtil::BaseDictionary<IR::LabelInstr *, IR::LabelInstr *, JitArenaAllocator> LeaveNullLabelToFinallyLabelMapType;
    LeaveNullLabelToFinallyLabelMapType * leaveNullLabelToFinallyLabelMap;
    bool hasBackwardPassInfo;     // set once the backward pass has populated per-block info
    bool hasLoop;
    Js::ImplicitCallFlags implicitCallFlags;

private:
    // Loop discovery and canonicalization (landing pads, loop-top labels).
    void FindLoops(void);
    bool CanonicalizeLoops(void);
    void BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop = nullptr);
    void WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc);
    void AddBlockToLoop(BasicBlock *block, Loop *loop);

    // EH region propagation helpers.
    bool IsEHTransitionInstr(IR::Instr *instr);
    BasicBlock * GetPredecessorForRegionPropagation(BasicBlock *block);
    void UpdateRegionForBlock(BasicBlock *block);
    void UpdateRegionForBlockFromEHPred(BasicBlock *block, bool reassign = false);
    Region * PropagateRegionFromPred(BasicBlock *block, BasicBlock *predBlock, Region *predRegion, IR::Instr * &tryInstr);

    // Peephole helpers (compare/branch fusion, block motion, unsigned compare).
    IR::Instr * PeepCm(IR::Instr *instr);
    IR::Instr * PeepTypedCm(IR::Instr *instr);
    void MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock);
    bool UnsignedCmpPeep(IR::Instr *cmpInstr);
    bool IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1);

#if DBG
    void VerifyLoopGraph();
#endif

private:
    void InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, bool origBranchSrcOpndIsJITOpt, uint32 origBranchSrcSymId);

private:
    Func * func;
    unsigned int blockCount;
};
  195. class BasicBlock
  196. {
  197. friend class FlowGraph;
  198. friend class Loop;
  199. public:
  200. static BasicBlock * New(FlowGraph * graph);
  201. void AddPred(FlowEdge * edge, FlowGraph * graph);
  202. void AddSucc(FlowEdge * edge, FlowGraph * graph);
  203. void RemovePred(BasicBlock *block, FlowGraph * graph);
  204. void RemoveSucc(BasicBlock *block, FlowGraph * graph);
  205. void RemoveDeadPred(BasicBlock *block, FlowGraph * graph);
  206. void RemoveDeadSucc(BasicBlock *block, FlowGraph * graph);
  207. void UnlinkPred(BasicBlock *block);
  208. void UnlinkSucc(BasicBlock *block);
  209. void UnlinkInstr(IR::Instr * Instr);
  210. void RemoveInstr(IR::Instr * instr);
  211. void InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr);
  212. void InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr);
  213. void InsertAfter(IR::Instr * newInstr);
  214. void InvertBranch(IR::BranchInstr *branch);
  215. IR::Instr * GetFirstInstr(void) const
  216. {
  217. return firstInstr;
  218. }
  219. void SetFirstInstr(IR::Instr * instr)
  220. {
  221. firstInstr = instr;
  222. }
  223. IR::Instr * GetLastInstr(void)
  224. {
  225. BasicBlock *blNext = this->next;
  226. if (blNext)
  227. {
  228. return blNext->firstInstr->m_prev;
  229. }
  230. else
  231. {
  232. return this->func->m_exitInstr;
  233. }
  234. }
  235. void SetLastInstr(IR::Instr * instr)
  236. {
  237. // Intentionally empty
  238. }
  239. SListBaseCounted<FlowEdge *> * GetPredList(void)
  240. {
  241. return &predList;
  242. }
  243. SListBaseCounted<FlowEdge *> * GetSuccList(void)
  244. {
  245. return &succList;
  246. }
  247. SListBaseCounted<FlowEdge *> * GetDeadPredList(void)
  248. {
  249. return &deadPredList;
  250. }
  251. SListBaseCounted<FlowEdge *> * GetDeadSuccList(void)
  252. {
  253. return &deadSuccList;
  254. }
  255. unsigned int GetBlockNum(void) const
  256. {
  257. return number;
  258. }
  259. void SetBlockNum(unsigned int num)
  260. {
  261. number = num;
  262. }
  263. BasicBlock * GetPrev()
  264. {
  265. BasicBlock *block = this;
  266. do {
  267. block = block->prev;
  268. } while (block->isDeleted);
  269. return block;
  270. }
  271. BasicBlock * GetNext()
  272. {
  273. BasicBlock *block = this->next;
  274. while (block && block->isDeleted) {
  275. block = block->next;
  276. }
  277. return block;
  278. }
  279. uint IncrementDataUseCount()
  280. {
  281. return ++this->dataUseCount;
  282. }
  283. uint DecrementDataUseCount()
  284. {
  285. Assert(this->dataUseCount != 0);
  286. return --this->dataUseCount;
  287. }
  288. uint GetDataUseCount()
  289. {
  290. return this->dataUseCount;
  291. }
  292. void SetDataUseCount(uint count)
  293. {
  294. this->dataUseCount = count;
  295. }
  296. bool IsLandingPad();
  297. BailOutInfo * CreateLoopTopBailOutInfo(GlobOpt * globOpt);
  298. // GlobOpt Stuff
  299. public:
  300. bool PathDepBranchFolding(GlobOpt* globOptState);
  301. void MergePredBlocksValueMaps(GlobOpt* globOptState);
  302. private:
  303. void CleanUpValueMaps();
  304. void CheckLegalityAndFoldPathDepBranches(GlobOpt* globOpt);
  305. Value * FindValueInLocalThenGlobalValueTableAndUpdate(GlobOpt *globOpt, GlobHashTable * localSymToValueMap, IR::Instr *instr, Sym *dstSym, Sym *srcSym);
  306. IR::LabelInstr* CanProveConditionalBranch(IR::BranchInstr *branch, GlobOpt* globOpt, GlobHashTable * localSymToValueMap);
  307. #if DBG_DUMP
  308. public:
  309. void DumpHeader(bool insertCR = true);
  310. void Dump();
  311. #endif
  312. public:
  313. BasicBlock * next;
  314. BasicBlock * prev;
  315. Loop * loop;
  316. uint8 isDeleted:1;
  317. uint8 isDead:1;
  318. uint8 isLoopHeader:1;
  319. uint8 hasCall:1;
  320. uint8 isVisited:1;
  321. uint8 isAirLockCompensationBlock:1;
  322. uint8 beginsBailOnNoProfile:1;
  323. #ifdef DBG
  324. uint8 isBreakBlock:1;
  325. uint8 isAirLockBlock:1;
  326. uint8 isBreakCompensationBlockAtSink:1;
  327. uint8 isBreakCompensationBlockAtSource:1;
  328. #endif
  329. // Deadstore data
  330. BVSparse<JitArenaAllocator> * upwardExposedUses;
  331. BVSparse<JitArenaAllocator> * upwardExposedFields;
  332. BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout;
  333. BVSparse<JitArenaAllocator> * slotDeadStoreCandidates;
  334. TempNumberTracker * tempNumberTracker;
  335. TempObjectTracker * tempObjectTracker;
  336. #if DBG
  337. BVSparse<JitArenaAllocator> * trackingByteCodeUpwardExposedUsed = nullptr;
  338. BVSparse<JitArenaAllocator> * excludeByteCodeUpwardExposedTracking = nullptr;
  339. TempObjectVerifyTracker * tempObjectVerifyTracker;
  340. #endif
  341. HashTable<AddPropertyCacheBucket> * stackSymToFinalType;
  342. HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties; // Dead store pass only
  343. HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap; // Backward pass only
  344. BVSparse<JitArenaAllocator> * noImplicitCallUses;
  345. BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses;
  346. BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses;
  347. BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses;
  348. BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses;
  349. BVSparse<JitArenaAllocator> * cloneStrCandidates;
  350. BVSparse<JitArenaAllocator> * couldRemoveNegZeroBailoutForDef; // Deadstore pass only
  351. Loop * backwardPassCurrentLoop;
  352. // Global optimizer data
  353. GlobOptBlockData globOptData;
  354. // Bailout data
  355. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed;
  356. #if DBG
  357. StackSym ** byteCodeRestoreSyms;
  358. #endif
  359. IntOverflowDoesNotMatterRange * intOverflowDoesNotMatterRange;
  360. private:
  361. BasicBlock(JitArenaAllocator * alloc, Func *func) :
  362. next(nullptr),
  363. prev(nullptr),
  364. firstInstr(nullptr),
  365. number(k_InvalidNum),
  366. loop(nullptr),
  367. isDeleted(false),
  368. isDead(false),
  369. isLoopHeader(false),
  370. hasCall(false),
  371. upwardExposedUses(nullptr),
  372. upwardExposedFields(nullptr),
  373. typesNeedingKnownObjectLayout(nullptr),
  374. slotDeadStoreCandidates(nullptr),
  375. tempNumberTracker(nullptr),
  376. tempObjectTracker(nullptr),
  377. #if DBG
  378. tempObjectVerifyTracker(nullptr),
  379. #endif
  380. stackSymToFinalType(nullptr),
  381. stackSymToGuardedProperties(nullptr),
  382. stackSymToWriteGuardsMap(nullptr),
  383. noImplicitCallUses(nullptr),
  384. noImplicitCallNoMissingValuesUses(nullptr),
  385. noImplicitCallNativeArrayUses(nullptr),
  386. noImplicitCallJsArrayHeadSegmentSymUses(nullptr),
  387. noImplicitCallArrayLengthSymUses(nullptr),
  388. cloneStrCandidates(nullptr),
  389. couldRemoveNegZeroBailoutForDef(nullptr),
  390. byteCodeUpwardExposedUsed(nullptr),
  391. isAirLockCompensationBlock(false),
  392. beginsBailOnNoProfile(false),
  393. #if DBG
  394. byteCodeRestoreSyms(nullptr),
  395. isBreakBlock(false),
  396. isAirLockBlock(false),
  397. isBreakCompensationBlockAtSource(false),
  398. isBreakCompensationBlockAtSink(false),
  399. #endif
  400. dataUseCount(0),
  401. intOverflowDoesNotMatterRange(nullptr),
  402. func(func),
  403. globOptData(func)
  404. {
  405. }
  406. void RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead = false);
  407. void RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead = false);
  408. void UnlinkPred(BasicBlock *block, bool doCleanSucc);
  409. void UnlinkSucc(BasicBlock *block, bool doCleanPred);
  410. #if DBG_DUMP
  411. bool Contains(IR::Instr * instr);
  412. #endif
  413. private:
  414. IR::Instr * firstInstr;
  415. SListBaseCounted<FlowEdge *> predList;
  416. SListBaseCounted<FlowEdge *> succList;
  417. SListBaseCounted<FlowEdge *> deadPredList;
  418. SListBaseCounted<FlowEdge *> deadSuccList;
  419. Func * func;
  420. unsigned int number;
  421. uint dataUseCount;
  422. static const unsigned int k_InvalidNum = (unsigned)-1;
  423. };
  424. class FlowEdge
  425. {
  426. public:
  427. static FlowEdge * New(FlowGraph * graph);
  428. FlowEdge() :
  429. predBlock(nullptr),
  430. succBlock(nullptr),
  431. pathDependentInfo(nullptr)
  432. {
  433. }
  434. BasicBlock * GetPred(void) const
  435. {
  436. return predBlock;
  437. }
  438. void SetPred(BasicBlock * block)
  439. {
  440. predBlock = block;
  441. }
  442. BasicBlock * GetSucc(void) const
  443. {
  444. return succBlock;
  445. }
  446. void SetSucc(BasicBlock * block)
  447. {
  448. succBlock = block;
  449. }
  450. PathDependentInfo * GetPathDependentInfo() const
  451. {
  452. return pathDependentInfo;
  453. }
  454. void SetPathDependentInfo(const PathDependentInfo &info, JitArenaAllocator *const alloc)
  455. {
  456. Assert(info.HasInfo());
  457. if (!pathDependentInfo)
  458. {
  459. pathDependentInfo = JitAnew(alloc, PathDependentInfo, info);
  460. }
  461. else
  462. {
  463. *pathDependentInfo = info;
  464. }
  465. }
  466. void ClearPathDependentInfo(JitArenaAllocator * alloc)
  467. {
  468. JitAdelete(alloc, pathDependentInfo);
  469. pathDependentInfo = nullptr;
  470. }
  471. private:
  472. BasicBlock * predBlock;
  473. BasicBlock * succBlock;
  474. // Only valid during globopt
  475. PathDependentInfo * pathDependentInfo;
  476. };
// Loop: a natural loop discovered by FlowGraph::FindLoops. Holds the member
// blocks, the landing pad inserted before the loop, per-pass analysis state
// (globopt entry liveness, field/array kill info, induction variables), memop
// (memset/memcopy) candidates, and register-allocation bookkeeping.
class Loop
{
    friend FlowGraph;

private:
    typedef JsUtil::BaseDictionary<PropertySym *, Value *, JitArenaAllocator> InitialValueFieldMap;

    Js::ImplicitCallFlags implicitCallFlags;
    Js::LoopFlags loopFlags;
    BasicBlock * headBlock;     // loop header; always the head of blockList (asserted in GetHeadBlock)

public:
    Func * topFunc;
    uint32 loopNumber;
    SList<BasicBlock *> blockList;
    Loop * next;                // next loop in the FlowGraph's loop list
    Loop * parent;              // enclosing loop, or null for an outermost loop
    BasicBlock * landingPad;    // pre-header block inserted before the loop
    IR::LabelInstr * loopTopLabel;

    // Liveness snapshots on entry to the loop (set up by globopt).
    BVSparse<JitArenaAllocator> *varSymsOnEntry;
    BVSparse<JitArenaAllocator> *int32SymsOnEntry;
    BVSparse<JitArenaAllocator> *lossyInt32SymsOnEntry; // see GlobOptData::liveLossyInt32Syms
    BVSparse<JitArenaAllocator> *float64SymsOnEntry;
    BVSparse<JitArenaAllocator> *liveFieldsOnEntry;

    SymToValueInfoMap *symsRequiringCompensationToMergedValueInfoMap;

    BVSparse<JitArenaAllocator> *symsUsedBeforeDefined; // stack syms that are live in the landing pad, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelyIntSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-int value, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelyNumberSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-number value, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *forceFloat64SymsOnEntry;
    BVSparse<JitArenaAllocator> *symsDefInLoop;
    // This is different from symsDefInLoop which only captures syms that survived IR
    // cleanup in PreOptPeep in the pre-pass of a loop. For aggressively transferring
    // values in prepass, we need to know if a source sym was ever assigned to in a loop.
    BVSparse<JitArenaAllocator> *symsAssignedToInLoop;

    BailOutInfo * bailOutInfo;
    IR::BailOutInstr * toPrimitiveSideEffectCheck;
    IR::Instr * endDisableImplicitCall;

    // Field hoisting / copy-prop state.
    BVSparse<JitArenaAllocator> * hoistedFields;
    BVSparse<JitArenaAllocator> * hoistedFieldCopySyms;
    BVSparse<JitArenaAllocator> * liveOutFields;
    ValueNumber firstValueNumberInLoop;
    JsArrayKills jsArrayKills;
    BVSparse<JitArenaAllocator> *fieldKilled;
    BVSparse<JitArenaAllocator> *fieldPRESymStores;
    InitialValueFieldMap initialValueFieldMap;

    // Induction-variable / loop-count analysis (bound-check hoisting).
    InductionVariableSet *inductionVariables;
    BasicBlock *dominatingLoopCountableBlock;
    LoopCount *loopCount;
    SymIdToStackSymMap *loopCountBasedBoundBaseSyms;

    typedef SegmentClusterList<SymID, JitArenaAllocator> LoopSymClusterList;
    LoopSymClusterList *symClusterList;
    BVSparse<JitArenaAllocator> * internallyDereferencedSyms;
    SList<IR::ByteCodeUsesInstr*> *outwardSpeculationMaskInstrs;

    bool isDead : 1;
    bool hasDeadStoreCollectionPass : 1;
    bool hasDeadStorePrepass : 1;
    bool hasCall : 1;
    bool hasHoistedFields : 1;
    bool needImplicitCallBailoutChecksForJsArrayCheckHoist : 1;
    bool allFieldsKilled : 1;
    bool isLeaf : 1;        // true if this loop contains no nested loop
    bool isProcessed : 1; // Set and reset at varying places according to the phase we're in.
                          // For example, in the lowerer, it'll be set to true when we process the loopTop for a certain loop

    // Memop (memset/memcopy) candidate tracking.
    struct MemCopyCandidate;
    struct MemSetCandidate;
    struct MemOpCandidate
    {
        SymID base;                 // base array sym of the store
        SymID index;                // induction-variable index sym
        byte count;
        bool bIndexAlreadyChanged;  // index was incremented before the store
        enum MemOpType
        {
            MEMSET,
            MEMCOPY
        } type;
        bool IsMemSet() const { return type == MEMSET; }
        bool IsMemCopy() const { return type == MEMCOPY; }
        // Downcasts; valid only when the corresponding Is* check holds.
        struct Loop::MemCopyCandidate* AsMemCopy();
        struct Loop::MemSetCandidate* AsMemSet();
        MemOpCandidate(MemOpType type) :
            type(type)
        {
        }
    };

    struct MemSetCandidate : public MemOpCandidate
    {
        BailoutConstantValue constant;  // constant value being stored, if not a sym
        StackSym* srcSym;               // sym being stored, if not a constant
        MemSetCandidate() : MemOpCandidate(MemOpCandidate::MEMSET), srcSym(nullptr) {}
    };

    struct MemCopyCandidate : public MemOpCandidate
    {
        SymID ldBase;               // base array sym of the matching load
        StackSym* transferSym;      // sym carrying the value from load to store
        byte ldCount;
        MemCopyCandidate() : MemOpCandidate(MemOpCandidate::MEMCOPY) {}
    };

// Iteration helpers over a loop's memop candidate list, with type filtering.
#define FOREACH_MEMOP_CANDIDATES_EDITING(data, loop, iterator) FOREACH_SLISTCOUNTED_ENTRY_EDITING(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates, iterator)
#define NEXT_MEMOP_CANDIDATE_EDITING NEXT_SLISTCOUNTED_ENTRY_EDITING
#define FOREACH_MEMOP_CANDIDATES(data, loop) FOREACH_SLISTCOUNTED_ENTRY(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates)
#define NEXT_MEMOP_CANDIDATE NEXT_SLISTCOUNTED_ENTRY
#define MEMOP_CANDIDATE_TYPE_CHECK(candidate, data, type) if(candidate->Is ## type()) {Loop:: ## type ## Candidate* data = candidate->As## type();
#define FOREACH_MEMCOPY_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
#define NEXT_MEMCOPY_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
#define FOREACH_MEMCOPY_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
#define NEXT_MEMCOPY_CANDIDATE }}NEXT_MEMOP_CANDIDATE}
#define FOREACH_MEMSET_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
#define NEXT_MEMSET_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
#define FOREACH_MEMSET_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
#define NEXT_MEMSET_CANDIDATE }}NEXT_MEMOP_CANDIDATE}

    // How an induction variable changes per iteration (packed into one byte).
    typedef struct
    {
        byte unroll : 7;
        byte isIncremental : 1;
    } InductionVariableChangeInfo;

    typedef JsUtil::BaseDictionary<SymID, InductionVariableChangeInfo, JitArenaAllocator> InductionVariableChangeInfoMap;
    typedef JsUtil::BaseDictionary<byte, IR::Opnd*, JitArenaAllocator> InductionVariableOpndPerUnrollMap;
    typedef SListCounted<MemOpCandidate *> MemOpList;

    typedef struct
    {
        MemOpList *candidates;
        BVSparse<JitArenaAllocator> *inductionVariablesUsedAfterLoop;
        InductionVariableChangeInfoMap *inductionVariableChangeInfoMap;
        InductionVariableOpndPerUnrollMap *inductionVariableOpndPerUnrollMap;
        // This assumes that all memop operations use the same index and have the same length
        // Temporary map to reuse existing startIndexOpnd while emitting
        // 0 = !increment & !alreadyChanged, 1 = !increment & alreadyChanged, 2 = increment & !alreadyChanged, 3 = increment & alreadyChanged
        IR::RegOpnd* startIndexOpndCache[4];
        IR::Instr* instr;
    } MemOpInfo;

    bool doMemOp : 1;
    MemOpInfo *memOpInfo;   // lazily created by EnsureMemOpVariablesInitialized

    // Register-allocator state for this loop.
    struct RegAlloc
    {
        Lifetime ** loopTopRegContent; // Save off the state of the registers at the loop top
        BVSparse<JitArenaAllocator> * symRegUseBv; // If a lifetime was live in a reg into the loop, did the reg get used before being spilled?
        BVSparse<JitArenaAllocator> * defdInLoopBv; // Was a lifetime defined in the loop?
        BVSparse<JitArenaAllocator> * liveOnBackEdgeSyms; // Is a lifetime live on the back-edge of the loop?
        BitVector regUseBv; // Registers used in this loop so far
        uint32 loopStart; // loopTopLabel->GetNumber()
        uint32 loopEnd; // loopTailBranch->GetNumber()
        uint32 helperLength; // Number of instrs in helper code in loop
        SList<Lifetime *> * extendedLifetime; // Lifetimes to extend for this loop
        SList<Lifetime **> * exitRegContentList; // Linked list of regContents for the exit edges
        bool hasNonOpHelperCall;
        bool hasCall;
        bool hasAirLock; // Do back-edges have airlock blocks?
    } regAlloc;

public:
    Loop(JitArenaAllocator * alloc, Func *func)
        : topFunc(func),
        blockList(alloc),
        parent(nullptr),
        landingPad(nullptr),
        loopTopLabel(nullptr),
        symsUsedBeforeDefined(nullptr),
        likelyIntSymsUsedBeforeDefined(nullptr),
        likelyNumberSymsUsedBeforeDefined(nullptr),
        forceFloat64SymsOnEntry(nullptr),
        symsDefInLoop(nullptr),
        symsAssignedToInLoop(nullptr),
        needImplicitCallBailoutChecksForJsArrayCheckHoist(false),
        inductionVariables(nullptr),
        dominatingLoopCountableBlock(nullptr),
        loopCount(nullptr),
        loopCountBasedBoundBaseSyms(nullptr),
        symClusterList(nullptr),
        internallyDereferencedSyms(nullptr),
        outwardSpeculationMaskInstrs(nullptr),
        isDead(false),
        allFieldsKilled(false),
        isLeaf(true),
        isProcessed(false),
        initialValueFieldMap(alloc),
        symsRequiringCompensationToMergedValueInfoMap(nullptr)
    {
        // Loop numbers are 1-based and assigned in discovery order.
        this->loopNumber = ++func->loopCount;
    }

    void SetHeadBlock(BasicBlock *block) { headBlock = block; }
    BasicBlock * GetHeadBlock() const { Assert(headBlock == blockList.Head()); return headBlock; }
    bool IsDescendentOrSelf(Loop const * loop) const;

    // Allocate memOpInfo and its maps on first use.
    void EnsureMemOpVariablesInitialized();

    Js::ImplicitCallFlags GetImplicitCallFlags();
    void SetImplicitCallFlags(Js::ImplicitCallFlags flags);
    Js::LoopFlags GetLoopFlags() const { return loopFlags; }
    void SetLoopFlags(Js::LoopFlags val) { loopFlags = val; }
    bool CanHoistInvariants() const;
    bool CanDoFieldCopyProp();
    void SetHasCall();
    IR::LabelInstr * GetLoopTopInstr() const;
    void SetLoopTopInstr(IR::LabelInstr * loopTop);
    Func * GetFunc() const { return GetLoopTopInstr()->m_func; }
    bool IsSymAssignedToInSelfOrParents(StackSym * const sym) const;
    BasicBlock * GetAnyTailBlock() const;

#if DBG_DUMP
    bool GetHasCall() const { return hasCall; }
    uint GetLoopNumber() const;
#endif

private:
    // Insert the pre-header (landing pad) block for this loop.
    void InsertLandingPad(FlowGraph *fg);
    bool RemoveBreakBlocks(FlowGraph *fg);
};
// Structure definition cannot be inside Loop in order to use it as a parameter in GlobOpt
// Data carried from memop (memset/memcopy) candidate validation to the
// emission phase.
struct MemOpEmitData
{
Loop::MemOpCandidate* candidate;                    // candidate being emitted
IR::Instr* stElemInstr;                             // the candidate's store instruction
BasicBlock* block;                                  // block containing stElemInstr
Loop::InductionVariableChangeInfo inductionVar;     // how the induction variable changes per iteration
IR::BailOutKind bailOutKind;                        // bailout kind to attach to the emitted memop
};
// Emit data for a memset candidate; needs no fields beyond MemOpEmitData.
struct MemSetEmitData : public MemOpEmitData
{
};
// Emit data for a memcopy candidate; additionally tracks the load
// instruction paired with the store in MemOpEmitData.
struct MemCopyEmitData : public MemOpEmitData
{
IR::Instr* ldElemInstr;     // the candidate's load instruction
};
// Convenience wrappers that iterate the blocks of a function's flow graph
// (func->m_fg) by delegating to the graph-level iterators defined below.
#define FOREACH_BLOCK_IN_FUNC(block, func)\
    FOREACH_BLOCK(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC\
    NEXT_BLOCK;
#define FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, func)\
    FOREACH_BLOCK_DEAD_OR_ALIVE(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE\
    NEXT_BLOCK_DEAD_OR_ALIVE;
#define FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func) \
    FOREACH_BLOCK_BACKWARD(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC \
    NEXT_BLOCK_BACKWARD;
#define FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, func) \
    FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE;
// "_EDITING" variants allow the current block to be removed during iteration.
#define FOREACH_BLOCK_IN_FUNC_EDITING(block, func)\
    FOREACH_BLOCK_EDITING(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC_EDITING\
    NEXT_BLOCK_EDITING;
#define FOREACH_BLOCK_BACKWARD_IN_FUNC_EDITING(block, func)\
    FOREACH_BLOCK_BACKWARD_EDITING(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC_EDITING\
    NEXT_BLOCK_BACKWARD_EDITING;
// Iterate every block in the graph's block list, including deleted ones.
#define FOREACH_BLOCK_ALL(block, graph) \
    for (BasicBlock *block = graph->blockList;\
    block != nullptr;\
    block = block->next)\
    {
#define NEXT_BLOCK_ALL \
    }
// Iterate live blocks only: deleted blocks are skipped.
#define FOREACH_BLOCK(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK \
    NEXT_BLOCK_ALL
// Skips only blocks that are deleted and not marked dead, i.e. visits live
// blocks as well as dead ones that were deleted.
#define FOREACH_BLOCK_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
    if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_DEAD_OR_ALIVE \
    NEXT_BLOCK_ALL
// Backward traversal from the graph's tail block to the head.
#define FOREACH_BLOCK_BACKWARD(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD \
    NEXT_BLOCK_BACKWARD_IN_RANGE
#define FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE
// Iterate backward from 'blockList' down to and including 'blockLast'
// (blockStop is blockLast->prev); a nullptr blockLast walks to the head.
// Visits deleted blocks too.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast)\
    {\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
    block != blockStop;\
    block = block->prev)\
    {
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL \
    }}
// Same range, skipping deleted blocks.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Editing variant: 'blockPrev' snapshots block->prev before the body runs,
// so the current block can be unlinked during iteration.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev)\
    {\
    BasicBlock *blockPrev;\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
    block != blockStop;\
    block = blockPrev)\
    {\
    blockPrev = block->prev;
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING \
    }}
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(block, blockList, blockLast, blockPrev) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING
// Skips only blocks that are deleted and not marked dead.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
    if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Forward iteration over live blocks with editing support: blockNext is
// captured before the body executes, so the current block may be removed.
#define FOREACH_BLOCK_EDITING(block, graph)\
    {\
    BasicBlock *blockNext;\
    for (BasicBlock *block = graph->blockList;\
    block != nullptr;\
    block = blockNext)\
    {\
    blockNext = block->next; \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_EDITING \
    }}
// Backward editing counterpart, starting at the graph's tail block.
#define FOREACH_BLOCK_BACKWARD_EDITING(block, graph)\
    {\
    BasicBlock *blockPrev;\
    for (BasicBlock *block = graph->tailBlock;\
    block != nullptr;\
    block = blockPrev)\
    {\
    blockPrev = block->prev; \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_EDITING \
    }}
// Iterate the live (non-deleted) blocks of an SList of BasicBlock*.
#define FOREACH_BLOCK_IN_LIST(block, list)\
    FOREACH_SLIST_ENTRY(BasicBlock*, block, list)\
    {\
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST \
    NEXT_SLIST_ENTRY \
    }
// Editing variant; 'iter' permits removal of the current list entry.
#define FOREACH_BLOCK_IN_LIST_EDITING(block, list, iter)\
    FOREACH_SLIST_ENTRY_EDITING(BasicBlock*, block, list, iter)\
    {\
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST_EDITING \
    NEXT_SLIST_ENTRY_EDITING \
    }
// Iterate a block's successor edges.
#define FOREACH_SUCCESSOR_EDGE(edge, block)\
    FOREACH_EDGE_IN_LIST(edge, block->GetSuccList())
#define NEXT_SUCCESSOR_EDGE\
    NEXT_EDGE_IN_LIST
  817. #define FOREACH_SUCCESSOR_EDGE_EDITING(edge, bloc, iter)\
  818. FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetSuccList(), iter)
  819. #define NEXT_SUCCESSOR_EDGE_EDITING\
  820. NEXT_EDGE_IN_LIST_EDITING
// Iterate a block's predecessor edges.
#define FOREACH_PREDECESSOR_EDGE(edge, block)\
    FOREACH_EDGE_IN_LIST(edge, block->GetPredList())
#define NEXT_PREDECESSOR_EDGE\
    NEXT_EDGE_IN_LIST
// Editing variant; 'iter' permits removal of the current edge.
#define FOREACH_PREDECESSOR_EDGE_EDITING(edge, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetPredList(), iter)
#define NEXT_PREDECESSOR_EDGE_EDITING\
    NEXT_EDGE_IN_LIST_EDITING
// Iterate a counted SList of FlowEdge*.
#define FOREACH_EDGE_IN_LIST(edge, list)\
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, list)\
    {
#define NEXT_EDGE_IN_LIST\
    NEXT_SLISTBASECOUNTED_ENTRY }
  834. #define FOREACH_EDGE_IN_LIST_EDITING(edge, list, iter)\
  835. FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, list, iter)\
  836. {\
  837. #define NEXT_EDGE_IN_LIST_EDITING\
  838. NEXT_SLISTBASECOUNTED_ENTRY_EDITING }
// Iterate a block's successor blocks by walking its successor edge list;
// '__edge' is the hidden edge iteration variable.
#define FOREACH_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetSuccList())\
    {\
    BasicBlock * blockSucc = __edge->GetSucc(); \
    AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Editing variant; 'iter' permits removal of the current edge.
#define FOREACH_SUCCESSOR_BLOCK_EDITING(blockSucc, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(__edge, block->GetSuccList(), iter)\
    {\
    BasicBlock * blockSucc = __edge->GetSucc(); \
    AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK_EDITING\
    }\
    NEXT_EDGE_IN_LIST_EDITING
// Same, but over the block's dead-successor edge list.
#define FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadSuccList())\
    {\
    BasicBlock * blockSucc = __edge->GetSucc(); \
    AnalysisAssert(blockSucc);
#define NEXT_DEAD_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Predecessor-side counterparts of the iterators above.
#define FOREACH_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetPredList())\
    {\
    BasicBlock * blockPred = __edge->GetPred(); \
    AnalysisAssert(blockPred);
#define NEXT_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
#define FOREACH_PREDECESSOR_BLOCK_EDITING(blockPred, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(__edge, block->GetPredList(), iter)\
    {\
    BasicBlock * blockPred = __edge->GetPred(); \
    AnalysisAssert(blockPred);
#define NEXT_PREDECESSOR_BLOCK_EDITING\
    }\
    NEXT_EDGE_IN_LIST_EDITING
#define FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadPredList())\
    {\
    BasicBlock * blockPred = __edge->GetPred(); \
    AnalysisAssert(blockPred);
#define NEXT_DEAD_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Iterate the live blocks belonging to a loop (loop->blockList).
#define FOREACH_BLOCK_IN_LOOP(block, loop)\
    FOREACH_BLOCK_IN_LIST(block, &loop->blockList)
#define NEXT_BLOCK_IN_LOOP \
    NEXT_BLOCK_IN_LIST
// Editing variant; 'iter' permits removal of the current block entry.
#define FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)\
    FOREACH_BLOCK_IN_LIST_EDITING(block, &loop->blockList, iter)
#define NEXT_BLOCK_IN_LOOP_EDITING \
    NEXT_BLOCK_IN_LIST_EDITING
// Iterate a function's loops with editing support, via its flow graph.
#define FOREACH_LOOP_IN_FUNC_EDITING(loop, func)\
    FOREACH_LOOP_EDITING(loop, func->m_fg)
#define NEXT_LOOP_IN_FUNC_EDITING\
    NEXT_LOOP_EDITING;
// Iterate a graph's loop list; loopNext snapshots loop->next before the
// body runs, so the current loop may be removed during iteration.
#define FOREACH_LOOP_EDITING(loop, graph)\
    {\
    Loop* loopNext;\
    for (Loop* loop = graph->loopList;\
    loop != nullptr;\
    loop = loopNext)\
    {\
    loopNext = loop->next;
#define NEXT_LOOP_EDITING \
    }}