FlowGraph.h 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. class BasicBlock;
  7. class FlowEdge;
  8. class Loop;
  9. class Region;
  10. class Func;
  11. class AddPropertyCacheBucket
  12. {
  13. private:
  14. JITTypeHolder initialType;
  15. JITTypeHolder finalType;
  16. public:
  17. AddPropertyCacheBucket() : initialType(nullptr), finalType(nullptr)
  18. #if DBG
  19. , deadStoreUnavailableInitialType(nullptr), deadStoreUnavailableFinalType(nullptr)
  20. #endif
  21. {
  22. }
  23. AddPropertyCacheBucket(const AddPropertyCacheBucket& bucket) :
  24. initialType(bucket.initialType), finalType(bucket.finalType)
  25. #if DBG
  26. , deadStoreUnavailableInitialType(bucket.deadStoreUnavailableInitialType)
  27. , deadStoreUnavailableFinalType(bucket.deadStoreUnavailableFinalType)
  28. #endif
  29. {
  30. }
  31. bool operator!=(const AddPropertyCacheBucket& bucket) const
  32. {
  33. return this->initialType != bucket.initialType || this->finalType != bucket.finalType;
  34. }
  35. bool operator==(const AddPropertyCacheBucket& bucket) const
  36. {
  37. return this->initialType == bucket.initialType && this->finalType == bucket.finalType;
  38. }
  39. void Copy(AddPropertyCacheBucket *pNew) const
  40. {
  41. pNew->initialType = this->initialType;
  42. pNew->finalType = this->finalType;
  43. #if DBG
  44. pNew->deadStoreUnavailableInitialType = this->deadStoreUnavailableInitialType;
  45. pNew->deadStoreUnavailableFinalType = this->deadStoreUnavailableFinalType;
  46. #endif
  47. }
  48. JITTypeHolder GetInitialType() const { return this->initialType; }
  49. JITTypeHolder GetFinalType() const { return this->finalType; }
  50. void SetInitialType(JITTypeHolder type) { this->initialType = type; }
  51. void SetFinalType(JITTypeHolder type) { this->finalType = type; }
  52. #if DBG_DUMP
  53. void Dump() const;
  54. #endif
  55. #ifdef DBG
  56. JITTypeHolder deadStoreUnavailableInitialType;
  57. JITTypeHolder deadStoreUnavailableFinalType;
  58. #endif
  59. };
  60. class ObjTypeGuardBucket
  61. {
  62. private:
  63. BVSparse<JitArenaAllocator>* guardedPropertyOps;
  64. JITTypeHolder monoGuardType;
  65. public:
  66. ObjTypeGuardBucket() : guardedPropertyOps(nullptr), monoGuardType(nullptr) {}
  67. ObjTypeGuardBucket(BVSparse<JitArenaAllocator>* guardedPropertyOps) : monoGuardType(nullptr)
  68. {
  69. this->guardedPropertyOps = (guardedPropertyOps != nullptr ? guardedPropertyOps->CopyNew() : nullptr);
  70. }
  71. void Copy(ObjTypeGuardBucket *pNew) const
  72. {
  73. pNew->guardedPropertyOps = this->guardedPropertyOps ? this->guardedPropertyOps->CopyNew() : nullptr;
  74. pNew->monoGuardType = this->monoGuardType;
  75. }
  76. BVSparse<JitArenaAllocator> *GetGuardedPropertyOps() const { return this->guardedPropertyOps; }
  77. void SetGuardedPropertyOps(BVSparse<JitArenaAllocator> *guardedPropertyOps) { this->guardedPropertyOps = guardedPropertyOps; }
  78. void AddToGuardedPropertyOps(uint propertyOpId) { Assert(this->guardedPropertyOps != nullptr); this->guardedPropertyOps->Set(propertyOpId); }
  79. bool NeedsMonoCheck() const { return this->monoGuardType != nullptr; }
  80. void SetMonoGuardType(JITTypeHolder type) { this->monoGuardType = type; }
  81. JITTypeHolder GetMonoGuardType() const { return this->monoGuardType; }
  82. #if DBG_DUMP
  83. void Dump() const;
  84. #endif
  85. };
  86. class ObjWriteGuardBucket
  87. {
  88. private:
  89. BVSparse<JitArenaAllocator>* writeGuards;
  90. public:
  91. ObjWriteGuardBucket() : writeGuards(nullptr) {}
  92. ObjWriteGuardBucket(BVSparse<JitArenaAllocator>* writeGuards) { this->writeGuards = (writeGuards != nullptr ? writeGuards->CopyNew() : nullptr); }
  93. void Copy(ObjWriteGuardBucket *pNew) const
  94. {
  95. pNew->writeGuards = this->writeGuards ? this->writeGuards->CopyNew() : nullptr;
  96. }
  97. BVSparse<JitArenaAllocator> *GetWriteGuards() const { return this->writeGuards; }
  98. void SetWriteGuards(BVSparse<JitArenaAllocator> *writeGuards) { this->writeGuards = writeGuards; }
  99. void AddToWriteGuards(uint writeGuardId) { Assert(this->writeGuards != nullptr); this->writeGuards->Set(writeGuardId); }
  100. #if DBG_DUMP
  101. void Dump() const;
  102. #endif
  103. };
// FlowGraph: the control-flow graph (basic blocks, edges, loops) built over a
// Func's IR instruction stream, plus the EH-region bookkeeping (try/catch/
// finally label stacks and maps) used while building and editing it.
class FlowGraph
{
    friend Loop;
public:
    static FlowGraph * New(Func *func, JitArenaAllocator *alloc);

    FlowGraph(Func *func, JitArenaAllocator *fgAlloc) :
        func(func),
        alloc(fgAlloc),
        blockList(nullptr),
        blockCount(0),
        tailBlock(nullptr),
        loopList(nullptr),
        catchLabelStack(nullptr),
        finallyLabelStack(nullptr),
        leaveNullLabelStack(nullptr),
        regToFinallyEndMap(nullptr),
        leaveNullLabelToFinallyLabelMap(nullptr),
        hasBackwardPassInfo(false),
        hasLoop(false),
        implicitCallFlags(Js::ImplicitCall_HasNoInfo)
    {
    }

    // Build constructs the graph; Destroy tears it down.
    void Build(void);
    void Destroy(void);

    void RunPeeps();
    BasicBlock * AddBlock(IR::Instr * firstInstr, IR::Instr * lastInstr, BasicBlock * nextBlock, BasicBlock *prevBlock = nullptr);
    FlowEdge * AddEdge(BasicBlock * predBlock, BasicBlock * succBlock);
    BasicBlock * InsertCompensationCodeForBlockMove(FlowEdge * edge, // edge where compensation code needs to be inserted
        bool insertCompensationBlockToLoopList = false,
        bool sinkBlockLoop = false // NOTE(review): bool flag; the original comment said "Loop to which compensation block belongs" — presumably means "attribute the block to the sink block's loop"; confirm at the definition
    );
    BasicBlock * InsertAirlockBlock(FlowEdge * edge);
    void InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget);
    void RemoveUnreachableBlocks();
    bool RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt = nullptr);
    IR::Instr * RemoveInstr(IR::Instr *instr, GlobOpt * globOpt);
    void RemoveBlock(BasicBlock *block, GlobOpt * globOpt = nullptr, bool tailDuping = false);
    BasicBlock * SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr);
    Func* GetFunc() { return func;};
    static void SafeRemoveInstr(IR::Instr *instr);
    void SortLoopLists();
    FlowEdge * FindEdge(BasicBlock *predBlock, BasicBlock *succBlock);
    // EH (try/finally) flow helpers:
    IR::LabelInstr * DeleteLeaveChainBlocks(IR::BranchInstr *leaveInstr, IR::Instr * &instrPrev);
    bool IsEarlyExitFromFinally(IR::BranchInstr *leaveInstr, Region *currentRegion, Region *branchTargetRegion, IR::Instr *&instrPrev, IR::LabelInstr *&exitLabel);
    bool Dominates(Region *finallyRegion, Region *exitLabelRegion);
    bool DoesExitLabelDominate(IR::BranchInstr *leaveInstr);
    void InsertEdgeFromFinallyToEarlyExit(BasicBlock * finallyEndBlock, IR::LabelInstr * exitLabel);

#if DBG_DUMP
    void Dump();
    void Dump(bool verbose, const char16 *form);
#endif

    JitArenaAllocator * alloc;
    BasicBlock * blockList;   // head of the block list
    BasicBlock * tailBlock;   // last block in the list
    Loop * loopList;          // loops found by FindLoops (see SortLoopLists)
    SList<IR::LabelInstr*> * catchLabelStack;
    SList<IR::LabelInstr*> * finallyLabelStack;
    SList<IR::LabelInstr*> * leaveNullLabelStack;
    typedef JsUtil::BaseDictionary<Region *, BasicBlock *, JitArenaAllocator> RegionToFinallyEndMapType;
    RegionToFinallyEndMapType * regToFinallyEndMap;
    typedef JsUtil::BaseDictionary<IR::LabelInstr *, IR::LabelInstr *, JitArenaAllocator> LeaveNullLabelToFinallyLabelMapType;
    LeaveNullLabelToFinallyLabelMapType * leaveNullLabelToFinallyLabelMap;
    bool hasBackwardPassInfo;
    bool hasLoop;
    Js::ImplicitCallFlags implicitCallFlags;

private:
    // Loop discovery and canonicalization:
    void FindLoops(void);
    bool CanonicalizeLoops(void);
    void BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop = nullptr);
    void WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc);
    void AddBlockToLoop(BasicBlock *block, Loop *loop);
    // EH region assignment/propagation:
    bool IsEHTransitionInstr(IR::Instr *instr);
    BasicBlock * GetPredecessorForRegionPropagation(BasicBlock *block);
    void UpdateRegionForBlock(BasicBlock *block);
    void UpdateRegionForBlockFromEHPred(BasicBlock *block, bool reassign = false);
    Region * PropagateRegionFromPred(BasicBlock *block, BasicBlock *predBlock, Region *predRegion, IR::Instr * &tryInstr);
    // Peephole helpers:
    IR::Instr * PeepCm(IR::Instr *instr);
    IR::Instr * PeepTypedCm(IR::Instr *instr);
    void MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock);
    bool UnsignedCmpPeep(IR::Instr *cmpInstr);
    bool IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1);
#if DBG
    void VerifyLoopGraph();
#endif

private:
    void InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, bool origBranchSrcOpndIsJITOpt, uint32 origBranchSrcSymId);

private:
    Func * func;
    unsigned int blockCount;
};
  194. class BasicBlock
  195. {
  196. friend class FlowGraph;
  197. friend class Loop;
  198. public:
  199. static BasicBlock * New(FlowGraph * graph);
  200. void AddPred(FlowEdge * edge, FlowGraph * graph);
  201. void AddSucc(FlowEdge * edge, FlowGraph * graph);
  202. void RemovePred(BasicBlock *block, FlowGraph * graph);
  203. void RemoveSucc(BasicBlock *block, FlowGraph * graph);
  204. void RemoveDeadPred(BasicBlock *block, FlowGraph * graph);
  205. void RemoveDeadSucc(BasicBlock *block, FlowGraph * graph);
  206. void UnlinkPred(BasicBlock *block);
  207. void UnlinkSucc(BasicBlock *block);
  208. void UnlinkInstr(IR::Instr * Instr);
  209. void RemoveInstr(IR::Instr * instr);
  210. void InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr);
  211. void InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr);
  212. void InsertAfter(IR::Instr * newInstr);
  213. void InvertBranch(IR::BranchInstr *branch);
  214. IR::Instr * GetFirstInstr(void) const
  215. {
  216. return firstInstr;
  217. }
  218. void SetFirstInstr(IR::Instr * instr)
  219. {
  220. firstInstr = instr;
  221. }
  222. IR::Instr * GetLastInstr(void)
  223. {
  224. BasicBlock *blNext = this->next;
  225. if (blNext)
  226. {
  227. return blNext->firstInstr->m_prev;
  228. }
  229. else
  230. {
  231. return this->func->m_exitInstr;
  232. }
  233. }
  234. void SetLastInstr(IR::Instr * instr)
  235. {
  236. // Intentionally empty
  237. }
  238. SListBaseCounted<FlowEdge *> * GetPredList(void)
  239. {
  240. return &predList;
  241. }
  242. SListBaseCounted<FlowEdge *> * GetSuccList(void)
  243. {
  244. return &succList;
  245. }
  246. SListBaseCounted<FlowEdge *> * GetDeadPredList(void)
  247. {
  248. return &deadPredList;
  249. }
  250. SListBaseCounted<FlowEdge *> * GetDeadSuccList(void)
  251. {
  252. return &deadSuccList;
  253. }
  254. unsigned int GetBlockNum(void) const
  255. {
  256. return number;
  257. }
  258. void SetBlockNum(unsigned int num)
  259. {
  260. number = num;
  261. }
  262. BasicBlock * GetPrev()
  263. {
  264. BasicBlock *block = this;
  265. do {
  266. block = block->prev;
  267. } while (block->isDeleted);
  268. return block;
  269. }
  270. BasicBlock * GetNext()
  271. {
  272. BasicBlock *block = this->next;
  273. while (block && block->isDeleted) {
  274. block = block->next;
  275. }
  276. return block;
  277. }
  278. uint IncrementDataUseCount()
  279. {
  280. return ++this->dataUseCount;
  281. }
  282. uint DecrementDataUseCount()
  283. {
  284. Assert(this->dataUseCount != 0);
  285. return --this->dataUseCount;
  286. }
  287. uint GetDataUseCount()
  288. {
  289. return this->dataUseCount;
  290. }
  291. void SetDataUseCount(uint count)
  292. {
  293. this->dataUseCount = count;
  294. }
  295. bool IsLandingPad();
  296. // GlobOpt Stuff
  297. public:
  298. void MergePredBlocksValueMaps(GlobOpt* globOptState);
  299. private:
  300. void CleanUpValueMaps();
  301. #if DBG_DUMP
  302. public:
  303. void DumpHeader(bool insertCR = true);
  304. void Dump();
  305. #endif
  306. public:
  307. BasicBlock * next;
  308. BasicBlock * prev;
  309. Loop * loop;
  310. uint8 isDeleted:1;
  311. uint8 isDead:1;
  312. uint8 isLoopHeader:1;
  313. uint8 hasCall:1;
  314. uint8 isVisited:1;
  315. uint8 isAirLockCompensationBlock:1;
  316. uint8 beginsBailOnNoProfile:1;
  317. #ifdef DBG
  318. uint8 isBreakBlock:1;
  319. uint8 isAirLockBlock:1;
  320. uint8 isBreakCompensationBlockAtSink:1;
  321. uint8 isBreakCompensationBlockAtSource:1;
  322. #endif
  323. // Deadstore data
  324. BVSparse<JitArenaAllocator> * upwardExposedUses;
  325. BVSparse<JitArenaAllocator> * upwardExposedFields;
  326. BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout;
  327. BVSparse<JitArenaAllocator> * fieldHoistCandidates;
  328. BVSparse<JitArenaAllocator> * slotDeadStoreCandidates;
  329. TempNumberTracker * tempNumberTracker;
  330. TempObjectTracker * tempObjectTracker;
  331. #if DBG
  332. TempObjectVerifyTracker * tempObjectVerifyTracker;
  333. #endif
  334. HashTable<AddPropertyCacheBucket> * stackSymToFinalType;
  335. HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties; // Dead store pass only
  336. HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap; // Backward pass only
  337. BVSparse<JitArenaAllocator> * noImplicitCallUses;
  338. BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses;
  339. BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses;
  340. BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses;
  341. BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses;
  342. BVSparse<JitArenaAllocator> * cloneStrCandidates;
  343. BVSparse<JitArenaAllocator> * couldRemoveNegZeroBailoutForDef; // Deadstore pass only
  344. Loop * backwardPassCurrentLoop;
  345. // Global optimizer data
  346. GlobOptBlockData globOptData;
  347. // Bailout data
  348. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed;
  349. #if DBG
  350. StackSym ** byteCodeRestoreSyms;
  351. #endif
  352. IntOverflowDoesNotMatterRange * intOverflowDoesNotMatterRange;
  353. private:
  354. BasicBlock(JitArenaAllocator * alloc, Func *func) :
  355. next(nullptr),
  356. prev(nullptr),
  357. firstInstr(nullptr),
  358. number(k_InvalidNum),
  359. loop(nullptr),
  360. isDeleted(false),
  361. isDead(false),
  362. isLoopHeader(false),
  363. hasCall(false),
  364. upwardExposedUses(nullptr),
  365. upwardExposedFields(nullptr),
  366. typesNeedingKnownObjectLayout(nullptr),
  367. slotDeadStoreCandidates(nullptr),
  368. tempNumberTracker(nullptr),
  369. tempObjectTracker(nullptr),
  370. #if DBG
  371. tempObjectVerifyTracker(nullptr),
  372. #endif
  373. stackSymToFinalType(nullptr),
  374. stackSymToGuardedProperties(nullptr),
  375. stackSymToWriteGuardsMap(nullptr),
  376. noImplicitCallUses(nullptr),
  377. noImplicitCallNoMissingValuesUses(nullptr),
  378. noImplicitCallNativeArrayUses(nullptr),
  379. noImplicitCallJsArrayHeadSegmentSymUses(nullptr),
  380. noImplicitCallArrayLengthSymUses(nullptr),
  381. cloneStrCandidates(nullptr),
  382. couldRemoveNegZeroBailoutForDef(nullptr),
  383. byteCodeUpwardExposedUsed(nullptr),
  384. isAirLockCompensationBlock(false),
  385. beginsBailOnNoProfile(false),
  386. #if DBG
  387. byteCodeRestoreSyms(nullptr),
  388. isBreakBlock(false),
  389. isAirLockBlock(false),
  390. isBreakCompensationBlockAtSource(false),
  391. isBreakCompensationBlockAtSink(false),
  392. #endif
  393. fieldHoistCandidates(nullptr),
  394. dataUseCount(0),
  395. intOverflowDoesNotMatterRange(nullptr),
  396. func(func),
  397. globOptData(func)
  398. {
  399. }
  400. void RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead = false);
  401. void RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead = false);
  402. void UnlinkPred(BasicBlock *block, bool doCleanSucc);
  403. void UnlinkSucc(BasicBlock *block, bool doCleanPred);
  404. #if DBG_DUMP
  405. bool Contains(IR::Instr * instr);
  406. #endif
  407. private:
  408. IR::Instr * firstInstr;
  409. SListBaseCounted<FlowEdge *> predList;
  410. SListBaseCounted<FlowEdge *> succList;
  411. SListBaseCounted<FlowEdge *> deadPredList;
  412. SListBaseCounted<FlowEdge *> deadSuccList;
  413. Func * func;
  414. unsigned int number;
  415. uint dataUseCount;
  416. static const unsigned int k_InvalidNum = (unsigned)-1;
  417. };
  418. class FlowEdge
  419. {
  420. public:
  421. static FlowEdge * New(FlowGraph * graph);
  422. FlowEdge() :
  423. predBlock(nullptr),
  424. succBlock(nullptr),
  425. pathDependentInfo(nullptr)
  426. {
  427. }
  428. BasicBlock * GetPred(void) const
  429. {
  430. return predBlock;
  431. }
  432. void SetPred(BasicBlock * block)
  433. {
  434. predBlock = block;
  435. }
  436. BasicBlock * GetSucc(void) const
  437. {
  438. return succBlock;
  439. }
  440. void SetSucc(BasicBlock * block)
  441. {
  442. succBlock = block;
  443. }
  444. PathDependentInfo * GetPathDependentInfo() const
  445. {
  446. return pathDependentInfo;
  447. }
  448. void SetPathDependentInfo(const PathDependentInfo &info, JitArenaAllocator *const alloc)
  449. {
  450. Assert(info.HasInfo());
  451. if (!pathDependentInfo)
  452. {
  453. pathDependentInfo = JitAnew(alloc, PathDependentInfo, info);
  454. }
  455. else
  456. {
  457. *pathDependentInfo = info;
  458. }
  459. }
  460. void ClearPathDependentInfo(JitArenaAllocator * alloc)
  461. {
  462. JitAdelete(alloc, pathDependentInfo);
  463. pathDependentInfo = nullptr;
  464. }
  465. private:
  466. BasicBlock * predBlock;
  467. BasicBlock * succBlock;
  468. // Only valid during globopt
  469. PathDependentInfo * pathDependentInfo;
  470. };
// Loop: metadata for a loop discovered by FlowGraph::FindLoops. Carries
// per-loop state for field hoisting, invariant hoisting, memop (memset/
// memcopy) detection, induction-variable analysis, and the register allocator.
class Loop
{
    friend FlowGraph;

private:
    typedef JsUtil::BaseDictionary<SymID, StackSym *, JitArenaAllocator, PowerOf2SizePolicy> FieldHoistSymMap;
    typedef JsUtil::BaseDictionary<PropertySym *, Value *, JitArenaAllocator> InitialValueFieldMap;

    Js::ImplicitCallFlags implicitCallFlags;
    Js::LoopFlags loopFlags;
    BasicBlock * headBlock; // loop header; GetHeadBlock asserts it matches blockList.Head()

public:
    Func * topFunc;
    uint32 loopNumber;          // 1-based, assigned in the constructor from func->loopCount
    SList<BasicBlock *> blockList; // blocks belonging to this loop
    Loop * next;                // next loop in the flow graph's loop list
    Loop * parent;              // enclosing loop, or null for an outermost loop
    BasicBlock * landingPad;    // pre-header block (see InsertLandingPad)
    IR::LabelInstr * loopTopLabel;
    // Liveness/type snapshots on entry to the loop header:
    BVSparse<JitArenaAllocator> *varSymsOnEntry;
    BVSparse<JitArenaAllocator> *int32SymsOnEntry;
    BVSparse<JitArenaAllocator> *lossyInt32SymsOnEntry; // see GlobOptData::liveLossyInt32Syms
    BVSparse<JitArenaAllocator> *float64SymsOnEntry;
    BVSparse<JitArenaAllocator> *liveFieldsOnEntry;
#ifdef ENABLE_SIMDJS
    // SIMD_JS
    // live syms upon entering loop header (from pred merge + forced syms + used before defs in loop)
    BVSparse<JitArenaAllocator> *simd128F4SymsOnEntry;
    BVSparse<JitArenaAllocator> *simd128I4SymsOnEntry;
    BVSparse<JitArenaAllocator> *likelySimd128F4SymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-Simd128F4 value, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelySimd128I4SymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-Simd128I4 value, and used before they are defined in the loop
    // syms need to be forced to certain type due to hoisting
    BVSparse<JitArenaAllocator> *forceSimd128F4SymsOnEntry;
    BVSparse<JitArenaAllocator> *forceSimd128I4SymsOnEntry;
#endif
    BVSparse<JitArenaAllocator> *symsUsedBeforeDefined; // stack syms that are live in the landing pad, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelyIntSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-int value, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelyNumberSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-number value, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *forceFloat64SymsOnEntry;
    BVSparse<JitArenaAllocator> *symsDefInLoop;
    BailOutInfo * bailOutInfo;
    IR::BailOutInstr * toPrimitiveSideEffectCheck;
    // Field-hoisting state:
    BVSparse<JitArenaAllocator> * fieldHoistCandidates;
    BVSparse<JitArenaAllocator> * liveInFieldHoistCandidates;
    BVSparse<JitArenaAllocator> * fieldHoistCandidateTypes;
    SListBase<IR::Instr *> prepassFieldHoistInstrCandidates;
    FieldHoistSymMap fieldHoistSymMap;
    IR::Instr * endDisableImplicitCall;
    BVSparse<JitArenaAllocator> * hoistedFields;
    BVSparse<JitArenaAllocator> * hoistedFieldCopySyms;
    BVSparse<JitArenaAllocator> * liveOutFields;
    ValueNumber firstValueNumberInLoop;
    JsArrayKills jsArrayKills;
    BVSparse<JitArenaAllocator> *fieldKilled;
    BVSparse<JitArenaAllocator> *fieldPRESymStore;
    InitialValueFieldMap initialValueFieldMap;
    // Induction-variable / loop-count analysis:
    InductionVariableSet *inductionVariables;
    BasicBlock *dominatingLoopCountableBlock;
    LoopCount *loopCount;
    SymIdToStackSymMap *loopCountBasedBoundBaseSyms;

    bool isDead : 1;
    bool hasDeadStoreCollectionPass : 1;
    bool hasDeadStorePrepass : 1;
    bool hasCall : 1;
    bool hasHoistedFields : 1;
    bool needImplicitCallBailoutChecksForJsArrayCheckHoist : 1;
    bool allFieldsKilled : 1;
    bool isLeaf : 1; // true until a nested loop is attached (ctor initializes to true)
    bool isProcessed : 1; // Set and reset at varying places according to the phase we're in.
    // For example, in the lowerer, it'll be set to true when we process the loopTop for a certain loop

    // Memop (memset/memcopy) candidate descriptors:
    struct MemCopyCandidate;
    struct MemSetCandidate;
    struct MemOpCandidate
    {
        SymID base;                 // base (store target) sym
        SymID index;                // induction-variable sym used as the index
        byte count;
        bool bIndexAlreadyChanged;
        enum MemOpType
        {
            MEMSET,
            MEMCOPY
        } type;
        bool IsMemSet() const { return type == MEMSET; }
        bool IsMemCopy() const { return type == MEMCOPY; }
        // Unchecked downcasts; callers guard with IsMemSet()/IsMemCopy().
        struct Loop::MemCopyCandidate* AsMemCopy();
        struct Loop::MemSetCandidate* AsMemSet();
        MemOpCandidate(MemOpType type) :
            type(type)
        {
        }
    };

    struct MemSetCandidate : public MemOpCandidate
    {
        BailoutConstantValue constant; // constant being stored, when srcSym is null
        StackSym* srcSym;
        MemSetCandidate() : MemOpCandidate(MemOpCandidate::MEMSET), srcSym(nullptr) {}
    };

    struct MemCopyCandidate : public MemOpCandidate
    {
        SymID ldBase;          // base sym of the load side
        StackSym* transferSym; // sym carrying the value from load to store
        byte ldCount;
        MemCopyCandidate() : MemOpCandidate(MemOpCandidate::MEMCOPY) {}
    };

// Iteration helpers over a loop's memop candidate list (loop->memOpInfo->candidates).
#define FOREACH_MEMOP_CANDIDATES_EDITING(data, loop, iterator) FOREACH_SLISTCOUNTED_ENTRY_EDITING(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates, iterator)
#define NEXT_MEMOP_CANDIDATE_EDITING NEXT_SLISTCOUNTED_ENTRY_EDITING
#define FOREACH_MEMOP_CANDIDATES(data, loop) FOREACH_SLISTCOUNTED_ENTRY(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates)
#define NEXT_MEMOP_CANDIDATE NEXT_SLISTCOUNTED_ENTRY
// Opens a scope when the candidate is of the given kind, downcasting it into `data`.
#define MEMOP_CANDIDATE_TYPE_CHECK(candidate, data, type) if(candidate->Is ## type()) {Loop:: ## type ## Candidate* data = candidate->As## type();
#define FOREACH_MEMCOPY_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
#define NEXT_MEMCOPY_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
#define FOREACH_MEMCOPY_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
#define NEXT_MEMCOPY_CANDIDATE }}NEXT_MEMOP_CANDIDATE}
#define FOREACH_MEMSET_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
#define NEXT_MEMSET_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
#define FOREACH_MEMSET_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
#define NEXT_MEMSET_CANDIDATE }}NEXT_MEMOP_CANDIDATE}

    typedef struct
    {
        byte unroll : 7;
        byte isIncremental : 1;
    } InductionVariableChangeInfo;
    typedef JsUtil::BaseDictionary<SymID, InductionVariableChangeInfo, JitArenaAllocator> InductionVariableChangeInfoMap;
    typedef JsUtil::BaseDictionary<byte, IR::Opnd*, JitArenaAllocator> InductionVariableOpndPerUnrollMap;
    typedef SListCounted<MemOpCandidate *> MemOpList;
    typedef struct
    {
        MemOpList *candidates;
        BVSparse<JitArenaAllocator> *inductionVariablesUsedAfterLoop;
        InductionVariableChangeInfoMap *inductionVariableChangeInfoMap;
        InductionVariableOpndPerUnrollMap *inductionVariableOpndPerUnrollMap;
        // This assumes that all memop operations use the same index and have the same length
        // Temporary map to reuse existing startIndexOpnd while emitting
        // 0 = !increment & !alreadyChanged, 1 = !increment & alreadyChanged, 2 = increment & !alreadyChanged, 3 = increment & alreadyChanged
        IR::RegOpnd* startIndexOpndCache[4];
    } MemOpInfo;

    bool doMemOp : 1;
    MemOpInfo *memOpInfo; // lazily allocated — see EnsureMemOpVariablesInitialized

    // Register-allocator bookkeeping for this loop.
    struct RegAlloc
    {
        Lifetime ** loopTopRegContent; // Save off the state of the registers at the loop top
        BVSparse<JitArenaAllocator> * symRegUseBv; // If a lifetime was live in a reg into the loop, did the reg get used before being spilled?
        BVSparse<JitArenaAllocator> * defdInLoopBv; // Was a lifetime defined in the loop?
        BVSparse<JitArenaAllocator> * liveOnBackEdgeSyms; // Is a lifetime live on the back-edge of the loop?
        BitVector regUseBv; // Registers used in this loop so far
        uint32 loopStart; // loopTopLabel->GetNumber()
        uint32 loopEnd; // loopTailBranch->GetNumber()
        uint32 helperLength; // Number of instrs in helper code in loop
        SList<Lifetime *> * extendedLifetime; // Lifetimes to extend for this loop
        SList<Lifetime **> * exitRegContentList; // Linked list of regContents for the exit edges
        bool hasNonOpHelperCall;
        bool hasCall;
        bool hasAirLock; // Do back-edges have airlock blocks?
    } regAlloc;

public:
    Loop(JitArenaAllocator * alloc, Func *func)
        : topFunc(func),
        blockList(alloc),
        parent(nullptr),
        landingPad(nullptr),
        loopTopLabel(nullptr),
        symsUsedBeforeDefined(nullptr),
        likelyIntSymsUsedBeforeDefined(nullptr),
        likelyNumberSymsUsedBeforeDefined(nullptr),
#ifdef ENABLE_SIMDJS
        likelySimd128F4SymsUsedBeforeDefined(nullptr),
        likelySimd128I4SymsUsedBeforeDefined(nullptr),
        forceSimd128F4SymsOnEntry(nullptr),
        forceSimd128I4SymsOnEntry(nullptr),
#endif
        forceFloat64SymsOnEntry(nullptr),
        symsDefInLoop(nullptr),
        fieldHoistCandidateTypes(nullptr),
        fieldHoistSymMap(alloc),
        needImplicitCallBailoutChecksForJsArrayCheckHoist(false),
        inductionVariables(nullptr),
        dominatingLoopCountableBlock(nullptr),
        loopCount(nullptr),
        loopCountBasedBoundBaseSyms(nullptr),
        isDead(false),
        allFieldsKilled(false),
        isLeaf(true),
        isProcessed(false),
        initialValueFieldMap(alloc)
    {
        // Loop numbers are 1-based and assigned in discovery order.
        this->loopNumber = ++func->loopCount;
    }

    void SetHeadBlock(BasicBlock *block) { headBlock = block; }
    BasicBlock * GetHeadBlock() const { Assert(headBlock == blockList.Head()); return headBlock; }

    bool IsDescendentOrSelf(Loop const * loop) const;

    void EnsureMemOpVariablesInitialized();

    Js::ImplicitCallFlags GetImplicitCallFlags();
    void SetImplicitCallFlags(Js::ImplicitCallFlags flags);
    Js::LoopFlags GetLoopFlags() const { return loopFlags; }
    void SetLoopFlags(Js::LoopFlags val) { loopFlags = val; }
    bool CanHoistInvariants() const;
    bool CanDoFieldCopyProp();
    bool CanDoFieldHoist();
    void SetHasCall();
    IR::LabelInstr * GetLoopTopInstr() const;
    void SetLoopTopInstr(IR::LabelInstr * loopTop);
    Func * GetFunc() const { return GetLoopTopInstr()->m_func; }
#if DBG_DUMP
    bool GetHasCall() const { return hasCall; }
    uint GetLoopNumber() const;
#endif
private:
    void InsertLandingPad(FlowGraph *fg);
    bool RemoveBreakBlocks(FlowGraph *fg);
};
// Structure definition cannot be inside Loop in order to use it as a parameter in GlobOpt
// Data needed to emit one memop (memset/memcopy) transformation for a loop.
struct MemOpEmitData
{
    Loop::MemOpCandidate* candidate;                 // the memop candidate being emitted
    IR::Instr* stElemInstr;                          // the store (StElem) instruction of the candidate
    BasicBlock* block;                               // block containing the candidate's instructions
    Loop::InductionVariableChangeInfo inductionVar;  // per-iteration change of the driving induction variable
    IR::BailOutKind bailOutKind;                     // bailout kind to attach to the emitted memop
};
// Emit data for a memset candidate; adds nothing beyond MemOpEmitData and
// exists so memset and memcopy emissions are distinct types.
struct MemSetEmitData : public MemOpEmitData
{
};
// Emit data for a memcopy candidate; additionally tracks the paired load.
struct MemCopyEmitData : public MemOpEmitData
{
    IR::Instr* ldElemInstr;  // the load (LdElem) instruction paired with stElemInstr
};
// Convenience wrappers that iterate the blocks of a function's flow graph
// (func->m_fg) via the corresponding graph-level iterators defined below.
// Each FOREACH_* opens a scope that the matching NEXT_* must close.
#define FOREACH_BLOCK_IN_FUNC(block, func)\
    FOREACH_BLOCK(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC\
    NEXT_BLOCK;
#define FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, func)\
    FOREACH_BLOCK_DEAD_OR_ALIVE(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE\
    NEXT_BLOCK_DEAD_OR_ALIVE;
#define FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func) \
    FOREACH_BLOCK_BACKWARD(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC \
    NEXT_BLOCK_BACKWARD;
#define FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, func) \
    FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE;
// "EDITING" variants cache the next/prev pointer so the current block may be
// removed from the list during iteration.
#define FOREACH_BLOCK_IN_FUNC_EDITING(block, func)\
    FOREACH_BLOCK_EDITING(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC_EDITING\
    NEXT_BLOCK_EDITING;
#define FOREACH_BLOCK_BACKWARD_IN_FUNC_EDITING(block, func)\
    FOREACH_BLOCK_BACKWARD_EDITING(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC_EDITING\
    NEXT_BLOCK_BACKWARD_EDITING;
// Iterates every block in the graph's block list, including deleted ones.
#define FOREACH_BLOCK_ALL(block, graph) \
    for (BasicBlock *block = graph->blockList;\
        block != nullptr;\
        block = block->next)\
    {
#define NEXT_BLOCK_ALL \
    }
// Iterates live blocks only (skips blocks flagged isDeleted).
#define FOREACH_BLOCK(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK \
    NEXT_BLOCK_ALL
// Skips only blocks that are deleted AND not marked dead, i.e. visits live
// blocks plus deleted-but-dead ones.
#define FOREACH_BLOCK_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
    if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_DEAD_OR_ALIVE \
    NEXT_BLOCK_ALL
// Backward iteration from the graph's tail block to the head (nullptr bound).
#define FOREACH_BLOCK_BACKWARD(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD \
    NEXT_BLOCK_BACKWARD_IN_RANGE
#define FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE
// Backward iteration over the range [blockList .. blockLast] inclusive,
// following prev links; blockLast == nullptr means iterate to the list head.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast)\
    {\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
        block != blockStop;\
        block = block->prev)\
    {
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL \
    }}
// Same range, skipping deleted blocks.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Editing variant: captures block->prev before the body runs so the current
// block may be unlinked from the list while iterating.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev)\
    {\
    BasicBlock *blockPrev;\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
        block != blockStop;\
        block = blockPrev)\
    {\
        blockPrev = block->prev;
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING \
    }}
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(block, blockList, blockLast, blockPrev) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING
// Dead-or-alive variant: skips only blocks that are deleted and not dead.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
    if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Forward iteration over live blocks; caches block->next up front so the
// current block can be removed from the graph's list during iteration.
#define FOREACH_BLOCK_EDITING(block, graph)\
    {\
    BasicBlock *blockNext;\
    for (BasicBlock *block = graph->blockList;\
        block != nullptr;\
        block = blockNext)\
    {\
        blockNext = block->next; \
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_EDITING \
    }}
// Backward counterpart of FOREACH_BLOCK_EDITING (caches block->prev).
#define FOREACH_BLOCK_BACKWARD_EDITING(block, graph)\
    {\
    BasicBlock *blockPrev;\
    for (BasicBlock *block = graph->tailBlock;\
        block != nullptr;\
        block = blockPrev)\
    {\
        blockPrev = block->prev; \
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_EDITING \
    }}
// Iterates the live blocks stored in an SList (e.g. a loop's blockList).
#define FOREACH_BLOCK_IN_LIST(block, list)\
    FOREACH_SLIST_ENTRY(BasicBlock*, block, list)\
    {\
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST \
    NEXT_SLIST_ENTRY \
    }
// Editing variant; |iter| allows removal of the current entry.
#define FOREACH_BLOCK_IN_LIST_EDITING(block, list, iter)\
    FOREACH_SLIST_ENTRY_EDITING(BasicBlock*, block, list, iter)\
    {\
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST_EDITING \
    NEXT_SLIST_ENTRY_EDITING \
    }
  816. #define FOREACH_SUCCESSOR_EDGE(edge, block)\
  817. FOREACH_EDGE_IN_LIST(edge, block->GetSuccList())
  818. #define NEXT_SUCCESSOR_EDGE\
  819. NEXT_EDGE_IN_LIST
  820. #define FOREACH_SUCCESSOR_EDGE_EDITING(edge, bloc, iter)\
  821. FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetSuccList(), iter)
  822. #define NEXT_SUCCESSOR_EDGE_EDITING\
  823. NEXT_EDGE_IN_LIST_EDITING
  824. #define FOREACH_PREDECESSOR_EDGE(edge, block)\
  825. FOREACH_EDGE_IN_LIST(edge, block->GetPredList())
  826. #define NEXT_PREDECESSOR_EDGE\
  827. NEXT_EDGE_IN_LIST
  828. #define FOREACH_PREDECESSOR_EDGE_EDITING(edge, block, iter)\
  829. FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetPredList(), iter)
  830. #define NEXT_PREDECESSOR_EDGE_EDITING\
  831. NEXT_EDGE_IN_LIST_EDITING
  832. #define FOREACH_EDGE_IN_LIST(edge, list)\
  833. FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, list)\
  834. {
  835. #define NEXT_EDGE_IN_LIST\
  836. NEXT_SLISTBASECOUNTED_ENTRY }
  837. #define FOREACH_EDGE_IN_LIST_EDITING(edge, list, iter)\
  838. FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, list, iter)\
  839. {\
  840. #define NEXT_EDGE_IN_LIST_EDITING\
  841. NEXT_SLISTBASECOUNTED_ENTRY_EDITING }
// Iterates the successor blocks of |block| by dereferencing each succ edge;
// __edge is a hidden iteration variable introduced by the macro.
#define FOREACH_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetSuccList())\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Editing variant: |iter| lets the body remove the current edge.
#define FOREACH_SUCCESSOR_BLOCK_EDITING(blockSucc, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(__edge, block->GetSuccList(), iter)\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK_EDITING\
    }\
    NEXT_EDGE_IN_LIST_EDITING
// NOTE(review): the "dead" lists appear to hold edges retired from the live
// CFG -- confirm their exact semantics in FlowGraph.cpp.
#define FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadSuccList())\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);
#define NEXT_DEAD_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Iterates the predecessor blocks of |block| via its pred edges.
#define FOREACH_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetPredList())\
    {\
        BasicBlock * blockPred = __edge->GetPred(); \
        AnalysisAssert(blockPred);
#define NEXT_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
#define FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadPredList())\
    {\
        BasicBlock * blockPred = __edge->GetPred(); \
        AnalysisAssert(blockPred);
#define NEXT_DEAD_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Iterates the (live) blocks belonging to a loop's block list.
#define FOREACH_BLOCK_IN_LOOP(block, loop)\
    FOREACH_BLOCK_IN_LIST(block, &loop->blockList)
#define NEXT_BLOCK_IN_LOOP \
    NEXT_BLOCK_IN_LIST
#define FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)\
    FOREACH_BLOCK_IN_LIST_EDITING(block, &loop->blockList, iter)
#define NEXT_BLOCK_IN_LOOP_EDITING \
    NEXT_BLOCK_IN_LIST_EDITING
// Iterates the loops of a function's flow graph; editing-safe (see below).
#define FOREACH_LOOP_IN_FUNC_EDITING(loop, func)\
    FOREACH_LOOP_EDITING(loop, func->m_fg)
#define NEXT_LOOP_IN_FUNC_EDITING\
    NEXT_LOOP_EDITING;
// Walks graph->loopList, caching loop->next before the body runs so the
// current loop may be unlinked from the list during iteration.
#define FOREACH_LOOP_EDITING(loop, graph)\
    {\
    Loop* loopNext;\
    for (Loop* loop = graph->loopList;\
        loop != nullptr;\
        loop = loopNext)\
    {\
        loopNext = loop->next;
#define NEXT_LOOP_EDITING \
    }}