// FlowGraph.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. class BasicBlock;
  7. class FlowEdge;
  8. class Loop;
  9. class Region;
  10. class Func;
  11. class AddPropertyCacheBucket
  12. {
  13. private:
  14. JITTypeHolder initialType;
  15. JITTypeHolder finalType;
  16. public:
  17. AddPropertyCacheBucket() : initialType(nullptr), finalType(nullptr)
  18. #if DBG
  19. , deadStoreUnavailableInitialType(nullptr), deadStoreUnavailableFinalType(nullptr)
  20. #endif
  21. {
  22. }
  23. AddPropertyCacheBucket(const AddPropertyCacheBucket& bucket) :
  24. initialType(bucket.initialType), finalType(bucket.finalType)
  25. #if DBG
  26. , deadStoreUnavailableInitialType(bucket.deadStoreUnavailableInitialType)
  27. , deadStoreUnavailableFinalType(bucket.deadStoreUnavailableFinalType)
  28. #endif
  29. {
  30. }
  31. bool operator!=(const AddPropertyCacheBucket& bucket) const
  32. {
  33. return this->initialType != bucket.initialType || this->finalType != bucket.finalType;
  34. }
  35. bool operator==(const AddPropertyCacheBucket& bucket) const
  36. {
  37. return this->initialType == bucket.initialType && this->finalType == bucket.finalType;
  38. }
  39. void Copy(AddPropertyCacheBucket *pNew) const
  40. {
  41. pNew->initialType = this->initialType;
  42. pNew->finalType = this->finalType;
  43. #if DBG
  44. pNew->deadStoreUnavailableInitialType = this->deadStoreUnavailableInitialType;
  45. pNew->deadStoreUnavailableFinalType = this->deadStoreUnavailableFinalType;
  46. #endif
  47. }
  48. JITTypeHolder GetInitialType() const { return this->initialType; }
  49. JITTypeHolder GetFinalType() const { return this->finalType; }
  50. void SetInitialType(JITTypeHolder type) { this->initialType = type; }
  51. void SetFinalType(JITTypeHolder type) { this->finalType = type; }
  52. #if DBG_DUMP
  53. void Dump() const;
  54. #endif
  55. #ifdef DBG
  56. JITTypeHolder deadStoreUnavailableInitialType;
  57. JITTypeHolder deadStoreUnavailableFinalType;
  58. #endif
  59. };
  60. class ObjTypeGuardBucket
  61. {
  62. private:
  63. BVSparse<JitArenaAllocator>* guardedPropertyOps;
  64. JITTypeHolder monoGuardType;
  65. public:
  66. ObjTypeGuardBucket() : guardedPropertyOps(nullptr), monoGuardType(nullptr) {}
  67. ObjTypeGuardBucket(BVSparse<JitArenaAllocator>* guardedPropertyOps) : monoGuardType(nullptr)
  68. {
  69. this->guardedPropertyOps = (guardedPropertyOps != nullptr ? guardedPropertyOps->CopyNew() : nullptr);
  70. }
  71. void Copy(ObjTypeGuardBucket *pNew) const
  72. {
  73. pNew->guardedPropertyOps = this->guardedPropertyOps ? this->guardedPropertyOps->CopyNew() : nullptr;
  74. pNew->monoGuardType = this->monoGuardType;
  75. }
  76. BVSparse<JitArenaAllocator> *GetGuardedPropertyOps() const { return this->guardedPropertyOps; }
  77. void SetGuardedPropertyOps(BVSparse<JitArenaAllocator> *guardedPropertyOps) { this->guardedPropertyOps = guardedPropertyOps; }
  78. void AddToGuardedPropertyOps(uint propertyOpId) { Assert(this->guardedPropertyOps != nullptr); this->guardedPropertyOps->Set(propertyOpId); }
  79. bool NeedsMonoCheck() const { return this->monoGuardType != nullptr; }
  80. void SetMonoGuardType(JITTypeHolder type) { this->monoGuardType = type; }
  81. JITTypeHolder GetMonoGuardType() const { return this->monoGuardType; }
  82. #if DBG_DUMP
  83. void Dump() const;
  84. #endif
  85. };
  86. class ObjWriteGuardBucket
  87. {
  88. private:
  89. BVSparse<JitArenaAllocator>* writeGuards;
  90. public:
  91. ObjWriteGuardBucket() : writeGuards(nullptr) {}
  92. ObjWriteGuardBucket(BVSparse<JitArenaAllocator>* writeGuards) { this->writeGuards = (writeGuards != nullptr ? writeGuards->CopyNew() : nullptr); }
  93. void Copy(ObjWriteGuardBucket *pNew) const
  94. {
  95. pNew->writeGuards = this->writeGuards ? this->writeGuards->CopyNew() : nullptr;
  96. }
  97. BVSparse<JitArenaAllocator> *GetWriteGuards() const { return this->writeGuards; }
  98. void SetWriteGuards(BVSparse<JitArenaAllocator> *writeGuards) { this->writeGuards = writeGuards; }
  99. void AddToWriteGuards(uint writeGuardId) { Assert(this->writeGuards != nullptr); this->writeGuards->Set(writeGuardId); }
  100. #if DBG_DUMP
  101. void Dump() const;
  102. #endif
  103. };
  104. class FlowGraph
  105. {
  106. friend Loop;
  107. public:
  108. static FlowGraph * New(Func *func, JitArenaAllocator *alloc);
  109. FlowGraph(Func *func, JitArenaAllocator *fgAlloc) :
  110. func(func),
  111. alloc(fgAlloc),
  112. blockList(nullptr),
  113. blockCount(0),
  114. tailBlock(nullptr),
  115. loopList(nullptr),
  116. catchLabelStack(nullptr),
  117. hasBackwardPassInfo(false),
  118. hasLoop(false),
  119. implicitCallFlags(Js::ImplicitCall_HasNoInfo)
  120. {
  121. }
  122. void Build(void);
  123. void Destroy(void);
  124. void RunPeeps();
  125. BasicBlock * AddBlock(IR::Instr * firstInstr, IR::Instr * lastInstr, BasicBlock * nextBlock);
  126. FlowEdge * AddEdge(BasicBlock * predBlock, BasicBlock * succBlock);
  127. BasicBlock * InsertCompensationCodeForBlockMove(FlowEdge * edge, // Edge where compensation code needs to be inserted
  128. bool insertCompensationBlockToLoopList = false,
  129. bool sinkBlockLoop = false // Loop to which compensation block belongs
  130. );
  131. BasicBlock * InsertAirlockBlock(FlowEdge * edge);
  132. void InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget);
  133. void RemoveUnreachableBlocks();
  134. bool RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt = nullptr);
  135. IR::Instr * RemoveInstr(IR::Instr *instr, GlobOpt * globOpt);
  136. void RemoveBlock(BasicBlock *block, GlobOpt * globOpt = nullptr, bool tailDuping = false);
  137. BasicBlock * SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr);
  138. Func* GetFunc() { return func;};
  139. static void SafeRemoveInstr(IR::Instr *instr);
  140. void SortLoopLists();
  141. FlowEdge * FindEdge(BasicBlock *predBlock, BasicBlock *succBlock);
  142. #if DBG_DUMP
  143. void Dump();
  144. void Dump(bool verbose, const char16 *form);
  145. #endif
  146. JitArenaAllocator * alloc;
  147. BasicBlock * blockList;
  148. BasicBlock * tailBlock;
  149. Loop * loopList;
  150. SList<IR::LabelInstr*> * catchLabelStack;
  151. bool hasBackwardPassInfo;
  152. bool hasLoop;
  153. Js::ImplicitCallFlags implicitCallFlags;
  154. private:
  155. void FindLoops(void);
  156. bool CanonicalizeLoops(void);
  157. void BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop = nullptr);
  158. void WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc);
  159. void AddBlockToLoop(BasicBlock *block, Loop *loop);
  160. void UpdateRegionForBlock(BasicBlock *block, Region **blockToRegion);
  161. Region * PropagateRegionFromPred(BasicBlock *block, BasicBlock *predBlock, Region *predRegion, IR::Instr * &tryInstr);
  162. IR::Instr * PeepCm(IR::Instr *instr);
  163. IR::Instr * PeepTypedCm(IR::Instr *instr);
  164. void MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock);
  165. bool UnsignedCmpPeep(IR::Instr *cmpInstr);
  166. bool IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1);
  167. #if DBG
  168. void VerifyLoopGraph();
  169. #endif
  170. private:
  171. void InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, bool origBranchSrcOpndIsJITOpt, uint32 origBranchSrcSymId);
  172. private:
  173. Func * func;
  174. unsigned int blockCount;
  175. };
  176. class BasicBlock
  177. {
  178. friend class FlowGraph;
  179. friend class Loop;
  180. public:
  181. static BasicBlock * New(FlowGraph * graph);
  182. void AddPred(FlowEdge * edge, FlowGraph * graph);
  183. void AddSucc(FlowEdge * edge, FlowGraph * graph);
  184. void RemovePred(BasicBlock *block, FlowGraph * graph);
  185. void RemoveSucc(BasicBlock *block, FlowGraph * graph);
  186. void RemoveDeadPred(BasicBlock *block, FlowGraph * graph);
  187. void RemoveDeadSucc(BasicBlock *block, FlowGraph * graph);
  188. void UnlinkPred(BasicBlock *block);
  189. void UnlinkSucc(BasicBlock *block);
  190. void UnlinkInstr(IR::Instr * Instr);
  191. void RemoveInstr(IR::Instr * instr);
  192. void InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr);
  193. void InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr);
  194. void InsertAfter(IR::Instr * newInstr);
  195. void InvertBranch(IR::BranchInstr *branch);
  196. IR::Instr * GetFirstInstr(void) const
  197. {
  198. return firstInstr;
  199. }
  200. void SetFirstInstr(IR::Instr * instr)
  201. {
  202. firstInstr = instr;
  203. }
  204. IR::Instr * GetLastInstr(void)
  205. {
  206. BasicBlock *blNext = this->next;
  207. if (blNext)
  208. {
  209. return blNext->firstInstr->m_prev;
  210. }
  211. else
  212. {
  213. return this->func->m_exitInstr;
  214. }
  215. }
  216. void SetLastInstr(IR::Instr * instr)
  217. {
  218. // Intentionally empty
  219. }
  220. SListBaseCounted<FlowEdge *> * GetPredList(void)
  221. {
  222. return &predList;
  223. }
  224. SListBaseCounted<FlowEdge *> * GetSuccList(void)
  225. {
  226. return &succList;
  227. }
  228. SListBaseCounted<FlowEdge *> * GetDeadPredList(void)
  229. {
  230. return &deadPredList;
  231. }
  232. SListBaseCounted<FlowEdge *> * GetDeadSuccList(void)
  233. {
  234. return &deadSuccList;
  235. }
  236. unsigned int GetBlockNum(void) const
  237. {
  238. return number;
  239. }
  240. void SetBlockNum(unsigned int num)
  241. {
  242. number = num;
  243. }
  244. BasicBlock * GetPrev()
  245. {
  246. BasicBlock *block = this;
  247. do {
  248. block = block->prev;
  249. } while (block->isDeleted);
  250. return block;
  251. }
  252. BasicBlock * GetNext()
  253. {
  254. BasicBlock *block = this;
  255. do {
  256. block = block->next;
  257. } while (block && block->isDeleted);
  258. return block;
  259. }
  260. uint IncrementDataUseCount()
  261. {
  262. return ++this->dataUseCount;
  263. }
  264. uint DecrementDataUseCount()
  265. {
  266. Assert(this->dataUseCount != 0);
  267. return --this->dataUseCount;
  268. }
  269. uint GetDataUseCount()
  270. {
  271. return this->dataUseCount;
  272. }
  273. void SetDataUseCount(uint count)
  274. {
  275. this->dataUseCount = count;
  276. }
  277. bool IsLandingPad();
  278. #if DBG_DUMP
  279. void DumpHeader(bool insertCR = true);
  280. void Dump();
  281. #endif
  282. public:
  283. BasicBlock * next;
  284. BasicBlock * prev;
  285. Loop * loop;
  286. uint8 isDeleted:1;
  287. uint8 isDead:1;
  288. uint8 isLoopHeader:1;
  289. uint8 hasCall:1;
  290. uint8 isVisited:1;
  291. uint8 isAirLockCompensationBlock:1;
  292. uint8 beginsBailOnNoProfile:1;
  293. #ifdef DBG
  294. uint8 isBreakBlock:1;
  295. uint8 isAirLockBlock:1;
  296. uint8 isBreakCompensationBlockAtSink:1;
  297. uint8 isBreakCompensationBlockAtSource:1;
  298. #endif
  299. // Deadstore data
  300. BVSparse<JitArenaAllocator> * upwardExposedUses;
  301. BVSparse<JitArenaAllocator> * upwardExposedFields;
  302. BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout;
  303. BVSparse<JitArenaAllocator> * fieldHoistCandidates;
  304. BVSparse<JitArenaAllocator> * slotDeadStoreCandidates;
  305. TempNumberTracker * tempNumberTracker;
  306. TempObjectTracker * tempObjectTracker;
  307. #if DBG
  308. TempObjectVerifyTracker * tempObjectVerifyTracker;
  309. #endif
  310. HashTable<AddPropertyCacheBucket> * stackSymToFinalType;
  311. HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties; // Dead store pass only
  312. HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap; // Backward pass only
  313. BVSparse<JitArenaAllocator> * noImplicitCallUses;
  314. BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses;
  315. BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses;
  316. BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses;
  317. BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses;
  318. BVSparse<JitArenaAllocator> * cloneStrCandidates;
  319. BVSparse<JitArenaAllocator> * couldRemoveNegZeroBailoutForDef; // Deadstore pass only
  320. Loop * backwardPassCurrentLoop;
  321. // Global optimizer data
  322. GlobOptBlockData globOptData;
  323. // Bailout data
  324. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed;
  325. #if DBG
  326. StackSym ** byteCodeRestoreSyms;
  327. #endif
  328. IntOverflowDoesNotMatterRange * intOverflowDoesNotMatterRange;
  329. private:
  330. BasicBlock(JitArenaAllocator * alloc, Func *func) :
  331. next(nullptr),
  332. prev(nullptr),
  333. firstInstr(nullptr),
  334. number(k_InvalidNum),
  335. loop(nullptr),
  336. isDeleted(false),
  337. isDead(false),
  338. isLoopHeader(false),
  339. hasCall(false),
  340. upwardExposedUses(nullptr),
  341. upwardExposedFields(nullptr),
  342. typesNeedingKnownObjectLayout(nullptr),
  343. slotDeadStoreCandidates(nullptr),
  344. tempNumberTracker(nullptr),
  345. tempObjectTracker(nullptr),
  346. #if DBG
  347. tempObjectVerifyTracker(nullptr),
  348. #endif
  349. stackSymToFinalType(nullptr),
  350. stackSymToGuardedProperties(nullptr),
  351. stackSymToWriteGuardsMap(nullptr),
  352. noImplicitCallUses(nullptr),
  353. noImplicitCallNoMissingValuesUses(nullptr),
  354. noImplicitCallNativeArrayUses(nullptr),
  355. noImplicitCallJsArrayHeadSegmentSymUses(nullptr),
  356. noImplicitCallArrayLengthSymUses(nullptr),
  357. cloneStrCandidates(nullptr),
  358. couldRemoveNegZeroBailoutForDef(nullptr),
  359. byteCodeUpwardExposedUsed(nullptr),
  360. isAirLockCompensationBlock(false),
  361. beginsBailOnNoProfile(false),
  362. #if DBG
  363. byteCodeRestoreSyms(nullptr),
  364. isBreakBlock(false),
  365. isAirLockBlock(false),
  366. isBreakCompensationBlockAtSource(false),
  367. isBreakCompensationBlockAtSink(false),
  368. #endif
  369. fieldHoistCandidates(nullptr),
  370. dataUseCount(0),
  371. intOverflowDoesNotMatterRange(nullptr),
  372. func(func),
  373. globOptData(func)
  374. {
  375. }
  376. void RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead = false);
  377. void RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead = false);
  378. void UnlinkPred(BasicBlock *block, bool doCleanSucc);
  379. void UnlinkSucc(BasicBlock *block, bool doCleanPred);
  380. #if DBG_DUMP
  381. bool Contains(IR::Instr * instr);
  382. #endif
  383. private:
  384. IR::Instr * firstInstr;
  385. SListBaseCounted<FlowEdge *> predList;
  386. SListBaseCounted<FlowEdge *> succList;
  387. SListBaseCounted<FlowEdge *> deadPredList;
  388. SListBaseCounted<FlowEdge *> deadSuccList;
  389. Func * func;
  390. unsigned int number;
  391. uint dataUseCount;
  392. static const unsigned int k_InvalidNum = (unsigned)-1;
  393. };
  394. class FlowEdge
  395. {
  396. public:
  397. static FlowEdge * New(FlowGraph * graph);
  398. FlowEdge() :
  399. predBlock(nullptr),
  400. succBlock(nullptr),
  401. pathDependentInfo(nullptr)
  402. {
  403. }
  404. BasicBlock * GetPred(void) const
  405. {
  406. return predBlock;
  407. }
  408. void SetPred(BasicBlock * block)
  409. {
  410. predBlock = block;
  411. }
  412. BasicBlock * GetSucc(void) const
  413. {
  414. return succBlock;
  415. }
  416. void SetSucc(BasicBlock * block)
  417. {
  418. succBlock = block;
  419. }
  420. PathDependentInfo * GetPathDependentInfo() const
  421. {
  422. return pathDependentInfo;
  423. }
  424. void SetPathDependentInfo(const PathDependentInfo &info, JitArenaAllocator *const alloc)
  425. {
  426. Assert(info.HasInfo());
  427. if (!pathDependentInfo)
  428. {
  429. pathDependentInfo = JitAnew(alloc, PathDependentInfo, info);
  430. }
  431. else
  432. {
  433. *pathDependentInfo = info;
  434. }
  435. }
  436. void ClearPathDependentInfo(JitArenaAllocator * alloc)
  437. {
  438. JitAdelete(alloc, pathDependentInfo);
  439. pathDependentInfo = nullptr;
  440. }
  441. private:
  442. BasicBlock * predBlock;
  443. BasicBlock * succBlock;
  444. // Only valid during globopt
  445. PathDependentInfo * pathDependentInfo;
  446. };
  447. class Loop
  448. {
  449. friend FlowGraph;
  450. private:
  451. typedef JsUtil::BaseDictionary<SymID, StackSym *, JitArenaAllocator, PowerOf2SizePolicy> FieldHoistSymMap;
  452. typedef JsUtil::BaseDictionary<PropertySym *, Value *, JitArenaAllocator> InitialValueFieldMap;
  453. Js::ImplicitCallFlags implicitCallFlags;
  454. Js::LoopFlags loopFlags;
  455. BasicBlock * headBlock;
  456. public:
  457. Func * topFunc;
  458. uint32 loopNumber;
  459. SList<BasicBlock *> blockList;
  460. Loop * next;
  461. Loop * parent;
  462. BasicBlock * landingPad;
  463. IR::LabelInstr * loopTopLabel;
  464. BVSparse<JitArenaAllocator> *varSymsOnEntry;
  465. BVSparse<JitArenaAllocator> *int32SymsOnEntry;
  466. BVSparse<JitArenaAllocator> *lossyInt32SymsOnEntry; // see GlobOptData::liveLossyInt32Syms
  467. BVSparse<JitArenaAllocator> *float64SymsOnEntry;
  468. BVSparse<JitArenaAllocator> *liveFieldsOnEntry;
  469. // SIMD_JS
  470. // live syms upon entering loop header (from pred merge + forced syms + used before defs in loop)
  471. BVSparse<JitArenaAllocator> *simd128F4SymsOnEntry;
  472. BVSparse<JitArenaAllocator> *simd128I4SymsOnEntry;
  473. BVSparse<JitArenaAllocator> *symsUsedBeforeDefined; // stack syms that are live in the landing pad, and used before they are defined in the loop
  474. BVSparse<JitArenaAllocator> *likelyIntSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-int value, and used before they are defined in the loop
  475. BVSparse<JitArenaAllocator> *likelyNumberSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-number value, and used before they are defined in the loop
  476. // SIMD_JS
  477. BVSparse<JitArenaAllocator> *likelySimd128F4SymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-Simd128F4 value, and used before they are defined in the loop
  478. BVSparse<JitArenaAllocator> *likelySimd128I4SymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-Simd128I4 value, and used before they are defined in the loop
  479. BVSparse<JitArenaAllocator> *forceFloat64SymsOnEntry;
  480. // SIMD_JS
  481. // syms need to be forced to certain type due to hoisting
  482. BVSparse<JitArenaAllocator> *forceSimd128F4SymsOnEntry;
  483. BVSparse<JitArenaAllocator> *forceSimd128I4SymsOnEntry;
  484. BVSparse<JitArenaAllocator> *symsDefInLoop;
  485. BailOutInfo * bailOutInfo;
  486. IR::BailOutInstr * toPrimitiveSideEffectCheck;
  487. BVSparse<JitArenaAllocator> * fieldHoistCandidates;
  488. BVSparse<JitArenaAllocator> * liveInFieldHoistCandidates;
  489. BVSparse<JitArenaAllocator> * fieldHoistCandidateTypes;
  490. SListBase<IR::Instr *> prepassFieldHoistInstrCandidates;
  491. FieldHoistSymMap fieldHoistSymMap;
  492. IR::Instr * endDisableImplicitCall;
  493. BVSparse<JitArenaAllocator> * hoistedFields;
  494. BVSparse<JitArenaAllocator> * hoistedFieldCopySyms;
  495. BVSparse<JitArenaAllocator> * liveOutFields;
  496. ValueNumber firstValueNumberInLoop;
  497. JsArrayKills jsArrayKills;
  498. BVSparse<JitArenaAllocator> *fieldKilled;
  499. BVSparse<JitArenaAllocator> *fieldPRESymStore;
  500. InitialValueFieldMap initialValueFieldMap;
  501. InductionVariableSet *inductionVariables;
  502. BasicBlock *dominatingLoopCountableBlock;
  503. LoopCount *loopCount;
  504. SymIdToStackSymMap *loopCountBasedBoundBaseSyms;
  505. bool isDead : 1;
  506. bool hasDeadStoreCollectionPass : 1;
  507. bool hasDeadStorePrepass : 1;
  508. bool hasCall : 1;
  509. bool hasHoistedFields : 1;
  510. bool needImplicitCallBailoutChecksForJsArrayCheckHoist : 1;
  511. bool allFieldsKilled : 1;
  512. bool isLeaf : 1;
  513. bool isProcessed : 1; // Set and reset at varying places according to the phase we're in.
  514. // For example, in the lowerer, it'll be set to true when we process the loopTop for a certain loop
  515. struct MemCopyCandidate;
  516. struct MemSetCandidate;
  517. struct MemOpCandidate
  518. {
  519. SymID base;
  520. SymID index;
  521. byte count;
  522. bool bIndexAlreadyChanged;
  523. enum MemOpType
  524. {
  525. MEMSET,
  526. MEMCOPY
  527. } type;
  528. bool IsMemSet() const { return type == MEMSET; }
  529. bool IsMemCopy() const { return type == MEMCOPY; }
  530. struct Loop::MemCopyCandidate* AsMemCopy();
  531. struct Loop::MemSetCandidate* AsMemSet();
  532. MemOpCandidate(MemOpType type) :
  533. type(type)
  534. {
  535. }
  536. };
  537. struct MemSetCandidate : public MemOpCandidate
  538. {
  539. BailoutConstantValue constant;
  540. StackSym* srcSym;
  541. MemSetCandidate() : MemOpCandidate(MemOpCandidate::MEMSET), srcSym(nullptr) {}
  542. };
  543. struct MemCopyCandidate : public MemOpCandidate
  544. {
  545. SymID ldBase;
  546. StackSym* transferSym;
  547. byte ldCount;
  548. MemCopyCandidate() : MemOpCandidate(MemOpCandidate::MEMCOPY) {}
  549. };
  550. #define FOREACH_MEMOP_CANDIDATES_EDITING(data, loop, iterator) FOREACH_SLISTCOUNTED_ENTRY_EDITING(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates, iterator)
  551. #define NEXT_MEMOP_CANDIDATE_EDITING NEXT_SLISTCOUNTED_ENTRY_EDITING
  552. #define FOREACH_MEMOP_CANDIDATES(data, loop) FOREACH_SLISTCOUNTED_ENTRY(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates)
  553. #define NEXT_MEMOP_CANDIDATE NEXT_SLISTCOUNTED_ENTRY
  554. #define MEMOP_CANDIDATE_TYPE_CHECK(candidate, data, type) if(candidate->Is ## type()) {Loop:: ## type ## Candidate* data = candidate->As## type();
  555. #define FOREACH_MEMCOPY_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
  556. #define NEXT_MEMCOPY_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
  557. #define FOREACH_MEMCOPY_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
  558. #define NEXT_MEMCOPY_CANDIDATE }}NEXT_MEMOP_CANDIDATE}
  559. #define FOREACH_MEMSET_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
  560. #define NEXT_MEMSET_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
  561. #define FOREACH_MEMSET_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
  562. #define NEXT_MEMSET_CANDIDATE }}NEXT_MEMOP_CANDIDATE}
  563. typedef struct
  564. {
  565. byte unroll : 7;
  566. byte isIncremental : 1;
  567. } InductionVariableChangeInfo;
  568. typedef JsUtil::BaseDictionary<SymID, InductionVariableChangeInfo, JitArenaAllocator> InductionVariableChangeInfoMap;
  569. typedef JsUtil::BaseDictionary<byte, IR::Opnd*, JitArenaAllocator> InductionVariableOpndPerUnrollMap;
  570. typedef SListCounted<MemOpCandidate *> MemOpList;
  571. typedef struct
  572. {
  573. MemOpList *candidates;
  574. BVSparse<JitArenaAllocator> *inductionVariablesUsedAfterLoop;
  575. InductionVariableChangeInfoMap *inductionVariableChangeInfoMap;
  576. InductionVariableOpndPerUnrollMap *inductionVariableOpndPerUnrollMap;
  577. // This assumes that all memop operations use the same index and have the same length
  578. // Temporary map to reuse existing startIndexOpnd while emitting
  579. // 0 = !increment & !alreadyChanged, 1 = !increment & alreadyChanged, 2 = increment & !alreadyChanged, 3 = increment & alreadyChanged
  580. IR::RegOpnd* startIndexOpndCache[4];
  581. } MemOpInfo;
  582. bool doMemOp : 1;
  583. MemOpInfo *memOpInfo;
  584. struct RegAlloc
  585. {
  586. Lifetime ** loopTopRegContent; // Save off the state of the registers at the loop top
  587. BVSparse<JitArenaAllocator> * symRegUseBv; // If a lifetime was live in a reg into the loop, did the reg get used before being spilled?
  588. BVSparse<JitArenaAllocator> * defdInLoopBv; // Was a lifetime defined in the loop?
  589. BVSparse<JitArenaAllocator> * liveOnBackEdgeSyms; // Is a lifetime live on the back-edge of the loop?
  590. BitVector regUseBv; // Registers used in this loop so far
  591. uint32 loopStart; // loopTopLabel->GetNumber()
  592. uint32 loopEnd; // loopTailBranch->GetNumber()
  593. uint32 helperLength; // Number of instrs in helper code in loop
  594. SList<Lifetime *> * extendedLifetime; // Lifetimes to extend for this loop
  595. SList<Lifetime **> * exitRegContentList; // Linked list of regContents for the exit edges
  596. bool hasNonOpHelperCall;
  597. bool hasCall;
  598. bool hasAirLock; // Do back-edges have airlock blocks?
  599. } regAlloc;
  600. public:
  601. Loop(JitArenaAllocator * alloc, Func *func)
  602. : topFunc(func),
  603. blockList(alloc),
  604. parent(nullptr),
  605. landingPad(nullptr),
  606. loopTopLabel(nullptr),
  607. symsUsedBeforeDefined(nullptr),
  608. likelyIntSymsUsedBeforeDefined(nullptr),
  609. likelyNumberSymsUsedBeforeDefined(nullptr),
  610. likelySimd128F4SymsUsedBeforeDefined(nullptr),
  611. likelySimd128I4SymsUsedBeforeDefined(nullptr),
  612. forceFloat64SymsOnEntry(nullptr),
  613. forceSimd128F4SymsOnEntry(nullptr),
  614. forceSimd128I4SymsOnEntry(nullptr),
  615. symsDefInLoop(nullptr),
  616. fieldHoistCandidateTypes(nullptr),
  617. fieldHoistSymMap(alloc),
  618. needImplicitCallBailoutChecksForJsArrayCheckHoist(false),
  619. inductionVariables(nullptr),
  620. dominatingLoopCountableBlock(nullptr),
  621. loopCount(nullptr),
  622. loopCountBasedBoundBaseSyms(nullptr),
  623. isDead(false),
  624. allFieldsKilled(false),
  625. isLeaf(true),
  626. isProcessed(false),
  627. initialValueFieldMap(alloc)
  628. {
  629. this->loopNumber = ++func->loopCount;
  630. }
  631. void SetHeadBlock(BasicBlock *block) { headBlock = block; }
  632. BasicBlock * GetHeadBlock() const { Assert(headBlock == blockList.Head()); return headBlock; }
  633. bool IsDescendentOrSelf(Loop const * loop) const;
  634. void EnsureMemOpVariablesInitialized();
  635. Js::ImplicitCallFlags GetImplicitCallFlags();
  636. void SetImplicitCallFlags(Js::ImplicitCallFlags flags);
  637. Js::LoopFlags GetLoopFlags() const { return loopFlags; }
  638. void SetLoopFlags(Js::LoopFlags val) { loopFlags = val; }
  639. bool CanHoistInvariants();
  640. bool CanDoFieldCopyProp();
  641. bool CanDoFieldHoist();
  642. void SetHasCall();
  643. IR::LabelInstr * GetLoopTopInstr() const;
  644. void SetLoopTopInstr(IR::LabelInstr * loopTop);
  645. Func * GetFunc() const { return GetLoopTopInstr()->m_func; }
  646. #if DBG_DUMP
  647. bool GetHasCall() const { return hasCall; }
  648. uint GetLoopNumber() const;
  649. #endif
  650. private:
  651. void InsertLandingPad(FlowGraph *fg);
  652. bool RemoveBreakBlocks(FlowGraph *fg);
  653. };
  654. // Structure definition cannot be inside Loop in order to use it as a parameter in GlobOpt
  655. struct MemOpEmitData
  656. {
  657. Loop::MemOpCandidate* candidate;
  658. IR::Instr* stElemInstr;
  659. BasicBlock* block;
  660. Loop::InductionVariableChangeInfo inductionVar;
  661. IR::BailOutKind bailOutKind;
  662. };
  663. struct MemSetEmitData : public MemOpEmitData
  664. {
  665. };
  666. struct MemCopyEmitData : public MemOpEmitData
  667. {
  668. IR::Instr* ldElemInstr;
  669. };
// Per-function block iteration: thin forwarders onto the flow-graph
// iterators below, resolving the graph via func->m_fg.
#define FOREACH_BLOCK_IN_FUNC(block, func)\
    FOREACH_BLOCK(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC\
    NEXT_BLOCK;

// Same, but also visits blocks that are deleted-yet-marked-dead.
#define FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, func)\
    FOREACH_BLOCK_DEAD_OR_ALIVE(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE\
    NEXT_BLOCK_DEAD_OR_ALIVE;

// Backward (tail-to-head) variants.
#define FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func) \
    FOREACH_BLOCK_BACKWARD(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC \
    NEXT_BLOCK_BACKWARD;

#define FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, func) \
    FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE;

// "EDITING" variants are safe against removal of the current block
// while iterating (the successor is captured before the body runs).
#define FOREACH_BLOCK_IN_FUNC_EDITING(block, func)\
    FOREACH_BLOCK_EDITING(block, func->m_fg)
#define NEXT_BLOCK_IN_FUNC_EDITING\
    NEXT_BLOCK_EDITING;

#define FOREACH_BLOCK_BACKWARD_IN_FUNC_EDITING(block, func)\
    FOREACH_BLOCK_BACKWARD_EDITING(block, func->m_fg)
#define NEXT_BLOCK_BACKWARD_IN_FUNC_EDITING\
    NEXT_BLOCK_BACKWARD_EDITING;
// Walk every block in the graph's linked list, including deleted ones.
#define FOREACH_BLOCK_ALL(block, graph) \
    for (BasicBlock *block = graph->blockList;\
    block != nullptr;\
    block = block->next)\
    {
#define NEXT_BLOCK_ALL \
    }

// Walk live blocks only: deleted blocks are skipped.
#define FOREACH_BLOCK(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK \
    NEXT_BLOCK_ALL

// Walk live blocks plus deleted blocks that are still flagged isDead
// (skips only blocks that are deleted and not dead).
#define FOREACH_BLOCK_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
    if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_DEAD_OR_ALIVE \
    NEXT_BLOCK_ALL

// Backward traversal over the whole graph, from tailBlock to the head.
#define FOREACH_BLOCK_BACKWARD(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD \
    NEXT_BLOCK_BACKWARD_IN_RANGE

#define FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE
// Backward walk over [blockList .. blockLast] inclusive: the stop sentinel is
// blockLast->prev (or nullptr when blockLast is null, i.e. walk to the head).
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast)\
    {\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
    block != blockStop;\
    block = block->prev)\
    {
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL \
    }}

// Range walk skipping deleted blocks.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL

// Editing-safe range walk: blockPrev is latched before the body runs, so the
// body may unlink the current block.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev)\
    {\
    BasicBlock *blockPrev;\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
    block != blockStop;\
    block = blockPrev)\
    {\
    blockPrev = block->prev;
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING \
    }}

#define FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(block, blockList, blockLast, blockPrev) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev) \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING

// Range walk that skips only blocks that are deleted and not marked dead.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
    if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Forward walk safe against deletion of the current block: the next pointer
// is captured into blockNext before the loop body executes.
#define FOREACH_BLOCK_EDITING(block, graph)\
    {\
    BasicBlock *blockNext;\
    for (BasicBlock *block = graph->blockList;\
    block != nullptr;\
    block = blockNext)\
    {\
    blockNext = block->next; \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_EDITING \
    }}

// Backward equivalent, latching blockPrev before the body runs.
#define FOREACH_BLOCK_BACKWARD_EDITING(block, graph)\
    {\
    BasicBlock *blockPrev;\
    for (BasicBlock *block = graph->tailBlock;\
    block != nullptr;\
    block = blockPrev)\
    {\
    blockPrev = block->prev; \
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_EDITING \
    }}
// Iterate live blocks stored in an SList of BasicBlock*, skipping deleted ones.
#define FOREACH_BLOCK_IN_LIST(block, list)\
    FOREACH_SLIST_ENTRY(BasicBlock*, block, list)\
    {\
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST \
    NEXT_SLIST_ENTRY \
    }

// Editing-safe SList iteration (iter allows removal during traversal).
#define FOREACH_BLOCK_IN_LIST_EDITING(block, list, iter)\
    FOREACH_SLIST_ENTRY_EDITING(BasicBlock*, block, list, iter)\
    {\
    if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST_EDITING \
    NEXT_SLIST_ENTRY_EDITING \
    }
// Iterate the outgoing flow edges of a block.
#define FOREACH_SUCCESSOR_EDGE(edge, block)\
    FOREACH_EDGE_IN_LIST(edge, block->GetSuccList())
#define NEXT_SUCCESSOR_EDGE\
    NEXT_EDGE_IN_LIST
  794. #define FOREACH_SUCCESSOR_EDGE_EDITING(edge, bloc, iter)\
  795. FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetSuccList(), iter)
  796. #define NEXT_SUCCESSOR_EDGE_EDITING\
  797. NEXT_EDGE_IN_LIST_EDITING
// Iterate the incoming flow edges of a block.
#define FOREACH_PREDECESSOR_EDGE(edge, block)\
    FOREACH_EDGE_IN_LIST(edge, block->GetPredList())
#define NEXT_PREDECESSOR_EDGE\
    NEXT_EDGE_IN_LIST

// Editing-safe variant (iter permits removal during traversal).
#define FOREACH_PREDECESSOR_EDGE_EDITING(edge, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetPredList(), iter)
#define NEXT_PREDECESSOR_EDGE_EDITING\
    NEXT_EDGE_IN_LIST_EDITING
// Iterate a counted SList of FlowEdge*; the opening brace is balanced by the
// closing brace in NEXT_EDGE_IN_LIST.
#define FOREACH_EDGE_IN_LIST(edge, list)\
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, list)\
    {
#define NEXT_EDGE_IN_LIST\
    NEXT_SLISTBASECOUNTED_ENTRY }
  811. #define FOREACH_EDGE_IN_LIST_EDITING(edge, list, iter)\
  812. FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, list, iter)\
  813. {\
  814. #define NEXT_EDGE_IN_LIST_EDITING\
  815. NEXT_SLISTBASECOUNTED_ENTRY_EDITING }
// Convenience wrappers that iterate neighboring BLOCKS rather than edges:
// each walks the edge list under a hidden __edge variable and exposes the
// block at the far end of the edge.
#define FOREACH_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetSuccList())\
    {\
    BasicBlock * blockSucc = __edge->GetSucc(); \
    AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Editing-safe successor-block iteration.
#define FOREACH_SUCCESSOR_BLOCK_EDITING(blockSucc, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(__edge, block->GetSuccList(), iter)\
    {\
    BasicBlock * blockSucc = __edge->GetSucc(); \
    AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK_EDITING\
    }\
    NEXT_EDGE_IN_LIST_EDITING

// Successors reached through edges parked on the dead-successor list.
#define FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadSuccList())\
    {\
    BasicBlock * blockSucc = __edge->GetSucc(); \
    AnalysisAssert(blockSucc);
#define NEXT_DEAD_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Predecessor-block iteration.
#define FOREACH_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetPredList())\
    {\
    BasicBlock * blockPred = __edge->GetPred(); \
    AnalysisAssert(blockPred);
#define NEXT_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Predecessors reached through edges parked on the dead-predecessor list.
#define FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadPredList())\
    {\
    BasicBlock * blockPred = __edge->GetPred(); \
    AnalysisAssert(blockPred);
#define NEXT_DEAD_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Iterate the blocks belonging to a loop (its blockList SList).
#define FOREACH_BLOCK_IN_LOOP(block, loop)\
    FOREACH_BLOCK_IN_LIST(block, &loop->blockList)
#define NEXT_BLOCK_IN_LOOP \
    NEXT_BLOCK_IN_LIST

// Editing-safe variant.
#define FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)\
    FOREACH_BLOCK_IN_LIST_EDITING(block, &loop->blockList, iter)
#define NEXT_BLOCK_IN_LOOP_EDITING \
    NEXT_BLOCK_IN_LIST_EDITING

// Iterate all loops of a function (editing-safe), via func->m_fg.
#define FOREACH_LOOP_IN_FUNC_EDITING(loop, func)\
    FOREACH_LOOP_EDITING(loop, func->m_fg)
#define NEXT_LOOP_IN_FUNC_EDITING\
    NEXT_LOOP_EDITING;
// Walk the graph's loop list; loopNext is latched before the body runs, so
// the body may unlink the current loop safely.
#define FOREACH_LOOP_EDITING(loop, graph)\
    {\
    Loop* loopNext;\
    for (Loop* loop = graph->loopList;\
    loop != nullptr;\
    loop = loopNext)\
    {\
    loopNext = loop->next;
#define NEXT_LOOP_EDITING \
    }}