// FlowGraph.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. class BasicBlock;
  7. class FlowEdge;
  8. class Loop;
  9. class Region;
  10. class Func;
  11. class AddPropertyCacheBucket
  12. {
  13. private:
  14. Js::Type* initialType;
  15. Js::Type* finalType;
  16. public:
  17. AddPropertyCacheBucket() : initialType(nullptr), finalType(nullptr)
  18. #if DBG
  19. , deadStoreUnavailableInitialType(nullptr), deadStoreUnavailableFinalType(nullptr)
  20. #endif
  21. {
  22. }
  23. AddPropertyCacheBucket(const AddPropertyCacheBucket& bucket) :
  24. initialType(bucket.initialType), finalType(bucket.finalType)
  25. #if DBG
  26. , deadStoreUnavailableInitialType(bucket.deadStoreUnavailableInitialType)
  27. , deadStoreUnavailableFinalType(bucket.deadStoreUnavailableFinalType)
  28. #endif
  29. {
  30. }
  31. bool operator!=(const AddPropertyCacheBucket& bucket) const
  32. {
  33. return this->initialType != bucket.initialType || this->finalType != bucket.finalType;
  34. }
  35. bool operator==(const AddPropertyCacheBucket& bucket) const
  36. {
  37. return this->initialType == bucket.initialType && this->finalType == bucket.finalType;
  38. }
  39. void Copy(AddPropertyCacheBucket *pNew) const
  40. {
  41. pNew->initialType = this->initialType;
  42. pNew->finalType = this->finalType;
  43. #if DBG
  44. pNew->deadStoreUnavailableInitialType = this->deadStoreUnavailableInitialType;
  45. pNew->deadStoreUnavailableFinalType = this->deadStoreUnavailableFinalType;
  46. #endif
  47. }
  48. Js::Type *GetInitialType() const { return this->initialType; }
  49. Js::Type *GetFinalType() const { return this->finalType; }
  50. void SetInitialType(Js::Type *type) { this->initialType = type; }
  51. void SetFinalType(Js::Type *type) { this->finalType = type; }
  52. #if DBG_DUMP
  53. void Dump() const;
  54. #endif
  55. #ifdef DBG
  56. Js::Type * deadStoreUnavailableInitialType;
  57. Js::Type * deadStoreUnavailableFinalType;
  58. #endif
  59. };
  60. class ObjTypeGuardBucket
  61. {
  62. private:
  63. BVSparse<JitArenaAllocator>* guardedPropertyOps;
  64. Js::Type * monoGuardType;
  65. public:
  66. ObjTypeGuardBucket() : guardedPropertyOps(nullptr), monoGuardType(nullptr) {}
  67. ObjTypeGuardBucket(BVSparse<JitArenaAllocator>* guardedPropertyOps) : monoGuardType(nullptr)
  68. {
  69. this->guardedPropertyOps = (guardedPropertyOps != nullptr ? guardedPropertyOps->CopyNew() : nullptr);
  70. }
  71. void Copy(ObjTypeGuardBucket *pNew) const
  72. {
  73. pNew->guardedPropertyOps = this->guardedPropertyOps ? this->guardedPropertyOps->CopyNew() : nullptr;
  74. pNew->monoGuardType = this->monoGuardType;
  75. }
  76. BVSparse<JitArenaAllocator> *GetGuardedPropertyOps() const { return this->guardedPropertyOps; }
  77. void SetGuardedPropertyOps(BVSparse<JitArenaAllocator> *guardedPropertyOps) { this->guardedPropertyOps = guardedPropertyOps; }
  78. void AddToGuardedPropertyOps(uint propertyOpId) { Assert(this->guardedPropertyOps != nullptr); this->guardedPropertyOps->Set(propertyOpId); }
  79. bool NeedsMonoCheck() const { return this->monoGuardType != nullptr; }
  80. void SetMonoGuardType(Js::Type *type) { this->monoGuardType = type; }
  81. Js::Type * GetMonoGuardType() const { return this->monoGuardType; }
  82. #if DBG_DUMP
  83. void Dump() const;
  84. #endif
  85. };
  86. class ObjWriteGuardBucket
  87. {
  88. private:
  89. BVSparse<JitArenaAllocator>* writeGuards;
  90. public:
  91. ObjWriteGuardBucket() : writeGuards(nullptr) {}
  92. ObjWriteGuardBucket(BVSparse<JitArenaAllocator>* writeGuards) { this->writeGuards = (writeGuards != nullptr ? writeGuards->CopyNew() : nullptr); }
  93. void Copy(ObjWriteGuardBucket *pNew) const
  94. {
  95. pNew->writeGuards = this->writeGuards ? this->writeGuards->CopyNew() : nullptr;
  96. }
  97. BVSparse<JitArenaAllocator> *GetWriteGuards() const { return this->writeGuards; }
  98. void SetWriteGuards(BVSparse<JitArenaAllocator> *writeGuards) { this->writeGuards = writeGuards; }
  99. void AddToWriteGuards(uint writeGuardId) { Assert(this->writeGuards != nullptr); this->writeGuards->Set(writeGuardId); }
  100. #if DBG_DUMP
  101. void Dump() const;
  102. #endif
  103. };
// Control-flow graph over a Func's IR: a doubly linked list of BasicBlocks
// (blockList .. tailBlock) plus the discovered Loop list. Built once per
// function and queried/edited by the optimizer passes.
class FlowGraph
{
    friend Loop;

public:
    static FlowGraph * New(Func *func, JitArenaAllocator *alloc);

    FlowGraph(Func *func, JitArenaAllocator *fgAlloc) :
        func(func),
        alloc(fgAlloc),
        blockList(nullptr),
        blockCount(0),
        tailBlock(nullptr),
        loopList(nullptr),
        catchLabelStack(nullptr),
        hasBackwardPassInfo(false),
        hasLoop(false),
        implicitCallFlags(Js::ImplicitCall_HasNoInfo)
    {
    }

    // Graph construction / teardown.
    void Build(void);
    void Destroy(void);

    // Peephole optimizations run over the graph.
    void RunPeeps();

    // Block/edge editing.
    BasicBlock * AddBlock(IR::Instr * firstInstr, IR::Instr * lastInstr, BasicBlock * nextBlock);
    FlowEdge * AddEdge(BasicBlock * predBlock, BasicBlock * succBlock);
    BasicBlock * InsertCompensationCodeForBlockMove(FlowEdge * edge, // Edge where compensation code needs to be inserted
        bool insertCompensationBlockToLoopList = false,
        bool sinkBlockLoop = false // Loop to which compensation block belongs
        );
    BasicBlock * InsertAirlockBlock(FlowEdge * edge);
    void InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget);
    void RemoveUnreachableBlocks();
    bool RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt = nullptr);
    IR::Instr * RemoveInstr(IR::Instr *instr, GlobOpt * globOpt);
    void RemoveBlock(BasicBlock *block, GlobOpt * globOpt = nullptr, bool tailDuping = false);
    BasicBlock * SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr);
    Func* GetFunc() { return func;};
    static void SafeRemoveInstr(IR::Instr *instr);
    void SortLoopLists();
    FlowEdge * FindEdge(BasicBlock *predBlock, BasicBlock *succBlock);

#if DBG_DUMP
    void Dump();
    void Dump(bool verbose, const wchar_t *form);
#endif

    JitArenaAllocator *       alloc;
    BasicBlock *              blockList;       // head of the block list
    BasicBlock *              tailBlock;       // last block in the list
    Loop *                    loopList;        // loops discovered by FindLoops
    SList<IR::LabelInstr*> *  catchLabelStack;
    bool                      hasBackwardPassInfo;
    bool                      hasLoop;
    Js::ImplicitCallFlags     implicitCallFlags;

private:
    // Loop discovery and normalization.
    void FindLoops(void);
    bool CanonicalizeLoops(void);
    void BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop = nullptr);
    void WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc);
    void AddBlockToLoop(BasicBlock *block, Loop *loop);

    // Region (EH) bookkeeping.
    void UpdateRegionForBlock(BasicBlock *block, Region **blockToRegion);
    Region * PropagateRegionFromPred(BasicBlock *block, BasicBlock *predBlock, Region *predRegion, IR::Instr * &tryInstr);

    // Peephole helpers.
    IR::Instr * PeepCm(IR::Instr *instr);
    IR::Instr * PeepTypedCm(IR::Instr *instr);
    void MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock);
    bool UnsignedCmpPeep(IR::Instr *cmpInstr);
    bool IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1);

#if DBG
    void VerifyLoopGraph();
#endif

private:
    void InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, uint32 origBranchSrcSymId);

private:
    Func *       func;
    unsigned int blockCount;
};
  176. class BasicBlock
  177. {
  178. friend class FlowGraph;
  179. friend class Loop;
  180. public:
  181. static BasicBlock * New(FlowGraph * graph);
  182. void AddPred(FlowEdge * edge, FlowGraph * graph);
  183. void AddSucc(FlowEdge * edge, FlowGraph * graph);
  184. void RemovePred(BasicBlock *block, FlowGraph * graph);
  185. void RemoveSucc(BasicBlock *block, FlowGraph * graph);
  186. void RemoveDeadPred(BasicBlock *block, FlowGraph * graph);
  187. void RemoveDeadSucc(BasicBlock *block, FlowGraph * graph);
  188. void UnlinkPred(BasicBlock *block);
  189. void UnlinkSucc(BasicBlock *block);
  190. void UnlinkInstr(IR::Instr * Instr);
  191. void RemoveInstr(IR::Instr * instr);
  192. void InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr);
  193. void InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr);
  194. void InsertAfter(IR::Instr * newInstr);
  195. void InvertBranch(IR::BranchInstr *branch);
  196. IR::Instr * GetFirstInstr(void) const
  197. {
  198. return firstInstr;
  199. }
  200. void SetFirstInstr(IR::Instr * instr)
  201. {
  202. firstInstr = instr;
  203. }
  204. IR::Instr * GetLastInstr(void)
  205. {
  206. BasicBlock *blNext = this->next;
  207. if (blNext)
  208. {
  209. return blNext->firstInstr->m_prev;
  210. }
  211. else
  212. {
  213. return this->func->m_exitInstr;
  214. }
  215. }
  216. void SetLastInstr(IR::Instr * instr)
  217. {
  218. // Intentionally empty
  219. }
  220. SListBaseCounted<FlowEdge *> * GetPredList(void)
  221. {
  222. return &predList;
  223. }
  224. SListBaseCounted<FlowEdge *> * GetSuccList(void)
  225. {
  226. return &succList;
  227. }
  228. SListBaseCounted<FlowEdge *> * GetDeadPredList(void)
  229. {
  230. return &deadPredList;
  231. }
  232. SListBaseCounted<FlowEdge *> * GetDeadSuccList(void)
  233. {
  234. return &deadSuccList;
  235. }
  236. unsigned int GetBlockNum(void) const
  237. {
  238. return number;
  239. }
  240. void SetBlockNum(unsigned int num)
  241. {
  242. number = num;
  243. }
  244. BasicBlock * GetPrev()
  245. {
  246. BasicBlock *block = this;
  247. do {
  248. block = block->prev;
  249. } while (block->isDeleted);
  250. return block;
  251. }
  252. BasicBlock * GetNext()
  253. {
  254. BasicBlock *block = this;
  255. do {
  256. block = block->next;
  257. } while (block && block->isDeleted);
  258. return block;
  259. }
  260. uint IncrementDataUseCount()
  261. {
  262. return ++this->dataUseCount;
  263. }
  264. uint DecrementDataUseCount()
  265. {
  266. Assert(this->dataUseCount != 0);
  267. return --this->dataUseCount;
  268. }
  269. uint GetDataUseCount()
  270. {
  271. return this->dataUseCount;
  272. }
  273. void SetDataUseCount(uint count)
  274. {
  275. this->dataUseCount = count;
  276. }
  277. bool IsLandingPad();
  278. #if DBG_DUMP
  279. void DumpHeader(bool insertCR = true);
  280. void Dump();
  281. #endif
  282. public:
  283. BasicBlock * next;
  284. BasicBlock * prev;
  285. Loop * loop;
  286. uint8 isDeleted:1;
  287. uint8 isDead:1;
  288. uint8 isLoopHeader:1;
  289. uint8 hasCall:1;
  290. uint8 isVisited:1;
  291. uint8 isAirLockCompensationBlock:1;
  292. #ifdef DBG
  293. uint8 isBreakBlock:1;
  294. uint8 isAirLockBlock:1;
  295. uint8 isBreakCompensationBlockAtSink:1;
  296. uint8 isBreakCompensationBlockAtSource:1;
  297. #endif
  298. // Deadstore data
  299. BVSparse<JitArenaAllocator> * upwardExposedUses;
  300. BVSparse<JitArenaAllocator> * upwardExposedFields;
  301. BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout;
  302. BVSparse<JitArenaAllocator> * fieldHoistCandidates;
  303. BVSparse<JitArenaAllocator> * slotDeadStoreCandidates;
  304. TempNumberTracker * tempNumberTracker;
  305. TempObjectTracker * tempObjectTracker;
  306. #if DBG
  307. TempObjectVerifyTracker * tempObjectVerifyTracker;
  308. #endif
  309. HashTable<AddPropertyCacheBucket> * stackSymToFinalType;
  310. HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties; // Dead store pass only
  311. HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap; // Backward pass only
  312. BVSparse<JitArenaAllocator> * noImplicitCallUses;
  313. BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses;
  314. BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses;
  315. BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses;
  316. BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses;
  317. BVSparse<JitArenaAllocator> * cloneStrCandidates;
  318. Loop * backwardPassCurrentLoop;
  319. // Global optimizer data
  320. GlobOptBlockData globOptData;
  321. // Bailout data
  322. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed;
  323. #if DBG
  324. StackSym ** byteCodeRestoreSyms;
  325. #endif
  326. IntOverflowDoesNotMatterRange * intOverflowDoesNotMatterRange;
  327. private:
  328. BasicBlock(JitArenaAllocator * alloc, Func *func) :
  329. next(nullptr),
  330. prev(nullptr),
  331. firstInstr(nullptr),
  332. number(k_InvalidNum),
  333. loop(nullptr),
  334. isDeleted(false),
  335. isDead(false),
  336. isLoopHeader(false),
  337. hasCall(false),
  338. upwardExposedUses(nullptr),
  339. upwardExposedFields(nullptr),
  340. typesNeedingKnownObjectLayout(nullptr),
  341. slotDeadStoreCandidates(nullptr),
  342. tempNumberTracker(nullptr),
  343. tempObjectTracker(nullptr),
  344. #if DBG
  345. tempObjectVerifyTracker(nullptr),
  346. #endif
  347. stackSymToFinalType(nullptr),
  348. stackSymToGuardedProperties(nullptr),
  349. stackSymToWriteGuardsMap(nullptr),
  350. noImplicitCallUses(nullptr),
  351. noImplicitCallNoMissingValuesUses(nullptr),
  352. noImplicitCallNativeArrayUses(nullptr),
  353. noImplicitCallJsArrayHeadSegmentSymUses(nullptr),
  354. noImplicitCallArrayLengthSymUses(nullptr),
  355. cloneStrCandidates(nullptr),
  356. byteCodeUpwardExposedUsed(nullptr),
  357. isAirLockCompensationBlock(false),
  358. #if DBG
  359. byteCodeRestoreSyms(nullptr),
  360. isBreakBlock(false),
  361. isAirLockBlock(false),
  362. isBreakCompensationBlockAtSource(false),
  363. isBreakCompensationBlockAtSink(false),
  364. #endif
  365. fieldHoistCandidates(nullptr),
  366. dataUseCount(0),
  367. intOverflowDoesNotMatterRange(nullptr),
  368. func(func),
  369. globOptData(func)
  370. {
  371. }
  372. void RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead = false);
  373. void RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead = false);
  374. void UnlinkPred(BasicBlock *block, bool doCleanSucc);
  375. void UnlinkSucc(BasicBlock *block, bool doCleanPred);
  376. #if DBG_DUMP
  377. bool Contains(IR::Instr * instr);
  378. #endif
  379. private:
  380. IR::Instr * firstInstr;
  381. SListBaseCounted<FlowEdge *> predList;
  382. SListBaseCounted<FlowEdge *> succList;
  383. SListBaseCounted<FlowEdge *> deadPredList;
  384. SListBaseCounted<FlowEdge *> deadSuccList;
  385. Func * func;
  386. unsigned int number;
  387. uint dataUseCount;
  388. static const unsigned int k_InvalidNum = (unsigned)-1;
  389. };
  390. class FlowEdge
  391. {
  392. public:
  393. static FlowEdge * New(FlowGraph * graph);
  394. FlowEdge() :
  395. predBlock(nullptr),
  396. succBlock(nullptr),
  397. pathDependentInfo(nullptr)
  398. {
  399. }
  400. BasicBlock * GetPred(void) const
  401. {
  402. return predBlock;
  403. }
  404. void SetPred(BasicBlock * block)
  405. {
  406. predBlock = block;
  407. }
  408. BasicBlock * GetSucc(void) const
  409. {
  410. return succBlock;
  411. }
  412. void SetSucc(BasicBlock * block)
  413. {
  414. succBlock = block;
  415. }
  416. PathDependentInfo * GetPathDependentInfo() const
  417. {
  418. return pathDependentInfo;
  419. }
  420. void SetPathDependentInfo(const PathDependentInfo &info, JitArenaAllocator *const alloc)
  421. {
  422. Assert(info.HasInfo());
  423. if (!pathDependentInfo)
  424. {
  425. pathDependentInfo = JitAnew(alloc, PathDependentInfo, info);
  426. }
  427. else
  428. {
  429. *pathDependentInfo = info;
  430. }
  431. }
  432. void ClearPathDependentInfo(JitArenaAllocator * alloc)
  433. {
  434. JitAdelete(alloc, pathDependentInfo);
  435. pathDependentInfo = nullptr;
  436. }
  437. private:
  438. BasicBlock * predBlock;
  439. BasicBlock * succBlock;
  440. // Only valid during globopt
  441. PathDependentInfo * pathDependentInfo;
  442. };
// A natural loop discovered by FlowGraph::FindLoops: its member blocks, parent/
// sibling links, landing pad, and the large body of per-loop state used by the
// backward pass, globopt (syms live on entry, field hoisting, memop detection)
// and the register allocator.
class Loop
{
    friend FlowGraph;

private:
    typedef JsUtil::BaseDictionary<SymID, StackSym *, JitArenaAllocator, PowerOf2SizePolicy> FieldHoistSymMap;
    typedef JsUtil::BaseDictionary<PropertySym *, Value *, JitArenaAllocator> InitialValueFieldMap;

    Js::ImplicitCallFlags implicitCallFlags;
    Js::LoopFlags loopFlags;
    BasicBlock * headBlock;   // loop header; GetHeadBlock asserts it is blockList.Head()

public:
    Func * topFunc;
    uint32 loopNumber;
    SList<BasicBlock *> blockList;   // blocks belonging to this loop
    Loop * next;                     // next loop in the graph's loop list
    Loop * parent;                   // enclosing loop, or nullptr
    BasicBlock * landingPad;         // preheader block inserted before the loop
    IR::LabelInstr * loopTopLabel;

    // Syms live on entry to the loop header, by representation.
    BVSparse<JitArenaAllocator> *varSymsOnEntry;
    BVSparse<JitArenaAllocator> *int32SymsOnEntry;
    BVSparse<JitArenaAllocator> *lossyInt32SymsOnEntry;  // see GlobOptData::liveLossyInt32Syms
    BVSparse<JitArenaAllocator> *float64SymsOnEntry;
    BVSparse<JitArenaAllocator> *liveFieldsOnEntry;
    // SIMD_JS
    // live syms upon entering loop header (from pred merge + forced syms + used before defs in loop)
    BVSparse<JitArenaAllocator> *simd128F4SymsOnEntry;
    BVSparse<JitArenaAllocator> *simd128I4SymsOnEntry;

    BVSparse<JitArenaAllocator> *symsUsedBeforeDefined;                 // stack syms that are live in the landing pad, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelyIntSymsUsedBeforeDefined;        // stack syms that are live in the landing pad with a likely-int value, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelyNumberSymsUsedBeforeDefined;     // stack syms that are live in the landing pad with a likely-number value, and used before they are defined in the loop
    // SIMD_JS
    BVSparse<JitArenaAllocator> *likelySimd128F4SymsUsedBeforeDefined;  // stack syms that are live in the landing pad with a likely-Simd128F4 value, and used before they are defined in the loop
    BVSparse<JitArenaAllocator> *likelySimd128I4SymsUsedBeforeDefined;  // stack syms that are live in the landing pad with a likely-Simd128I4 value, and used before they are defined in the loop

    BVSparse<JitArenaAllocator> *forceFloat64SymsOnEntry;
    // SIMD_JS
    // syms need to be forced to certain type due to hoisting
    BVSparse<JitArenaAllocator> *forceSimd128F4SymsOnEntry;
    BVSparse<JitArenaAllocator> *forceSimd128I4SymsOnEntry;

    BVSparse<JitArenaAllocator> *symsDefInLoop;
    BailOutInfo * bailOutInfo;
    IR::BailOutInstr * toPrimitiveSideEffectCheck;

    // Field-hoisting state.
    BVSparse<JitArenaAllocator> * fieldHoistCandidates;
    BVSparse<JitArenaAllocator> * liveInFieldHoistCandidates;
    BVSparse<JitArenaAllocator> * fieldHoistCandidateTypes;
    SListBase<IR::Instr *> prepassFieldHoistInstrCandidates;
    FieldHoistSymMap fieldHoistSymMap;
    IR::Instr * endDisableImplicitCall;
    BVSparse<JitArenaAllocator> * hoistedFields;
    BVSparse<JitArenaAllocator> * hoistedFieldCopySyms;
    BVSparse<JitArenaAllocator> * liveOutFields;
    ValueNumber firstValueNumberInLoop;
    JsArrayKills jsArrayKills;
    BVSparse<JitArenaAllocator> *fieldKilled;
    BVSparse<JitArenaAllocator> *fieldPRESymStore;
    InitialValueFieldMap initialValueFieldMap;

    // Loop-count / induction-variable analysis.
    InductionVariableSet *inductionVariables;
    BasicBlock *dominatingLoopCountableBlock;
    LoopCount *loopCount;
    SymIdToStackSymMap *loopCountBasedBoundBaseSyms;

    bool isDead : 1;
    bool hasDeadStoreCollectionPass : 1;
    bool hasDeadStorePrepass : 1;
    bool hasCall : 1;
    bool hasHoistedFields : 1;
    bool needImplicitCallBailoutChecksForJsArrayCheckHoist : 1;
    bool allFieldsKilled : 1;
    bool isLeaf : 1;                 // no nested loops
    bool isProcessed : 1; // Set and reset at varying places according to the phase we're in.
                          // For example, in the lowerer, it'll be set to true when we process the loopTop for a certain loop

    // Memop (memset/memcopy) candidate tracking.
    struct MemCopyCandidate;
    struct MemSetCandidate;
    struct MemOpCandidate
    {
        SymID base;
        SymID index;
        byte count;
        bool bIndexAlreadyChanged;
        enum MemOpType
        {
            MEMSET,
            MEMCOPY
        } type;
        bool IsMemSet() const { return type == MEMSET; }
        bool IsMemCopy() const { return type == MEMCOPY; }
        // Downcasts; valid only when the corresponding Is* predicate holds.
        struct Loop::MemCopyCandidate* AsMemCopy();
        struct Loop::MemSetCandidate* AsMemSet();
        MemOpCandidate(MemOpType type) :
            type(type)
        {
        }
    };

    struct MemSetCandidate : public MemOpCandidate
    {
        BailoutConstantValue constant;
        StackSym* srcSym;
        MemSetCandidate() : MemOpCandidate(MemOpCandidate::MEMSET), srcSym(nullptr) {}
    };

    struct MemCopyCandidate : public MemOpCandidate
    {
        SymID ldBase;
        StackSym* transferSym;
        byte ldCount;
        MemCopyCandidate() : MemOpCandidate(MemOpCandidate::MEMCOPY) {}
    };

// Iteration helpers over a loop's memop candidate list; the *_EDITING forms
// allow removal during iteration. MEMOP_CANDIDATE_TYPE_CHECK opens a scope in
// which `data` is the candidate downcast to the requested type.
#define FOREACH_MEMOP_CANDIDATES_EDITING(data, loop, iterator) FOREACH_SLISTCOUNTED_ENTRY_EDITING(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates, iterator)
#define NEXT_MEMOP_CANDIDATE_EDITING NEXT_SLISTCOUNTED_ENTRY_EDITING
#define FOREACH_MEMOP_CANDIDATES(data, loop) FOREACH_SLISTCOUNTED_ENTRY(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates)
#define NEXT_MEMOP_CANDIDATE NEXT_SLISTCOUNTED_ENTRY
#define MEMOP_CANDIDATE_TYPE_CHECK(candidate, data, type) if(candidate->Is ## type()) {Loop:: ## type ## Candidate* data = candidate->As## type();
#define FOREACH_MEMCOPY_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
#define NEXT_MEMCOPY_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
#define FOREACH_MEMCOPY_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
#define NEXT_MEMCOPY_CANDIDATE }}NEXT_MEMOP_CANDIDATE}
#define FOREACH_MEMSET_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
#define NEXT_MEMSET_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
#define FOREACH_MEMSET_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
#define NEXT_MEMSET_CANDIDATE }}NEXT_MEMOP_CANDIDATE}

    // How an induction variable changes per iteration: unroll amount plus an
    // incremental flag packed into one byte.
    typedef struct
    {
        byte unroll : 7;
        byte isIncremental : 1;
    } InductionVariableChangeInfo;

    typedef JsUtil::BaseDictionary<SymID, InductionVariableChangeInfo, JitArenaAllocator> InductionVariableChangeInfoMap;
    typedef JsUtil::BaseDictionary<byte, IR::Opnd*, JitArenaAllocator> InductionVariableOpndPerUnrollMap;
    typedef SListCounted<MemOpCandidate *> MemOpList;

    typedef struct
    {
        MemOpList *candidates;
        bool doMemOp : 1;
        BVSparse<JitArenaAllocator> *inductionVariablesUsedAfterLoop;
        InductionVariableChangeInfoMap *inductionVariableChangeInfoMap;
        InductionVariableOpndPerUnrollMap *inductionVariableOpndPerUnrollMap;
        // This assumes that all memop operations use the same index and have the same length
        // Temporary map to reuse existing startIndexOpnd while emitting
        // 0 = !increment & !alreadyChanged, 1 = !increment & alreadyChanged, 2 = increment & !alreadyChanged, 3 = increment & alreadyChanged
        IR::RegOpnd* startIndexOpndCache[4];
    } MemOpInfo;

    MemOpInfo *memOpInfo;   // lazily created; see EnsureMemOpVariablesInitialized

    // Per-loop state for the register allocator.
    struct RegAlloc
    {
        Lifetime ** loopTopRegContent;                      // Save off the state of the registers at the loop top
        BVSparse<JitArenaAllocator> * symRegUseBv;          // If a lifetime was live in a reg into the loop, did the reg get used before being spilled?
        BVSparse<JitArenaAllocator> * defdInLoopBv;         // Was a lifetime defined in the loop?
        BVSparse<JitArenaAllocator> * liveOnBackEdgeSyms;   // Is a lifetime live on the back-edge of the loop?
        BitVector regUseBv;                                 // Registers used in this loop so far
        uint32 loopStart;                                   // loopTopLabel->GetNumber()
        uint32 loopEnd;                                     // loopTailBranch->GetNumber()
        uint32 helperLength;                                // Number of instrs in helper code in loop
        SList<Lifetime *> * extendedLifetime;               // Lifetimes to extend for this loop
        SList<Lifetime **> * exitRegContentList;            // Linked list of regContents for the exit edges
        bool hasNonOpHelperCall;
        bool hasCall;
        bool hasAirLock;                                    // Do back-edges have airlock blocks?
    } regAlloc;

public:
    Loop(JitArenaAllocator * alloc, Func *func)
        : topFunc(func),
        blockList(alloc),
        parent(nullptr),
        landingPad(nullptr),
        loopTopLabel(nullptr),
        symsUsedBeforeDefined(nullptr),
        likelyIntSymsUsedBeforeDefined(nullptr),
        likelyNumberSymsUsedBeforeDefined(nullptr),
        likelySimd128F4SymsUsedBeforeDefined(nullptr),
        likelySimd128I4SymsUsedBeforeDefined(nullptr),
        forceFloat64SymsOnEntry(nullptr),
        forceSimd128F4SymsOnEntry(nullptr),
        forceSimd128I4SymsOnEntry(nullptr),
        symsDefInLoop(nullptr),
        fieldHoistCandidateTypes(nullptr),
        fieldHoistSymMap(alloc),
        needImplicitCallBailoutChecksForJsArrayCheckHoist(false),
        inductionVariables(nullptr),
        dominatingLoopCountableBlock(nullptr),
        loopCount(nullptr),
        loopCountBasedBoundBaseSyms(nullptr),
        isDead(false),
        allFieldsKilled(false),
        isLeaf(true),
        isProcessed(false),
        initialValueFieldMap(alloc)
    {
        // Loop numbers are 1-based and assigned in discovery order.
        this->loopNumber = ++func->loopCount;
    }

    void SetHeadBlock(BasicBlock *block) { headBlock = block; }
    BasicBlock * GetHeadBlock() const { Assert(headBlock == blockList.Head()); return headBlock; }

    bool IsDescendentOrSelf(Loop const * loop) const;

    // Allocates memOpInfo and its maps on first use.
    bool EnsureMemOpVariablesInitialized();

    Js::ImplicitCallFlags GetImplicitCallFlags();
    void SetImplicitCallFlags(Js::ImplicitCallFlags flags);
    Js::LoopFlags GetLoopFlags() const { return loopFlags; }
    void SetLoopFlags(Js::LoopFlags val) { loopFlags = val; }
    bool CanHoistInvariants();
    bool CanDoFieldCopyProp();
    bool CanDoFieldHoist();
    void SetHasCall();
    IR::LabelInstr * GetLoopTopInstr() const;
    void SetLoopTopInstr(IR::LabelInstr * loopTop);
    Func * GetFunc() const { return GetLoopTopInstr()->m_func; }

#if DBG_DUMP
    bool GetHasCall() const { return hasCall; }
    uint GetLoopNumber() const;
#endif

private:
    void InsertLandingPad(FlowGraph *fg);
    bool RemoveBreakBlocks(FlowGraph *fg);
};
// Structure definition cannot be inside Loop in order to use it as a parameter in GlobOpt
// Data gathered while validating a memop candidate, consumed when the actual
// memset/memcopy is emitted.
struct MemOpEmitData
{
    Loop::MemOpCandidate* candidate;   // the candidate being emitted
    IR::Instr* stElemInstr;            // the store-element instruction of the candidate
    BasicBlock* block;                 // block containing stElemInstr
    Loop::InductionVariableChangeInfo inductionVar;
    IR::BailOutKind bailOutKind;
};

// Memset needs no extra state beyond the base data.
struct MemSetEmitData : public MemOpEmitData
{
};

// Memcopy additionally tracks the matching load-element instruction.
struct MemCopyEmitData : public MemOpEmitData
{
    IR::Instr* ldElemInstr;
};
  666. #define FOREACH_BLOCK_IN_FUNC(block, func)\
  667. FOREACH_BLOCK(block, func->m_fg)
  668. #define NEXT_BLOCK_IN_FUNC\
  669. NEXT_BLOCK;
  670. #define FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, func)\
  671. FOREACH_BLOCK_DEAD_OR_ALIVE(block, func->m_fg)
  672. #define NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE\
  673. NEXT_BLOCK_DEAD_OR_ALIVE;
  674. #define FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func) \
  675. FOREACH_BLOCK_BACKWARD(block, func->m_fg)
  676. #define NEXT_BLOCK_BACKWARD_IN_FUNC \
  677. NEXT_BLOCK_BACKWARD;
  678. #define FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, func) \
  679. FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, func->m_fg)
  680. #define NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE \
  681. NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE;
  682. #define FOREACH_BLOCK_IN_FUNC_EDITING(block, func)\
  683. FOREACH_BLOCK_EDITING(block, func->m_fg)
  684. #define NEXT_BLOCK_IN_FUNC_EDITING\
  685. NEXT_BLOCK_EDITING;
  686. #define FOREACH_BLOCK_BACKWARD_IN_FUNC_EDITING(block, func)\
  687. FOREACH_BLOCK_BACKWARD_EDITING(block, func->m_fg)
  688. #define NEXT_BLOCK_BACKWARD_IN_FUNC_EDITING\
  689. NEXT_BLOCK_BACKWARD_EDITING;
// Walk every block in the graph's block list, including deleted ones.
#define FOREACH_BLOCK_ALL(block, graph) \
    for (BasicBlock *block = graph->blockList;\
        block != nullptr;\
        block = block->next)\
    {
#define NEXT_BLOCK_ALL \
    }

// Walk only live blocks (skips blocks with isDeleted set).
#define FOREACH_BLOCK(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK \
    NEXT_BLOCK_ALL

// Walk live blocks plus deleted blocks still marked isDead
// (skips only blocks that are deleted and not dead).
#define FOREACH_BLOCK_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
        if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_DEAD_OR_ALIVE \
    NEXT_BLOCK_ALL

// Backward walks start from the graph's tail block and run to the head
// (nullptr sentinel for the range end).
#define FOREACH_BLOCK_BACKWARD(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD \
    NEXT_BLOCK_BACKWARD_IN_RANGE
#define FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE
// Walk blocks backward from 'blockList' down to and including 'blockLast';
// a null 'blockLast' walks all the way past the head. Visits deleted blocks too.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast)\
    {\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
        block != blockStop;\
        block = block->prev)\
    {
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL \
    }}

// Same range walk, skipping deleted blocks.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL

// Editing variant: 'blockPrev' caches block->prev at the top of each iteration
// so the current block may be unlinked/deleted during the walk.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev)\
    {\
    BasicBlock *blockPrev;\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
        block != blockStop;\
        block = blockPrev)\
    {\
        blockPrev = block->prev;
#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING \
    }}

// Editing range walk over live blocks only.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(block, blockList, blockLast, blockPrev) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev) \
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING

// Range walk visiting live blocks plus deleted-but-isDead blocks.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
        if (block->isDeleted && !block->isDead) { continue; }
#define NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Forward walk over live blocks; caches block->next first so the current
// block may be deleted during the iteration.
#define FOREACH_BLOCK_EDITING(block, graph)\
    {\
    BasicBlock *blockNext;\
    for (BasicBlock *block = graph->blockList;\
        block != nullptr;\
        block = blockNext)\
    {\
        blockNext = block->next; \
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_EDITING \
    }}

// Backward walk over live blocks; caches block->prev first so the current
// block may be deleted during the iteration.
#define FOREACH_BLOCK_BACKWARD_EDITING(block, graph)\
    {\
    BasicBlock *blockPrev;\
    for (BasicBlock *block = graph->tailBlock;\
        block != nullptr;\
        block = blockPrev)\
    {\
        blockPrev = block->prev; \
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_BACKWARD_EDITING \
    }}
// Walk the live blocks stored in an SList of BasicBlock* (skips isDeleted).
#define FOREACH_BLOCK_IN_LIST(block, list)\
    FOREACH_SLIST_ENTRY(BasicBlock*, block, list)\
    {\
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST \
    NEXT_SLIST_ENTRY \
    }

// Editing variant: 'iter' exposes the SList iterator so the current entry
// can be removed while iterating.
#define FOREACH_BLOCK_IN_LIST_EDITING(block, list, iter)\
    FOREACH_SLIST_ENTRY_EDITING(BasicBlock*, block, list, iter)\
    {\
        if (block->isDeleted) { continue; }
#define NEXT_BLOCK_IN_LIST_EDITING \
    NEXT_SLIST_ENTRY_EDITING \
    }
  786. #define FOREACH_SUCCESSOR_EDGE(edge, block)\
  787. FOREACH_EDGE_IN_LIST(edge, block->GetSuccList())
  788. #define NEXT_SUCCESSOR_EDGE\
  789. NEXT_EDGE_IN_LIST
  790. #define FOREACH_SUCCESSOR_EDGE_EDITING(edge, bloc, iter)\
  791. FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetSuccList(), iter)
  792. #define NEXT_SUCCESSOR_EDGE_EDITING\
  793. NEXT_EDGE_IN_LIST_EDITING
// Iterate over the in-edges (predecessor edges) of a basic block.
#define FOREACH_PREDECESSOR_EDGE(edge, block)\
    FOREACH_EDGE_IN_LIST(edge, block->GetPredList())
#define NEXT_PREDECESSOR_EDGE\
    NEXT_EDGE_IN_LIST

// Editing variant: 'iter' allows removal of the current edge while iterating.
#define FOREACH_PREDECESSOR_EDGE_EDITING(edge, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetPredList(), iter)
#define NEXT_PREDECESSOR_EDGE_EDITING\
    NEXT_EDGE_IN_LIST_EDITING
  802. #define FOREACH_EDGE_IN_LIST(edge, list)\
  803. FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, list)\
  804. {
  805. #define NEXT_EDGE_IN_LIST\
  806. NEXT_SLISTBASECOUNTED_ENTRY }
  807. #define FOREACH_EDGE_IN_LIST_EDITING(edge, list, iter)\
  808. FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, list, iter)\
  809. {\
  810. #define NEXT_EDGE_IN_LIST_EDITING\
  811. NEXT_SLISTBASECOUNTED_ENTRY_EDITING }
// Iterate over successor blocks: walks the successor edges and dereferences
// each edge to its target block.
#define FOREACH_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetSuccList())\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Editing variant of the successor-block walk ('iter' allows edge removal).
#define FOREACH_SUCCESSOR_BLOCK_EDITING(blockSucc, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(__edge, block->GetSuccList(), iter)\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);
#define NEXT_SUCCESSOR_BLOCK_EDITING\
    }\
    NEXT_EDGE_IN_LIST_EDITING

// Successor blocks reached only through the dead successor edge list.
#define FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadSuccList())\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);
#define NEXT_DEAD_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Iterate over predecessor blocks: walks the predecessor edges and
// dereferences each edge to its source block.
#define FOREACH_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetPredList())\
    {\
        BasicBlock * blockPred = __edge->GetPred(); \
        AnalysisAssert(blockPred);
#define NEXT_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Predecessor blocks reached only through the dead predecessor edge list.
#define FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadPredList())\
    {\
        BasicBlock * blockPred = __edge->GetPred(); \
        AnalysisAssert(blockPred);
#define NEXT_DEAD_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Walk the live blocks belonging to a loop (the loop's block SList).
#define FOREACH_BLOCK_IN_LOOP(block, loop)\
    FOREACH_BLOCK_IN_LIST(block, &loop->blockList)
#define NEXT_BLOCK_IN_LOOP \
    NEXT_BLOCK_IN_LIST

// Editing variant: 'iter' allows removal of the current block from the list.
#define FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)\
    FOREACH_BLOCK_IN_LIST_EDITING(block, &loop->blockList, iter)
#define NEXT_BLOCK_IN_LOOP_EDITING \
    NEXT_BLOCK_IN_LIST_EDITING

// Walk the loops of a function's flow graph; editing-safe (see below).
#define FOREACH_LOOP_IN_FUNC_EDITING(loop, func)\
    FOREACH_LOOP_EDITING(loop, func->m_fg)
#define NEXT_LOOP_IN_FUNC_EDITING\
    NEXT_LOOP_EDITING;

// Walk the graph's loop list; 'loopNext' is cached at the top of each
// iteration so the current loop may be removed during the walk.
#define FOREACH_LOOP_EDITING(loop, graph)\
    {\
    Loop* loopNext;\
    for (Loop* loop = graph->loopList;\
        loop != nullptr;\
        loop = loopNext)\
    {\
        loopNext = loop->next;
#define NEXT_LOOP_EDITING \
    }}