// FlowGraph.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #pragma once
  6. class BasicBlock;
  7. class FlowEdge;
  8. class Loop;
  9. class Region;
  10. class Func;
  11. class AddPropertyCacheBucket
  12. {
  13. private:
  14. Js::Type* initialType;
  15. Js::Type* finalType;
  16. public:
  17. AddPropertyCacheBucket() : initialType(nullptr), finalType(nullptr)
  18. #if DBG
  19. , deadStoreUnavailableInitialType(nullptr), deadStoreUnavailableFinalType(nullptr)
  20. #endif
  21. {
  22. }
  23. AddPropertyCacheBucket(const AddPropertyCacheBucket& bucket) :
  24. initialType(bucket.initialType), finalType(bucket.finalType)
  25. #if DBG
  26. , deadStoreUnavailableInitialType(bucket.deadStoreUnavailableInitialType)
  27. , deadStoreUnavailableFinalType(bucket.deadStoreUnavailableFinalType)
  28. #endif
  29. {
  30. }
  31. bool operator!=(const AddPropertyCacheBucket& bucket) const
  32. {
  33. return this->initialType != bucket.initialType || this->finalType != bucket.finalType;
  34. }
  35. bool operator==(const AddPropertyCacheBucket& bucket) const
  36. {
  37. return this->initialType == bucket.initialType && this->finalType == bucket.finalType;
  38. }
  39. void Copy(AddPropertyCacheBucket *pNew) const
  40. {
  41. pNew->initialType = this->initialType;
  42. pNew->finalType = this->finalType;
  43. #if DBG
  44. pNew->deadStoreUnavailableInitialType = this->deadStoreUnavailableInitialType;
  45. pNew->deadStoreUnavailableFinalType = this->deadStoreUnavailableFinalType;
  46. #endif
  47. }
  48. Js::Type *GetInitialType() const { return this->initialType; }
  49. Js::Type *GetFinalType() const { return this->finalType; }
  50. void SetInitialType(Js::Type *type) { this->initialType = type; }
  51. void SetFinalType(Js::Type *type) { this->finalType = type; }
  52. #if DBG_DUMP
  53. void Dump() const;
  54. #endif
  55. #ifdef DBG
  56. Js::Type * deadStoreUnavailableInitialType;
  57. Js::Type * deadStoreUnavailableFinalType;
  58. #endif
  59. };
  60. class ObjTypeGuardBucket
  61. {
  62. private:
  63. BVSparse<JitArenaAllocator>* guardedPropertyOps;
  64. bool needsMonoCheck;
  65. public:
  66. ObjTypeGuardBucket() : guardedPropertyOps(nullptr), needsMonoCheck(false) {}
  67. ObjTypeGuardBucket(BVSparse<JitArenaAllocator>* guardedPropertyOps) : needsMonoCheck(false)
  68. {
  69. this->guardedPropertyOps = (guardedPropertyOps != nullptr ? guardedPropertyOps->CopyNew() : nullptr);
  70. }
  71. void Copy(ObjTypeGuardBucket *pNew) const
  72. {
  73. pNew->guardedPropertyOps = this->guardedPropertyOps ? this->guardedPropertyOps->CopyNew() : nullptr;
  74. pNew->needsMonoCheck = this->needsMonoCheck;
  75. }
  76. BVSparse<JitArenaAllocator> *GetGuardedPropertyOps() const { return this->guardedPropertyOps; }
  77. void SetGuardedPropertyOps(BVSparse<JitArenaAllocator> *guardedPropertyOps) { this->guardedPropertyOps = guardedPropertyOps; }
  78. void AddToGuardedPropertyOps(uint propertyOpId) { Assert(this->guardedPropertyOps != nullptr); this->guardedPropertyOps->Set(propertyOpId); }
  79. bool NeedsMonoCheck() const { return this->needsMonoCheck; }
  80. void SetNeedsMonoCheck(bool value) { this->needsMonoCheck = value; }
  81. #if DBG_DUMP
  82. void Dump() const;
  83. #endif
  84. };
  85. class ObjWriteGuardBucket
  86. {
  87. private:
  88. BVSparse<JitArenaAllocator>* writeGuards;
  89. public:
  90. ObjWriteGuardBucket() : writeGuards(nullptr) {}
  91. ObjWriteGuardBucket(BVSparse<JitArenaAllocator>* writeGuards) { this->writeGuards = (writeGuards != nullptr ? writeGuards->CopyNew() : nullptr); }
  92. void Copy(ObjWriteGuardBucket *pNew) const
  93. {
  94. pNew->writeGuards = this->writeGuards ? this->writeGuards->CopyNew() : nullptr;
  95. }
  96. BVSparse<JitArenaAllocator> *GetWriteGuards() const { return this->writeGuards; }
  97. void SetWriteGuards(BVSparse<JitArenaAllocator> *writeGuards) { this->writeGuards = writeGuards; }
  98. void AddToWriteGuards(uint writeGuardId) { Assert(this->writeGuards != nullptr); this->writeGuards->Set(writeGuardId); }
  99. #if DBG_DUMP
  100. void Dump() const;
  101. #endif
  102. };
  103. class FlowGraph
  104. {
  105. friend Loop;
  106. public:
  107. static FlowGraph * New(Func *func, JitArenaAllocator *alloc);
  108. FlowGraph(Func *func, JitArenaAllocator *fgAlloc) :
  109. func(func),
  110. alloc(fgAlloc),
  111. blockList(nullptr),
  112. blockCount(0),
  113. tailBlock(nullptr),
  114. loopList(nullptr),
  115. catchLabelStack(nullptr),
  116. hasBackwardPassInfo(false),
  117. hasLoop(false),
  118. implicitCallFlags(Js::ImplicitCall_HasNoInfo)
  119. {
  120. }
  121. void Build(void);
  122. void Destroy(void);
  123. void RunPeeps();
  124. BasicBlock * AddBlock(IR::Instr * firstInstr, IR::Instr * lastInstr, BasicBlock * nextBlock);
  125. FlowEdge * AddEdge(BasicBlock * predBlock, BasicBlock * succBlock);
  126. BasicBlock * InsertCompensationCodeForBlockMove(FlowEdge * edge, // Edge where compensation code needs to be inserted
  127. bool insertCompensationBlockToLoopList = false,
  128. bool sinkBlockLoop = false // Loop to which compensation block belongs
  129. );
  130. BasicBlock * InsertAirlockBlock(FlowEdge * edge);
  131. void InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget);
  132. void RemoveUnreachableBlocks();
  133. bool RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt = nullptr);
  134. IR::Instr * RemoveInstr(IR::Instr *instr, GlobOpt * globOpt);
  135. void RemoveBlock(BasicBlock *block, GlobOpt * globOpt = nullptr, bool tailDuping = false);
  136. BasicBlock * SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr);
  137. Func* GetFunc() { return func;};
  138. static void SafeRemoveInstr(IR::Instr *instr);
  139. void SortLoopLists();
  140. FlowEdge * FindEdge(BasicBlock *predBlock, BasicBlock *succBlock);
  141. #if DBG_DUMP
  142. void Dump();
  143. void Dump(bool verbose, const wchar_t *form);
  144. #endif
  145. JitArenaAllocator * alloc;
  146. BasicBlock * blockList;
  147. BasicBlock * tailBlock;
  148. Loop * loopList;
  149. SList<IR::LabelInstr*> * catchLabelStack;
  150. bool hasBackwardPassInfo;
  151. bool hasLoop;
  152. Js::ImplicitCallFlags implicitCallFlags;
  153. private:
  154. void FindLoops(void);
  155. bool CanonicalizeLoops(void);
  156. void BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop = nullptr);
  157. void WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc);
  158. void AddBlockToLoop(BasicBlock *block, Loop *loop);
  159. void UpdateRegionForBlock(BasicBlock *block, Region **blockToRegion);
  160. Region * PropagateRegionFromPred(BasicBlock *block, BasicBlock *predBlock, Region *predRegion, IR::Instr * &tryInstr);
  161. IR::Instr * PeepCm(IR::Instr *instr);
  162. IR::Instr * PeepTypedCm(IR::Instr *instr);
  163. void MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock);
  164. bool UnsignedCmpPeep(IR::Instr *cmpInstr);
  165. bool IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1);
  166. #if DBG
  167. void VerifyLoopGraph();
  168. #endif
  169. private:
  170. void InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, uint32 origBranchSrcSymId);
  171. private:
  172. Func * func;
  173. unsigned int blockCount;
  174. };
  175. class BasicBlock
  176. {
  177. friend class FlowGraph;
  178. friend class Loop;
  179. public:
  180. static BasicBlock * New(FlowGraph * graph);
  181. void AddPred(FlowEdge * edge, FlowGraph * graph);
  182. void AddSucc(FlowEdge * edge, FlowGraph * graph);
  183. void RemovePred(BasicBlock *block, FlowGraph * graph);
  184. void RemoveSucc(BasicBlock *block, FlowGraph * graph);
  185. void RemoveDeadPred(BasicBlock *block, FlowGraph * graph);
  186. void RemoveDeadSucc(BasicBlock *block, FlowGraph * graph);
  187. void UnlinkPred(BasicBlock *block);
  188. void UnlinkSucc(BasicBlock *block);
  189. void UnlinkInstr(IR::Instr * Instr);
  190. void RemoveInstr(IR::Instr * instr);
  191. void InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr);
  192. void InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr);
  193. void InsertAfter(IR::Instr * newInstr);
  194. void InvertBranch(IR::BranchInstr *branch);
  195. IR::Instr * GetFirstInstr(void) const
  196. {
  197. return firstInstr;
  198. }
  199. void SetFirstInstr(IR::Instr * instr)
  200. {
  201. firstInstr = instr;
  202. }
  203. IR::Instr * GetLastInstr(void)
  204. {
  205. BasicBlock *blNext = this->next;
  206. if (blNext)
  207. {
  208. return blNext->firstInstr->m_prev;
  209. }
  210. else
  211. {
  212. return this->func->m_exitInstr;
  213. }
  214. }
  215. void SetLastInstr(IR::Instr * instr)
  216. {
  217. // Intentionally empty
  218. }
  219. SListBaseCounted<FlowEdge *> * GetPredList(void)
  220. {
  221. return &predList;
  222. }
  223. SListBaseCounted<FlowEdge *> * GetSuccList(void)
  224. {
  225. return &succList;
  226. }
  227. SListBaseCounted<FlowEdge *> * GetDeadPredList(void)
  228. {
  229. return &deadPredList;
  230. }
  231. SListBaseCounted<FlowEdge *> * GetDeadSuccList(void)
  232. {
  233. return &deadSuccList;
  234. }
  235. unsigned int GetBlockNum(void) const
  236. {
  237. return number;
  238. }
  239. void SetBlockNum(unsigned int num)
  240. {
  241. number = num;
  242. }
  243. BasicBlock * GetPrev()
  244. {
  245. BasicBlock *block = this;
  246. do {
  247. block = block->prev;
  248. } while (block->isDeleted);
  249. return block;
  250. }
  251. BasicBlock * GetNext()
  252. {
  253. BasicBlock *block = this;
  254. do {
  255. block = block->next;
  256. } while (block && block->isDeleted);
  257. return block;
  258. }
  259. uint IncrementDataUseCount()
  260. {
  261. return ++this->dataUseCount;
  262. }
  263. uint DecrementDataUseCount()
  264. {
  265. Assert(this->dataUseCount != 0);
  266. return --this->dataUseCount;
  267. }
  268. uint GetDataUseCount()
  269. {
  270. return this->dataUseCount;
  271. }
  272. void SetDataUseCount(uint count)
  273. {
  274. this->dataUseCount = count;
  275. }
  276. bool IsLandingPad();
  277. #if DBG_DUMP
  278. void DumpHeader(bool insertCR = true);
  279. void Dump();
  280. #endif
  281. public:
  282. BasicBlock * next;
  283. BasicBlock * prev;
  284. Loop * loop;
  285. uint8 isDeleted:1;
  286. uint8 isDead:1;
  287. uint8 isLoopHeader:1;
  288. uint8 hasCall:1;
  289. uint8 isVisited:1;
  290. uint8 isAirLockCompensationBlock:1;
  291. #ifdef DBG
  292. uint8 isBreakBlock:1;
  293. uint8 isAirLockBlock:1;
  294. uint8 isBreakCompensationBlockAtSink:1;
  295. uint8 isBreakCompensationBlockAtSource:1;
  296. #endif
  297. // Deadstore data
  298. BVSparse<JitArenaAllocator> * upwardExposedUses;
  299. BVSparse<JitArenaAllocator> * upwardExposedFields;
  300. BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout;
  301. BVSparse<JitArenaAllocator> * fieldHoistCandidates;
  302. BVSparse<JitArenaAllocator> * slotDeadStoreCandidates;
  303. TempNumberTracker * tempNumberTracker;
  304. TempObjectTracker * tempObjectTracker;
  305. #if DBG
  306. TempObjectVerifyTracker * tempObjectVerifyTracker;
  307. #endif
  308. HashTable<AddPropertyCacheBucket> * stackSymToFinalType;
  309. HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties; // Dead store pass only
  310. HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap; // Backward pass only
  311. BVSparse<JitArenaAllocator> * noImplicitCallUses;
  312. BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses;
  313. BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses;
  314. BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses;
  315. BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses;
  316. BVSparse<JitArenaAllocator> * cloneStrCandidates;
  317. Loop * backwardPassCurrentLoop;
  318. // Global optimizer data
  319. GlobOptBlockData globOptData;
  320. // Bailout data
  321. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed;
  322. #if DBG
  323. StackSym ** byteCodeRestoreSyms;
  324. #endif
  325. IntOverflowDoesNotMatterRange * intOverflowDoesNotMatterRange;
  326. private:
  327. BasicBlock(JitArenaAllocator * alloc, Func *func) :
  328. next(nullptr),
  329. prev(nullptr),
  330. firstInstr(nullptr),
  331. number(k_InvalidNum),
  332. loop(nullptr),
  333. isDeleted(false),
  334. isDead(false),
  335. isLoopHeader(false),
  336. hasCall(false),
  337. upwardExposedUses(nullptr),
  338. upwardExposedFields(nullptr),
  339. typesNeedingKnownObjectLayout(nullptr),
  340. slotDeadStoreCandidates(nullptr),
  341. tempNumberTracker(nullptr),
  342. tempObjectTracker(nullptr),
  343. #if DBG
  344. tempObjectVerifyTracker(nullptr),
  345. #endif
  346. stackSymToFinalType(nullptr),
  347. stackSymToGuardedProperties(nullptr),
  348. stackSymToWriteGuardsMap(nullptr),
  349. noImplicitCallUses(nullptr),
  350. noImplicitCallNoMissingValuesUses(nullptr),
  351. noImplicitCallNativeArrayUses(nullptr),
  352. noImplicitCallJsArrayHeadSegmentSymUses(nullptr),
  353. noImplicitCallArrayLengthSymUses(nullptr),
  354. cloneStrCandidates(nullptr),
  355. byteCodeUpwardExposedUsed(nullptr),
  356. isAirLockCompensationBlock(false),
  357. #if DBG
  358. byteCodeRestoreSyms(nullptr),
  359. isBreakBlock(false),
  360. isAirLockBlock(false),
  361. isBreakCompensationBlockAtSource(false),
  362. isBreakCompensationBlockAtSink(false),
  363. #endif
  364. fieldHoistCandidates(nullptr),
  365. dataUseCount(0),
  366. intOverflowDoesNotMatterRange(nullptr),
  367. func(func),
  368. globOptData(func)
  369. {
  370. }
  371. void RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead = false);
  372. void RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead = false);
  373. void UnlinkPred(BasicBlock *block, bool doCleanSucc);
  374. void UnlinkSucc(BasicBlock *block, bool doCleanPred);
  375. #if DBG_DUMP
  376. bool Contains(IR::Instr * instr);
  377. #endif
  378. private:
  379. IR::Instr * firstInstr;
  380. SListBaseCounted<FlowEdge *> predList;
  381. SListBaseCounted<FlowEdge *> succList;
  382. SListBaseCounted<FlowEdge *> deadPredList;
  383. SListBaseCounted<FlowEdge *> deadSuccList;
  384. Func * func;
  385. unsigned int number;
  386. uint dataUseCount;
  387. static const unsigned int k_InvalidNum = (unsigned)-1;
  388. };
  389. class FlowEdge
  390. {
  391. public:
  392. static FlowEdge * New(FlowGraph * graph);
  393. FlowEdge() :
  394. predBlock(nullptr),
  395. succBlock(nullptr),
  396. pathDependentInfo(nullptr)
  397. {
  398. }
  399. BasicBlock * GetPred(void) const
  400. {
  401. return predBlock;
  402. }
  403. void SetPred(BasicBlock * block)
  404. {
  405. predBlock = block;
  406. }
  407. BasicBlock * GetSucc(void) const
  408. {
  409. return succBlock;
  410. }
  411. void SetSucc(BasicBlock * block)
  412. {
  413. succBlock = block;
  414. }
  415. PathDependentInfo * GetPathDependentInfo() const
  416. {
  417. return pathDependentInfo;
  418. }
  419. void SetPathDependentInfo(const PathDependentInfo &info, JitArenaAllocator *const alloc)
  420. {
  421. Assert(info.HasInfo());
  422. if (!pathDependentInfo)
  423. {
  424. pathDependentInfo = JitAnew(alloc, PathDependentInfo, info);
  425. }
  426. else
  427. {
  428. *pathDependentInfo = info;
  429. }
  430. }
  431. void ClearPathDependentInfo(JitArenaAllocator * alloc)
  432. {
  433. JitAdelete(alloc, pathDependentInfo);
  434. pathDependentInfo = nullptr;
  435. }
  436. private:
  437. BasicBlock * predBlock;
  438. BasicBlock * succBlock;
  439. // Only valid during globopt
  440. PathDependentInfo * pathDependentInfo;
  441. };
  442. class Loop
  443. {
  444. friend FlowGraph;
  445. private:
  446. typedef JsUtil::BaseDictionary<SymID, StackSym *, JitArenaAllocator, PowerOf2SizePolicy> FieldHoistSymMap;
  447. typedef JsUtil::BaseDictionary<PropertySym *, Value *, JitArenaAllocator> InitialValueFieldMap;
  448. Js::ImplicitCallFlags implicitCallFlags;
  449. Js::LoopFlags loopFlags;
  450. BasicBlock * headBlock;
  451. public:
  452. Func * topFunc;
  453. uint32 loopNumber;
  454. SList<BasicBlock *> blockList;
  455. Loop * next;
  456. Loop * parent;
  457. BasicBlock * landingPad;
  458. IR::LabelInstr * loopTopLabel;
  459. BVSparse<JitArenaAllocator> *varSymsOnEntry;
  460. BVSparse<JitArenaAllocator> *int32SymsOnEntry;
  461. BVSparse<JitArenaAllocator> *lossyInt32SymsOnEntry; // see GlobOptData::liveLossyInt32Syms
  462. BVSparse<JitArenaAllocator> *float64SymsOnEntry;
  463. BVSparse<JitArenaAllocator> *liveFieldsOnEntry;
  464. // SIMD_JS
  465. // live syms upon entering loop header (from pred merge + forced syms + used before defs in loop)
  466. BVSparse<JitArenaAllocator> *simd128F4SymsOnEntry;
  467. BVSparse<JitArenaAllocator> *simd128I4SymsOnEntry;
  468. BVSparse<JitArenaAllocator> *symsUsedBeforeDefined; // stack syms that are live in the landing pad, and used before they are defined in the loop
  469. BVSparse<JitArenaAllocator> *likelyIntSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-int value, and used before they are defined in the loop
  470. BVSparse<JitArenaAllocator> *likelyNumberSymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-number value, and used before they are defined in the loop
  471. // SIMD_JS
  472. BVSparse<JitArenaAllocator> *likelySimd128F4SymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-Simd128F4 value, and used before they are defined in the loop
  473. BVSparse<JitArenaAllocator> *likelySimd128I4SymsUsedBeforeDefined; // stack syms that are live in the landing pad with a likely-Simd128I4 value, and used before they are defined in the loop
  474. BVSparse<JitArenaAllocator> *forceFloat64SymsOnEntry;
  475. // SIMD_JS
  476. // syms need to be forced to certain type due to hoisting
  477. BVSparse<JitArenaAllocator> *forceSimd128F4SymsOnEntry;
  478. BVSparse<JitArenaAllocator> *forceSimd128I4SymsOnEntry;
  479. BVSparse<JitArenaAllocator> *symsDefInLoop;
  480. BailOutInfo * bailOutInfo;
  481. IR::BailOutInstr * toPrimitiveSideEffectCheck;
  482. BVSparse<JitArenaAllocator> * fieldHoistCandidates;
  483. BVSparse<JitArenaAllocator> * liveInFieldHoistCandidates;
  484. BVSparse<JitArenaAllocator> * fieldHoistCandidateTypes;
  485. SListBase<IR::Instr *> prepassFieldHoistInstrCandidates;
  486. FieldHoistSymMap fieldHoistSymMap;
  487. IR::Instr * endDisableImplicitCall;
  488. BVSparse<JitArenaAllocator> * hoistedFields;
  489. BVSparse<JitArenaAllocator> * hoistedFieldCopySyms;
  490. BVSparse<JitArenaAllocator> * liveOutFields;
  491. ValueNumber firstValueNumberInLoop;
  492. JsArrayKills jsArrayKills;
  493. BVSparse<JitArenaAllocator> *fieldKilled;
  494. BVSparse<JitArenaAllocator> *fieldPRESymStore;
  495. InitialValueFieldMap initialValueFieldMap;
  496. InductionVariableSet *inductionVariables;
  497. BasicBlock *dominatingLoopCountableBlock;
  498. LoopCount *loopCount;
  499. SymIdToStackSymMap *loopCountBasedBoundBaseSyms;
  500. bool isDead : 1;
  501. bool hasDeadStoreCollectionPass : 1;
  502. bool hasDeadStorePrepass : 1;
  503. bool hasCall : 1;
  504. bool hasHoistedFields : 1;
  505. bool needImplicitCallBailoutChecksForJsArrayCheckHoist : 1;
  506. bool allFieldsKilled : 1;
  507. bool isLeaf : 1;
  508. bool isProcessed : 1; // Set and reset at varying places according to the phase we're in.
  509. // For example, in the lowerer, it'll be set to true when we process the loopTop for a certain loop
  510. struct MemCopyCandidate;
  511. struct MemSetCandidate;
  512. struct MemOpCandidate
  513. {
  514. SymID base;
  515. SymID index;
  516. byte count;
  517. bool bIndexAlreadyChanged;
  518. enum MemOpType
  519. {
  520. MEMSET,
  521. MEMCOPY
  522. } type;
  523. bool IsMemSet() const { return type == MEMSET; }
  524. bool IsMemCopy() const { return type == MEMCOPY; }
  525. struct Loop::MemCopyCandidate* AsMemCopy();
  526. struct Loop::MemSetCandidate* AsMemSet();
  527. MemOpCandidate(MemOpType type) :
  528. type(type)
  529. {
  530. }
  531. };
  532. struct MemSetCandidate : public MemOpCandidate
  533. {
  534. BailoutConstantValue constant;
  535. StackSym* varSym;
  536. MemSetCandidate() : MemOpCandidate(MemOpCandidate::MEMSET), varSym(nullptr) {}
  537. };
  538. struct MemCopyCandidate : public MemOpCandidate
  539. {
  540. SymID ldBase;
  541. StackSym* transferSym;
  542. byte ldCount;
  543. MemCopyCandidate() : MemOpCandidate(MemOpCandidate::MEMCOPY) {}
  544. };
  545. #define FOREACH_MEMOP_CANDIDATES_EDITING(data, loop, iterator) FOREACH_SLISTCOUNTED_ENTRY_EDITING(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates, iterator)
  546. #define NEXT_MEMOP_CANDIDATE_EDITING NEXT_SLISTCOUNTED_ENTRY_EDITING
  547. #define FOREACH_MEMOP_CANDIDATES(data, loop) FOREACH_SLISTCOUNTED_ENTRY(Loop::MemOpCandidate*, data, loop->memOpInfo->candidates)
  548. #define NEXT_MEMOP_CANDIDATE NEXT_SLISTCOUNTED_ENTRY
  549. #define MEMOP_CANDIDATE_TYPE_CHECK(candidate, data, type) if(candidate->Is ## type()) {Loop:: ## type ## Candidate* data = candidate->As## type();
  550. #define FOREACH_MEMCOPY_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
  551. #define NEXT_MEMCOPY_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
  552. #define FOREACH_MEMCOPY_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemCopy)
  553. #define NEXT_MEMCOPY_CANDIDATE }}NEXT_MEMOP_CANDIDATE}
  554. #define FOREACH_MEMSET_CANDIDATES_EDITING(data, loop, iterator) {FOREACH_MEMOP_CANDIDATES_EDITING(_memopCandidate, loop, iterator) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
  555. #define NEXT_MEMSET_CANDIDATE_EDITING }}NEXT_MEMOP_CANDIDATE_EDITING}
  556. #define FOREACH_MEMSET_CANDIDATES(data, loop) {FOREACH_MEMOP_CANDIDATES(_memopCandidate, loop) {MEMOP_CANDIDATE_TYPE_CHECK(_memopCandidate, data, MemSet)
  557. #define NEXT_MEMSET_CANDIDATE }}NEXT_MEMOP_CANDIDATE}
  558. typedef struct
  559. {
  560. byte unroll : 7;
  561. byte isIncremental : 1;
  562. } InductionVariableChangeInfo;
  563. typedef JsUtil::BaseDictionary<SymID, InductionVariableChangeInfo, JitArenaAllocator> InductionVariableChangeInfoMap;
  564. typedef JsUtil::BaseDictionary<byte, IR::Opnd*, JitArenaAllocator> InductionVariableOpndPerUnrollMap;
  565. typedef SListCounted<MemOpCandidate *> MemOpList;
  566. typedef struct
  567. {
  568. MemOpList *candidates;
  569. bool doMemOp : 1;
  570. BVSparse<JitArenaAllocator> *inductionVariablesUsedAfterLoop;
  571. InductionVariableChangeInfoMap *inductionVariableChangeInfoMap;
  572. InductionVariableOpndPerUnrollMap *inductionVariableOpndPerUnrollMap;
  573. // This assumes that all memop operation use the same index and has the same length
  574. // Temporary map to reuse existing startIndexOpnd while emiting
  575. // 0 = !increment & !alreadyChanged, 1 = !increment & alreadyChanged, 2 = increment & !alreadyChanged, 3 = increment & alreadyChanged
  576. IR::RegOpnd* startIndexOpndCache[4];
  577. } MemOpInfo;
  578. MemOpInfo *memOpInfo;
  579. struct RegAlloc
  580. {
  581. Lifetime ** loopTopRegContent; // Save off the state of the registers at the loop top
  582. BVSparse<JitArenaAllocator> * symRegUseBv; // If a lifetime was live in a reg into the loop, did the reg get used before being spilled?
  583. BVSparse<JitArenaAllocator> * defdInLoopBv; // Was a lifetime defined in the loop?
  584. BVSparse<JitArenaAllocator> * liveOnBackEdgeSyms; // Is a lifetime live on the back-edge of the loop?
  585. BitVector regUseBv; // Registers used in this loop so far
  586. uint32 loopStart; // loopTopLabel->GetNumber()
  587. uint32 loopEnd; // loopTailBranch->GetNumber()
  588. uint32 helperLength; // Number of instrs in helper code in loop
  589. SList<Lifetime *> * extendedLifetime; // Lifetimes to extend for this loop
  590. SList<Lifetime **> * exitRegContentList; // Linked list of regContents for the exit edges
  591. bool hasNonOpHelperCall;
  592. bool hasCall;
  593. bool hasAirLock; // Do back-edges have airlock blocks?
  594. } regAlloc;
  595. public:
  596. Loop(JitArenaAllocator * alloc, Func *func)
  597. : topFunc(func),
  598. blockList(alloc),
  599. parent(nullptr),
  600. landingPad(nullptr),
  601. loopTopLabel(nullptr),
  602. symsUsedBeforeDefined(nullptr),
  603. likelyIntSymsUsedBeforeDefined(nullptr),
  604. likelyNumberSymsUsedBeforeDefined(nullptr),
  605. likelySimd128F4SymsUsedBeforeDefined(nullptr),
  606. likelySimd128I4SymsUsedBeforeDefined(nullptr),
  607. forceFloat64SymsOnEntry(nullptr),
  608. forceSimd128F4SymsOnEntry(nullptr),
  609. forceSimd128I4SymsOnEntry(nullptr),
  610. symsDefInLoop(nullptr),
  611. fieldHoistCandidateTypes(nullptr),
  612. fieldHoistSymMap(alloc),
  613. needImplicitCallBailoutChecksForJsArrayCheckHoist(false),
  614. inductionVariables(nullptr),
  615. dominatingLoopCountableBlock(nullptr),
  616. loopCount(nullptr),
  617. loopCountBasedBoundBaseSyms(nullptr),
  618. isDead(false),
  619. allFieldsKilled(false),
  620. isLeaf(true),
  621. isProcessed(false),
  622. initialValueFieldMap(alloc)
  623. {
  624. this->loopNumber = ++func->loopCount;
  625. }
  626. void SetHeadBlock(BasicBlock *block) { headBlock = block; }
  627. BasicBlock * GetHeadBlock() const { Assert(headBlock == blockList.Head()); return headBlock; }
  628. bool IsDescendentOrSelf(Loop const * loop) const;
  629. bool EnsureMemOpVariablesInitialized();
  630. Js::ImplicitCallFlags GetImplicitCallFlags();
  631. void SetImplicitCallFlags(Js::ImplicitCallFlags flags);
  632. Js::LoopFlags GetLoopFlags() const { return loopFlags; }
  633. void SetLoopFlags(Js::LoopFlags val) { loopFlags = val; }
  634. bool CanHoistInvariants();
  635. bool CanDoFieldCopyProp();
  636. bool CanDoFieldHoist();
  637. void SetHasCall();
  638. IR::LabelInstr * GetLoopTopInstr() const;
  639. void SetLoopTopInstr(IR::LabelInstr * loopTop);
  640. Func * GetFunc() const { return GetLoopTopInstr()->m_func; }
  641. #if DBG_DUMP
  642. bool GetHasCall() const { return hasCall; }
  643. uint GetLoopNumber() const;
  644. #endif
  645. private:
  646. void InsertLandingPad(FlowGraph *fg);
  647. bool RemoveBreakBlocks(FlowGraph *fg);
  648. };
// Structure definition cannot be inside Loop in order to use it as a parameter in GlobOpt
// Data collected for one memop candidate, consumed when emitting the
// memset/memcopy replacement code.
struct MemOpEmitData
{
    Loop::MemOpCandidate* candidate;                // the memset/memcopy candidate being emitted
    IR::Instr* stElemInstr;                         // the store-element instruction of the candidate — presumably the loop's StElem; verify in GlobOpt
    BasicBlock* block;                              // NOTE(review): presumably the block containing stElemInstr — confirm against GlobOpt usage
    Loop::InductionVariableChangeInfo inductionVar; // per-iteration change (unroll count / incremental) of the index variable
    IR::BailOutKind bailOutKind;                    // bailout kind to attach to the emitted memop
};
// Emit data for a memset candidate. No extra fields: the value to store lives
// in the MemSetCandidate itself (constant / varSym).
struct MemSetEmitData : public MemOpEmitData
{
};
// Emit data for a memcopy candidate: additionally tracks the load side of the
// copy, paired with the inherited stElemInstr.
struct MemCopyEmitData : public MemOpEmitData
{
    IR::Instr* ldElemInstr; // the load-element instruction feeding the copy
};
  665. #define FOREACH_BLOCK_IN_FUNC(block, func)\
  666. FOREACH_BLOCK(block, func->m_fg)
  667. #define NEXT_BLOCK_IN_FUNC\
  668. NEXT_BLOCK;
  669. #define FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, func)\
  670. FOREACH_BLOCK_DEAD_OR_ALIVE(block, func->m_fg)
  671. #define NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE\
  672. NEXT_BLOCK_DEAD_OR_ALIVE;
  673. #define FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func) \
  674. FOREACH_BLOCK_BACKWARD(block, func->m_fg)
  675. #define NEXT_BLOCK_BACKWARD_IN_FUNC \
  676. NEXT_BLOCK_BACKWARD;
  677. #define FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, func) \
  678. FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, func->m_fg)
  679. #define NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE \
  680. NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE;
  681. #define FOREACH_BLOCK_IN_FUNC_EDITING(block, func)\
  682. FOREACH_BLOCK_EDITING(block, func->m_fg)
  683. #define NEXT_BLOCK_IN_FUNC_EDITING\
  684. NEXT_BLOCK_EDITING;
  685. #define FOREACH_BLOCK_BACKWARD_IN_FUNC_EDITING(block, func)\
  686. FOREACH_BLOCK_BACKWARD_EDITING(block, func->m_fg)
  687. #define NEXT_BLOCK_BACKWARD_IN_FUNC_EDITING\
  688. NEXT_BLOCK_BACKWARD_EDITING;
// Iterate every block in the flow graph, including deleted ones.
#define FOREACH_BLOCK_ALL(block, graph) \
    for (BasicBlock *block = graph->blockList;\
        block != nullptr;\
        block = block->next)\
    {

#define NEXT_BLOCK_ALL \
    }

// Iterate only live blocks (skips blocks marked isDeleted).
#define FOREACH_BLOCK(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
        if (block->isDeleted) { continue; }

#define NEXT_BLOCK \
    NEXT_BLOCK_ALL

// Iterate live blocks plus deleted blocks flagged isDead
// (skips only blocks that are deleted and not dead).
#define FOREACH_BLOCK_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_ALL(block, graph) \
        if (block->isDeleted && !block->isDead) { continue; }

#define NEXT_BLOCK_DEAD_OR_ALIVE \
    NEXT_BLOCK_ALL
// Backward iteration over the whole graph: walk from tailBlock toward the
// head (nullptr sentinel means "no stop block").
#define FOREACH_BLOCK_BACKWARD(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD \
    NEXT_BLOCK_BACKWARD_IN_RANGE

#define FOREACH_BLOCK_BACKWARD_DEAD_OR_ALIVE(block, graph)\
    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, graph->tailBlock, nullptr)
#define NEXT_BLOCK_BACKWARD_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE
// Walk blocks backward (via prev) from blockList down to and including
// blockLast; a null blockLast means "walk to the head of the list".
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast)\
    {\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
        block != blockStop;\
        block = block->prev)\
    {

#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL \
    }}

// Same backward range walk, but deleted blocks are skipped.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
        if (block->isDeleted) { continue; }

#define NEXT_BLOCK_BACKWARD_IN_RANGE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Backward range walk that tolerates removal of the current block: the
// predecessor is cached in blockPrev before the loop body runs.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev)\
    {\
    BasicBlock *blockPrev;\
    BasicBlock * blockStop = blockLast? ((BasicBlock *)blockLast)->prev : nullptr; \
    for (BasicBlock *block = blockList;\
        block != blockStop;\
        block = blockPrev)\
    {\
        blockPrev = block->prev;

#define NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING \
    }}

// Editing variant that also skips deleted blocks.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(block, blockList, blockLast, blockPrev) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING(block, blockList, blockLast, blockPrev) \
        if (block->isDeleted) { continue; }

#define NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL_EDITING

// Backward range walk over blocks that are alive, or deleted but flagged isDead.
#define FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, blockList, blockLast) \
    FOREACH_BLOCK_BACKWARD_IN_RANGE_ALL(block, blockList, blockLast) \
        if (block->isDeleted && !block->isDead) { continue; }

#define NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE \
    NEXT_BLOCK_BACKWARD_IN_RANGE_ALL
// Forward walk that tolerates removal of the current block (the successor is
// cached in blockNext before the body runs); deleted blocks are skipped.
#define FOREACH_BLOCK_EDITING(block, graph)\
    {\
    BasicBlock *blockNext;\
    for (BasicBlock *block = graph->blockList;\
        block != nullptr;\
        block = blockNext)\
    {\
        blockNext = block->next; \
        if (block->isDeleted) { continue; }

#define NEXT_BLOCK_EDITING \
    }}

// Backward walk (from tailBlock) that tolerates removal of the current block.
#define FOREACH_BLOCK_BACKWARD_EDITING(block, graph)\
    {\
    BasicBlock *blockPrev;\
    for (BasicBlock *block = graph->tailBlock;\
        block != nullptr;\
        block = blockPrev)\
    {\
        blockPrev = block->prev; \
        if (block->isDeleted) { continue; }

#define NEXT_BLOCK_BACKWARD_EDITING \
    }}
// Iterate the live (non-deleted) blocks stored in an SList of BasicBlock*.
#define FOREACH_BLOCK_IN_LIST(block, list)\
    FOREACH_SLIST_ENTRY(BasicBlock*, block, list)\
    {\
        if (block->isDeleted) { continue; }

#define NEXT_BLOCK_IN_LIST \
    NEXT_SLIST_ENTRY \
    }

// Editing variant: iter permits removal of the current list entry.
#define FOREACH_BLOCK_IN_LIST_EDITING(block, list, iter)\
    FOREACH_SLIST_ENTRY_EDITING(BasicBlock*, block, list, iter)\
    {\
        if (block->isDeleted) { continue; }

#define NEXT_BLOCK_IN_LIST_EDITING \
    NEXT_SLIST_ENTRY_EDITING \
    }
// Iterate the successor edges of a block.
#define FOREACH_SUCCESSOR_EDGE(edge, block)\
    FOREACH_EDGE_IN_LIST(edge, block->GetSuccList())
#define NEXT_SUCCESSOR_EDGE\
    NEXT_EDGE_IN_LIST
  789. #define FOREACH_SUCCESSOR_EDGE_EDITING(edge, bloc, iter)\
  790. FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetSuccList(), iter)
  791. #define NEXT_SUCCESSOR_EDGE_EDITING\
  792. NEXT_EDGE_IN_LIST_EDITING
// Iterate the predecessor edges of a block.
#define FOREACH_PREDECESSOR_EDGE(edge, block)\
    FOREACH_EDGE_IN_LIST(edge, block->GetPredList())
#define NEXT_PREDECESSOR_EDGE\
    NEXT_EDGE_IN_LIST

// Editing variant: iter permits removal of the current predecessor edge.
#define FOREACH_PREDECESSOR_EDGE_EDITING(edge, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(edge, block->GetPredList(), iter)
#define NEXT_PREDECESSOR_EDGE_EDITING\
    NEXT_EDGE_IN_LIST_EDITING
// Iterate the FlowEdge* entries of a counted SList-based edge list.
#define FOREACH_EDGE_IN_LIST(edge, list)\
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, list)\
    {
#define NEXT_EDGE_IN_LIST\
    NEXT_SLISTBASECOUNTED_ENTRY }
  806. #define FOREACH_EDGE_IN_LIST_EDITING(edge, list, iter)\
  807. FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, list, iter)\
  808. {\
  809. #define NEXT_EDGE_IN_LIST_EDITING\
  810. NEXT_SLISTBASECOUNTED_ENTRY_EDITING }
// Iterate the successor blocks of a block (the GetSucc() target of each
// successor edge); asserts each successor is non-null.
#define FOREACH_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetSuccList())\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);

#define NEXT_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Editing variant: iter permits removal of the current successor edge.
#define FOREACH_SUCCESSOR_BLOCK_EDITING(blockSucc, block, iter)\
    FOREACH_EDGE_IN_LIST_EDITING(__edge, block->GetSuccList(), iter)\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);

#define NEXT_SUCCESSOR_BLOCK_EDITING\
    }\
    NEXT_EDGE_IN_LIST_EDITING

// Iterate successors reached through the dead-successor edge list.
#define FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadSuccList())\
    {\
        BasicBlock * blockSucc = __edge->GetSucc(); \
        AnalysisAssert(blockSucc);

#define NEXT_DEAD_SUCCESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Iterate the predecessor blocks of a block (the GetPred() source of each
// predecessor edge); asserts each predecessor is non-null.
#define FOREACH_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetPredList())\
    {\
        BasicBlock * blockPred = __edge->GetPred(); \
        AnalysisAssert(blockPred);

#define NEXT_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST

// Iterate predecessors reached through the dead-predecessor edge list.
#define FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, block)\
    FOREACH_EDGE_IN_LIST(__edge, block->GetDeadPredList())\
    {\
        BasicBlock * blockPred = __edge->GetPred(); \
        AnalysisAssert(blockPred);

#define NEXT_DEAD_PREDECESSOR_BLOCK\
    }\
    NEXT_EDGE_IN_LIST
// Iterate the (live) blocks belonging to a loop.
#define FOREACH_BLOCK_IN_LOOP(block, loop)\
    FOREACH_BLOCK_IN_LIST(block, &loop->blockList)
#define NEXT_BLOCK_IN_LOOP \
    NEXT_BLOCK_IN_LIST

// Editing variant: iter permits removal of the current block entry.
#define FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)\
    FOREACH_BLOCK_IN_LIST_EDITING(block, &loop->blockList, iter)
#define NEXT_BLOCK_IN_LOOP_EDITING \
    NEXT_BLOCK_IN_LIST_EDITING
// Iterate the loops of a function; delegates to FOREACH_LOOP_EDITING on
// func->m_fg.
#define FOREACH_LOOP_IN_FUNC_EDITING(loop, func)\
    FOREACH_LOOP_EDITING(loop, func->m_fg)
#define NEXT_LOOP_IN_FUNC_EDITING\
    NEXT_LOOP_EDITING;

// Walk the graph's loop list; tolerates removal of the current loop because
// the next loop is cached in loopNext before the body runs.
#define FOREACH_LOOP_EDITING(loop, graph)\
    {\
    Loop* loopNext;\
    for (Loop* loop = graph->loopList;\
        loop != nullptr;\
        loop = loopNext)\
    {\
        loopNext = loop->next;

#define NEXT_LOOP_EDITING \
    }}