2
0

GlobOpt.h 62 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. class BackwardPass;
  6. class LoopCount;
  7. class GlobOpt;
  8. #if ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  9. #define PRINT_GOPT_TRACE_HEADER \
  10. Output::Print(_u("TRACE ")); \
  11. if (this->IsLoopPrePass()) \
  12. { \
  13. Output::Print(_u("[%d, %d]"), this->rootLoopPrePass->loopNumber - 1, this->prePassLoop->loopNumber - 1); \
  14. } \
  15. Output::Print(_u(": ")); \
  16. #define PRINT_VALUENUMBER_TRACE_HEADER \
  17. Output::Print(_u("VALUE NUMBERING TRACE ")); \
  18. if (this->IsLoopPrePass()) \
  19. { \
  20. Output::Print(_u("[%d, %d]"), this->rootLoopPrePass->loopNumber - 1, this->prePassLoop->loopNumber - 1); \
  21. } \
  22. Output::Print(_u(": ")); \
  23. #define GOPT_TRACE_VALUENUMBER(opndHeader, opnd, ...) \
  24. if (PHASE_TRACE(Js::ValueNumberingPhase, this->func)) \
  25. { \
  26. PRINT_VALUENUMBER_TRACE_HEADER; \
  27. Output::Print(opndHeader); \
  28. opnd->Dump(IRDumpFlags_None, this->func); \
  29. Output::Print(_u(" : ")); \
  30. Output::Print(__VA_ARGS__); \
  31. Output::Print(_u("\n")); \
  32. Output::Flush(); \
  33. }
  34. #define GOPT_TRACE_OPND(opnd, ...) \
  35. if (PHASE_TRACE(Js::GlobOptPhase, this->func)) \
  36. { \
  37. PRINT_GOPT_TRACE_HEADER; \
  38. opnd->Dump(); \
  39. Output::Print(_u(" : ")); \
  40. Output::Print(__VA_ARGS__); \
  41. Output::Flush(); \
  42. }
  43. #define GOPT_TRACE(...) \
  44. if (PHASE_TRACE(Js::GlobOptPhase, this->func)) \
  45. { \
  46. PRINT_GOPT_TRACE_HEADER; \
  47. Output::Print(__VA_ARGS__); \
  48. Output::Flush(); \
  49. }
  50. #define GOPT_TRACE_INSTRTRACE(instr) \
  51. if (PHASE_TRACE(Js::GlobOptPhase, this->func) || PHASE_TRACE(Js::ValueNumberingPhase, this->func)) \
  52. { \
  53. if (this->IsLoopPrePass()) \
  54. { \
  55. Output::Print(_u("[%d, %d]: "), this->rootLoopPrePass->loopNumber - 1, this->prePassLoop->loopNumber - 1); \
  56. } \
  57. instr->Dump(); \
  58. Output::Flush(); \
  59. }
  60. #define GOPT_TRACE_INSTR(instr, ...) \
  61. if (PHASE_TRACE(Js::GlobOptPhase, this->func)) \
  62. { \
  63. PRINT_GOPT_TRACE_HEADER; \
  64. Output::Print(__VA_ARGS__); \
  65. instr->Dump(); \
  66. Output::Flush(); \
  67. }
  68. #define GOPT_TRACE_BLOCK(block, before) \
  69. this->Trace(block, before); \
  70. Output::Flush();
  71. // TODO: OOP JIT, add back line number
  72. #define TRACE_PHASE_INSTR(phase, instr, ...) \
  73. if(PHASE_TRACE(phase, this->func)) \
  74. { \
  75. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
  76. Output::Print( \
  77. _u("Function %s (%s)"), \
  78. this->func->GetJITFunctionBody()->GetDisplayName(), \
  79. this->func->GetDebugNumberSet(debugStringBuffer)); \
  80. if(this->func->IsLoopBody()) \
  81. { \
  82. Output::Print(_u(", loop %u"), this->func->GetWorkItem()->GetLoopNumber()); \
  83. } \
  84. if(instr->m_func != this->func) \
  85. { \
  86. Output::Print( \
  87. _u(", Inlinee %s (%s)"), \
  88. instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
  89. instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
  90. } \
  91. Output::Print(_u(" - %s\n "), Js::PhaseNames[phase]); \
  92. instr->Dump(); \
  93. Output::Print(_u(" ")); \
  94. Output::Print(__VA_ARGS__); \
  95. Output::Flush(); \
  96. }
  97. #define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...) \
  98. if(CONFIG_FLAG(Verbose)) \
  99. { \
  100. TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
  101. }
  102. #else // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  103. #define GOPT_TRACE(...)
  104. #define GOPT_TRACE_VALUENUMBER(opnd, ...)
  105. #define GOPT_TRACE_OPND(opnd, ...)
  106. #define GOPT_TRACE_INSTRTRACE(instr)
  107. #define GOPT_TRACE_INSTR(instr, ...)
  108. #define GOPT_TRACE_BLOCK(block, before)
  109. #define TRACE_PHASE_INSTR(phase, instr, ...)
  110. #define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...)
  111. #endif // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
// Packs the "ignored int overflow" and "ignored negative zero" flags of an
// integer math expression into the base ExprAttributes bit storage
// (presumably so expressions differing only in these flags are distinguished
// where ExprAttributes are compared - confirm with ExprAttributes' users).
class IntMathExprAttributes : public ExprAttributes
{
private:
    // Bit positions within the ExprAttributes storage.
    static const uint IgnoredIntOverflowIndex = 0;
    static const uint IgnoredNegativeZeroIndex = 1;

public:
    // Reinterprets generic attributes as int-math attributes; no bits change.
    IntMathExprAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
    {
    }

    // Builds the attribute bits from the two flags.
    IntMathExprAttributes(const bool ignoredIntOverflow, const bool ignoredNegativeZero)
    {
        SetBitAttribute(IgnoredIntOverflowIndex, ignoredIntOverflow);
        SetBitAttribute(IgnoredNegativeZeroIndex, ignoredNegativeZero);
    }
};
// Packs the destination/source signedness flags of a conversion into the
// base ExprAttributes bit storage.
class ConvAttributes : public ExprAttributes
{
private:
    // Bit positions within the ExprAttributes storage.
    static const uint DstUnsignedIndex = 0;
    static const uint SrcUnsignedIndex = 1;

public:
    // Reinterprets generic attributes as conversion attributes; no bits change.
    ConvAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
    {
    }

    // Builds the attribute bits from the two signedness flags.
    ConvAttributes(const bool isDstUnsigned, const bool isSrcUnsigned)
    {
        SetBitAttribute(DstUnsignedIndex, isDstUnsigned);
        SetBitAttribute(SrcUnsignedIndex, isSrcUnsigned);
    }
};
// Packs "destination is int-only" / "destination is number-only" flags into
// the base ExprAttributes bit storage.
class DstIsIntOrNumberAttributes : public ExprAttributes
{
private:
    // Bit positions within the ExprAttributes storage.
    static const uint DstIsIntOnlyIndex = 0;
    static const uint DstIsNumberOnlyIndex = 1;

public:
    // Reinterprets generic attributes; no bits change.
    DstIsIntOrNumberAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
    {
    }

    // Builds the attribute bits from the two flags.
    DstIsIntOrNumberAttributes(const bool dstIsIntOnly, const bool dstIsNumberOnly)
    {
        SetBitAttribute(DstIsIntOnlyIndex, dstIsIntOnly);
        SetBitAttribute(DstIsNumberOnlyIndex, dstIsNumberOnly);
    }
};
// Relationship between a left-hand and right-hand value that is known to hold
// along a specific control-flow path (e.g. after taking one side of a
// conditional branch on a comparison).
enum class PathDependentRelationship : uint8
{
    Equal,
    NotEqual,
    GreaterThanOrEqual,
    GreaterThan,
    LessThanOrEqual,
    LessThan
};
// Describes a PathDependentRelationship between two values that holds on the
// current control-flow path. The right-hand side is either a Value or, when
// rightValue is null (second constructor), the constant rightConstantValue.
class PathDependentInfo
{
private:
    Value *leftValue, *rightValue;
    int32 rightConstantValue;   // meaningful only when rightValue is null
    PathDependentRelationship relationship;

public:
    // Relationship between two values; both sides are required.
    PathDependentInfo(const PathDependentRelationship relationship, Value *const leftValue, Value *const rightValue)
        : leftValue(leftValue), rightValue(rightValue), rightConstantValue(0), relationship(relationship)
    {
        Assert(leftValue);
        Assert(rightValue);
    }

    // Relationship whose right-hand side may instead be the given constant;
    // note that only leftValue is asserted non-null here, so rightValue may
    // legitimately be null when the constant stands in for it.
    PathDependentInfo(
        const PathDependentRelationship relationship,
        Value *const leftValue,
        Value *const rightValue,
        const int32 rightConstantValue)
        : leftValue(leftValue), rightValue(rightValue), rightConstantValue(rightConstantValue), relationship(relationship)
    {
        Assert(leftValue);
    }

public:
    // True when this object carries information (leftValue set). Presumably a
    // cleared/empty instance with a null leftValue is produced elsewhere -
    // confirm with callers; no default constructor is visible here.
    bool HasInfo() const
    {
        return !!leftValue;
    }

    PathDependentRelationship Relationship() const
    {
        Assert(HasInfo());
        return relationship;
    }

    Value *LeftValue() const
    {
        Assert(HasInfo());
        return leftValue;
    }

    Value *RightValue() const
    {
        Assert(HasInfo());
        return rightValue;
    }

    // Valid only when there is no right-hand Value (constant right side).
    int32 RightConstantValue() const
    {
        Assert(!RightValue());
        return rightConstantValue;
    }
};
  214. class PathDependentInfoToRestore
  215. {
  216. private:
  217. ValueInfo *leftValueInfo, *rightValueInfo;
  218. public:
  219. PathDependentInfoToRestore() : leftValueInfo(nullptr), rightValueInfo(nullptr)
  220. {
  221. }
  222. PathDependentInfoToRestore(ValueInfo *const leftValueInfo, ValueInfo *const rightValueInfo)
  223. : leftValueInfo(leftValueInfo), rightValueInfo(rightValueInfo)
  224. {
  225. }
  226. public:
  227. ValueInfo *LeftValueInfo() const
  228. {
  229. return leftValueInfo;
  230. }
  231. ValueInfo *RightValueInfo() const
  232. {
  233. return rightValueInfo;
  234. }
  235. public:
  236. void Clear()
  237. {
  238. leftValueInfo = nullptr;
  239. rightValueInfo = nullptr;
  240. }
  241. };
// Arena-allocated container aliases used throughout the global optimizer.
typedef JsUtil::List<IR::Opnd *, JitArenaAllocator> OpndList;
typedef JsUtil::BaseDictionary<Sym *, ValueInfo *, JitArenaAllocator> SymToValueInfoMap;
typedef JsUtil::BaseDictionary<SymID, IR::Instr *, JitArenaAllocator> SymIdToInstrMap;
// Set of Values keyed by their value number.
typedef JsUtil::BaseHashSet<Value *, JitArenaAllocator, PowerOf2SizePolicy, ValueNumber> ValueSetByValueNumber;
typedef JsUtil::BaseDictionary<SymID, StackSym *, JitArenaAllocator> SymIdToStackSymMap;
// Pair of value numbers; used below as the key of valuesCreatedForMerge.
typedef JsUtil::Pair<ValueNumber, ValueNumber> ValueNumberPair;
typedef JsUtil::BaseDictionary<ValueNumberPair, Value *, JitArenaAllocator> ValueNumberPairToValueMap;
namespace JsUtil
{
    // Specialization invoked when a dictionary entry holding a
    // StackLiteralInitFldData is cleared: in debug builds, poison the fields
    // so any stale use of the cleared entry is caught.
    template <>
    inline void ClearValue<StackLiteralInitFldData>::Clear(StackLiteralInitFldData* value)
    {
#if DBG
        value->propIds = nullptr;
        value->currentInitFldCount = (uint)-1;  // sentinel: no valid count
#endif
    }
}
// Maps from constants (int, int64, address, string) to the StackSym/Value
// representing that constant - presumably so each distinct constant gets a
// single canonical sym/value; confirm with the GlobOpt members that use them.
typedef JsUtil::BaseDictionary<IntConstType, StackSym *, JitArenaAllocator> IntConstantToStackSymMap;
typedef JsUtil::BaseDictionary<int32, Value *, JitArenaAllocator> IntConstantToValueMap;
typedef JsUtil::BaseDictionary<int64, Value *, JitArenaAllocator> Int64ConstantToValueMap;
typedef JsUtil::BaseDictionary<Js::Var, Value *, JitArenaAllocator> AddrConstantToValueMap;
typedef JsUtil::BaseDictionary<Js::InternalString, Value *, JitArenaAllocator> StringConstantToValueMap;
  265. class JsArrayKills
  266. {
  267. private:
  268. union
  269. {
  270. struct
  271. {
  272. bool killsAllArrays : 1;
  273. bool killsArraysWithNoMissingValues : 1;
  274. bool killsNativeArrays : 1;
  275. bool killsArrayHeadSegments : 1;
  276. bool killsArrayHeadSegmentLengths : 1;
  277. bool killsArrayLengths : 1;
  278. };
  279. byte bits;
  280. };
  281. public:
  282. JsArrayKills() : bits(0)
  283. {
  284. }
  285. private:
  286. JsArrayKills(const byte bits) : bits(bits)
  287. {
  288. }
  289. public:
  290. bool KillsAllArrays() const { return killsAllArrays; }
  291. void SetKillsAllArrays() { killsAllArrays = true; }
  292. bool KillsArraysWithNoMissingValues() const { return killsArraysWithNoMissingValues; }
  293. void SetKillsArraysWithNoMissingValues() { killsArraysWithNoMissingValues = true; }
  294. bool KillsNativeArrays() const { return killsNativeArrays; }
  295. void SetKillsNativeArrays() { killsNativeArrays = true; }
  296. bool KillsArrayHeadSegments() const { return killsArrayHeadSegments; }
  297. void SetKillsArrayHeadSegments() { killsArrayHeadSegments = true; }
  298. bool KillsArrayHeadSegmentLengths() const { return killsArrayHeadSegmentLengths; }
  299. void SetKillsArrayHeadSegmentLengths() { killsArrayHeadSegmentLengths = true; }
  300. bool KillsTypedArrayHeadSegmentLengths() const { return KillsAllArrays(); }
  301. bool KillsArrayLengths() const { return killsArrayLengths; }
  302. void SetKillsArrayLengths() { killsArrayLengths = true; }
  303. public:
  304. bool KillsValueType(const ValueType valueType) const
  305. {
  306. Assert(valueType.IsArrayOrObjectWithArray() || valueType.IsOptimizedVirtualTypedArray());
  307. return
  308. killsAllArrays ||
  309. (valueType.IsArrayOrObjectWithArray() &&
  310. (
  311. (killsArraysWithNoMissingValues && valueType.HasNoMissingValues()) ||
  312. (killsNativeArrays && !valueType.HasVarElements())
  313. )
  314. );
  315. }
  316. bool AreSubsetOf(const JsArrayKills &other) const
  317. {
  318. return (bits & other.bits) == bits;
  319. }
  320. JsArrayKills Merge(const JsArrayKills &other)
  321. {
  322. return bits | other.bits;
  323. }
  324. };
// Iterates backward over the basic blocks between an (exclusive) begin block
// and an end block, tracking the Value of a loop-invariant sym in each
// visited block. Exact inclusive/exclusive semantics follow the constructor
// parameter names; the method definitions live in GlobOpt.cpp - confirm
// details there.
class InvariantBlockBackwardIterator
{
private:
    GlobOpt *const globOpt;
    BasicBlock *const exclusiveEndBlock;        // iteration stops before this block
    StackSym *const invariantSym;               // sym whose value is tracked per block
    const ValueNumber invariantSymValueNumber;  // InvalidValueNumber by default - presumably disables value-number matching; confirm
    BasicBlock *block;                          // current block
    Value *invariantSymValue;                   // invariantSym's value in the current block
    BVSparse<JitArenaAllocator> blockBV;        // presumably the set of blocks still on the followed flow path - see UpdatePredBlockBV
    bool followFlow;                            // when true, restrict iteration to predecessor flow
#if DBG
    BasicBlock *const inclusiveEndBlock;        // DBG-only: validates iteration never walks past this block
#endif

    bool UpdatePredBlockBV();

public:
    InvariantBlockBackwardIterator(GlobOpt *const globOpt, BasicBlock *const exclusiveBeginBlock, BasicBlock *const inclusiveEndBlock, StackSym *const invariantSym, const ValueNumber invariantSymValueNumber = InvalidValueNumber, bool followFlow = false);

public:
    bool IsValid() const;             // true while a current block exists
    void MoveNext();                  // advance to the previous block
    BasicBlock *Block() const;        // current block
    Value *InvariantSymValue() const; // invariantSym's value in the current block

    PREVENT_ASSIGN(InvariantBlockBackwardIterator);
};
  349. class FlowGraph;
  350. class GlobOpt
  351. {
  352. private:
  353. class AddSubConstantInfo;
  354. class ArrayLowerBoundCheckHoistInfo;
  355. class ArrayUpperBoundCheckHoistInfo;
  356. class ArraySrcOpt;
  357. class PRE;
  358. friend BackwardPass;
  359. #if DBG
  360. friend class ObjectTempVerify;
  361. #endif
  362. friend class GlobOptBlockData;
  363. friend class BasicBlock;
  364. private:
  365. SparseArray<Value> * byteCodeConstantValueArray;
  366. // Global bitvectors
  367. BVSparse<JitArenaAllocator> * byteCodeConstantValueNumbersBv;
  368. // Global bitvectors
  369. IntConstantToStackSymMap * intConstantToStackSymMap;
  370. IntConstantToValueMap* intConstantToValueMap;
  371. Int64ConstantToValueMap* int64ConstantToValueMap;
  372. AddrConstantToValueMap * addrConstantToValueMap;
  373. StringConstantToValueMap * stringConstantToValueMap;
  374. #if DBG
  375. // We can still track the finished stack literal InitFld lexically.
  376. BVSparse<JitArenaAllocator> * finishedStackLiteralInitFld;
  377. #endif
  378. BVSparse<JitArenaAllocator> * byteCodeUses;
  379. BVSparse<JitArenaAllocator> * tempBv; // Bit vector for temporary uses
  380. BVSparse<JitArenaAllocator> * objectTypeSyms;
  381. BVSparse<JitArenaAllocator> * prePassCopyPropSym; // Symbols that were copy prop'd during loop prepass
  382. // Symbols that refer to slots in the stack frame. We still use currentBlock->liveFields to tell us
  383. // which of these slots are live; this bit-vector just identifies which entries in liveFields represent
  384. // slots, so we can zero them all out quickly.
  385. BVSparse<JitArenaAllocator> * slotSyms;
  386. PropertySym * propertySymUse;
  387. BVSparse<JitArenaAllocator> * lengthEquivBv;
  388. BVSparse<JitArenaAllocator> * argumentsEquivBv;
  389. BVSparse<JitArenaAllocator> * callerEquivBv;
  390. BVSparse<JitArenaAllocator> * changedSymsAfterIncBailoutCandidate;
  391. BVSparse<JitArenaAllocator> * auxSlotPtrSyms;
  392. JitArenaAllocator * alloc;
  393. JitArenaAllocator * tempAlloc;
  394. Func * func;
  395. ValueNumber currentValue;
  396. BasicBlock * currentBlock;
  397. Region * currentRegion;
  398. IntOverflowDoesNotMatterRange *intOverflowDoesNotMatterRange;
  399. Loop * prePassLoop;
  400. Loop * rootLoopPrePass;
  401. uint instrCountSinceLastCleanUp;
  402. SymIdToInstrMap * prePassInstrMap;
  403. SymID maxInitialSymID;
  404. bool isCallHelper: 1;
  405. bool intOverflowCurrentlyMattersInRange : 1;
  406. bool ignoredIntOverflowForCurrentInstr : 1;
  407. bool ignoredNegativeZeroForCurrentInstr : 1;
  408. bool inInlinedBuiltIn : 1;
  409. bool isRecursiveCallOnLandingPad : 1;
  410. bool updateInductionVariableValueNumber : 1;
  411. bool isPerformingLoopBackEdgeCompensation : 1;
  412. bool doTypeSpec : 1;
  413. bool doAggressiveIntTypeSpec : 1;
  414. bool doAggressiveMulIntTypeSpec : 1;
  415. bool doDivIntTypeSpec : 1;
  416. bool doLossyIntTypeSpec : 1;
  417. bool doFloatTypeSpec : 1;
  418. bool doArrayCheckHoist : 1;
  419. bool doArrayMissingValueCheckHoist : 1;
  420. bool doArraySegmentHoist : 1;
  421. bool doJsArraySegmentHoist : 1;
  422. bool doArrayLengthHoist : 1;
  423. bool doEliminateArrayAccessHelperCall : 1;
  424. bool doTrackRelativeIntBounds : 1;
  425. bool doBoundCheckElimination : 1;
  426. bool doBoundCheckHoist : 1;
  427. bool doLoopCountBasedBoundCheckHoist : 1;
  428. bool doPowIntIntTypeSpec : 1;
  429. bool isAsmJSFunc : 1;
  430. bool doTagChecks : 1;
  431. OpndList * noImplicitCallUsesToInsert;
  432. ValueSetByValueNumber * valuesCreatedForClone;
  433. ValueNumberPairToValueMap *valuesCreatedForMerge;
  434. #if DBG
  435. BVSparse<JitArenaAllocator> * byteCodeUsesBeforeOpt;
  436. #endif
  437. public:
  438. GlobOpt(Func * func);
  439. void Optimize();
  440. // Function used by the backward pass as well.
  441. // GlobOptBailout.cpp
  442. static void TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym);
  443. // GlobOptFields.cpp
  444. void ProcessFieldKills(IR::Instr *instr, BVSparse<JitArenaAllocator> * bv, bool inGlobOpt);
  445. IR::ByteCodeUsesInstr * ConvertToByteCodeUses(IR::Instr * isntr);
  446. bool GetIsAsmJSFunc()const{ return isAsmJSFunc; };
  447. bool IsLoopPrePass() const { return this->prePassLoop != nullptr; }
  448. private:
  449. void OptBlock(BasicBlock *block);
  450. void BackwardPass(Js::Phase tag);
  451. void ForwardPass();
  452. void OptLoops(Loop *loop);
  453. void TailDupPass();
  454. bool TryTailDup(IR::BranchInstr *tailBranch);
  455. void FieldPRE(Loop *loop);
  456. void SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym);
  457. PRECandidates * FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc);
  458. void CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock);
  459. void CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
  460. void TryReplaceLdLen(IR::Instr *& instr);
  461. IR::Instr * OptInstr(IR::Instr *&instr, bool* isInstrCleared);
  462. Value* OptDst(IR::Instr **pInstr, Value *dstVal, Value *src1Val, Value *src2Val, Value *dstIndirIndexVal, Value *src1IndirIndexVal);
  463. void CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val);
  464. Value * OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef = nullptr, IR::IndirOpnd *parentIndirOpnd = nullptr);
  465. void MarkArgumentsUsedForBranch(IR::Instr *inst);
  466. bool OptTagChecks(IR::Instr *instr);
  467. void TryOptimizeInstrWithFixedDataProperty(IR::Instr * * const pInstr);
  468. bool CheckIfPropOpEmitsTypeCheck(IR::Instr *instr, IR::PropertySymOpnd *opnd);
  469. IR::PropertySymOpnd * CreateOpndForTypeCheckOnly(IR::PropertySymOpnd* opnd, Func* func);
  470. bool FinishOptPropOp(IR::Instr *instr, IR::PropertySymOpnd *opnd, BasicBlock* block = nullptr, bool updateExistingValue = false, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr);
  471. IR::Instr * SetTypeCheckBailOut(IR::Opnd *opnd, IR::Instr *instr, BailOutInfo *bailOutInfo);
  472. void OptArguments(IR::Instr *Instr);
  473. void TrackInstrsForScopeObjectRemoval(IR::Instr * instr);
  474. bool AreFromSameBytecodeFunc(IR::RegOpnd const* src1, IR::RegOpnd const* dst) const;
  475. Value * ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val);
  476. Value * ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal);
  477. ValueType GetPrepassValueTypeForDst(const ValueType desiredValueType, IR::Instr *const instr, Value *const src1Value, Value *const src2Value, bool const isValueInfoPreciseRef = false) const;
  478. bool IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue, bool * canTransferValueNumberToDst = nullptr) const;
  479. bool IsPrepassSrcValueInfoPrecise(IR::Instr *const instr, Value *const src1Value, Value *const src2Value, bool * canTransferValueNumberToDst = nullptr) const;
  480. bool IsSafeToTransferInPrepass(StackSym * const sym, ValueInfo *const srcValueInfo) const;
  481. bool SafeToCopyPropInPrepass(StackSym * const originalSym, StackSym * const copySym, Value *const value) const;
  482. Value * CreateDstUntransferredIntValue(const int32 min, const int32 max, IR::Instr *const instr, Value *const src1Value, Value *const src2Value);
  483. Value * CreateDstUntransferredValue(const ValueType desiredValueType, IR::Instr *const instr, Value *const src1Value, Value *const src2Value);
  484. Value * ValueNumberTransferDst(IR::Instr *const instr, Value *src1Val);
  485. bool IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue);
  486. Value * ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val);
  487. IR::Opnd * CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd = nullptr);
  488. IR::Opnd * CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd = nullptr);
  489. ValueNumber NewValueNumber();
  490. Value * NewValue(ValueInfo *const valueInfo);
  491. Value * NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo);
  492. Value * CopyValue(Value const *const value);
  493. Value * CopyValue(Value const *const value, const ValueNumber valueNumber);
  494. Value * NewGenericValue(const ValueType valueType);
  495. Value * NewGenericValue(const ValueType valueType, IR::Opnd *const opnd);
  496. Value * NewGenericValue(const ValueType valueType, Sym *const sym);
  497. Value * GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd = nullptr);
  498. Value * GetIntConstantValue(const int64 intConst, IR::Instr * instr, IR::Opnd *const opnd = nullptr);
  499. Value * NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable);
  500. Value * NewInt64ConstantValue(const int64 intConst, IR::Instr * instr);
  501. ValueInfo * NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout);
  502. ValueInfo * NewIntRangeValueInfo(const ValueInfo *const originalValueInfo, const int32 min, const int32 max) const;
  503. Value * NewIntRangeValue(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout, IR::Opnd *const opnd = nullptr);
  504. IntBoundedValueInfo * NewIntBoundedValueInfo(const ValueInfo *const originalValueInfo, const IntBounds *const bounds) const;
  505. Value * NewIntBoundedValue(const ValueType valueType, const IntBounds *const bounds, const bool wasNegativeZeroPreventedByBailout, IR::Opnd *const opnd = nullptr);
  506. Value * NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd = nullptr);
  507. Value * GetVarConstantValue(IR::AddrOpnd *addrOpnd);
  508. Value * NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString);
  509. Value * HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value);
  510. Value * NewFixedFunctionValue(Js::JavascriptFunction *functionValue, IR::AddrOpnd *addrOpnd);
  511. StackSym * GetTaggedIntConstantStackSym(const int32 intConstantValue) const;
  512. StackSym * GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const;
  513. Sym * SetSymStore(ValueInfo *valueInfo, Sym *sym);
  514. void SetSymStoreDirect(ValueInfo *valueInfo, Sym *sym);
  515. IR::Instr * TypeSpecialization(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, bool *redoTypeSpecRef, bool *const forceInvariantHoistingRef);
  516. IR::Instr * GetExtendedArg(IR::Instr *instr);
  517. void OptNewScObject(IR::Instr** instrPtr, Value* srcVal);
  518. template <typename T>
  519. bool OptConstFoldBinaryWasm(IR::Instr * *pInstr, const Value* src1, const Value* src2, Value **pDstVal);
  520. template <typename T>
  521. IR::Opnd* ReplaceWConst(IR::Instr **pInstr, T value, Value **pDstVal);
  522. bool OptConstFoldBinary(IR::Instr * *pInstr, const IntConstantBounds &src1IntConstantBounds, const IntConstantBounds &src2IntConstantBounds, Value **pDstVal);
  523. bool OptConstFoldUnary(IR::Instr * *pInstr, const int32 intConstantValue, const bool isUsingOriginalSrc1Value, Value **pDstVal);
  524. bool OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *vInfo);
  525. bool CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2Val, Js::Var src1Var, Js::Var src2Var, bool *result);
  526. bool OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal);
  527. Js::Var GetConstantVar(IR::Opnd *opnd, Value *val);
  528. bool IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val);
  529. bool IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst);
  530. bool IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val = nullptr);
  531. bool TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal);
  532. bool TypeSpecializeIntUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, int32 min, int32 max, Value *const src1OriginalVal, bool *redoTypeSpecRef, bool skipDst = false);
  533. bool TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst = false);
  534. void TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef);
  535. void TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal);
  536. void TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal);
  537. bool TypeSpecializeUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef, bool *const forceInvariantHoistingRef);
  538. bool TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef);
  539. bool TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst = false);
  540. bool TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal);
  541. void TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal);
  542. bool TypeSpecializeLdLen(IR::Instr * *const instrRef, Value * *const src1ValueRef, Value * *const dstValueRef, bool *const forceInvariantHoistingRef);
  543. void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
// --- Integer type specialization helpers and memop (memset/memcopy) loop
// --- transformation. Declarations only; definitions live in the GlobOpt .cpp files.

// Produce the type-specialized int value for instr's dst; the second overload
// additionally takes an explicit [newMin, newMax] range for the new dst value.
void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
bool TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal);
bool TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal);
bool TypeSpecializeStElem(IR::Instr **pInstr, Value *src1Val, Value **pDstVal);
bool ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd);
ValueType GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize);

// Candidate collection for memop: loops whose element stores/loads appear to be
// expressible as a bulk memset/memcopy — presumably driven per-loop; see
// ProcessMemOp / EmitMemop below for validation and emission.
bool IsInstrInvalidForMemOp(IR::Instr *, Loop *, Value *, Value *);
bool CollectMemOpInfo(IR::Instr *, IR::Instr *, Value *, Value *);
bool CollectMemOpStElementI(IR::Instr *, Loop *);
bool CollectMemsetStElementI(IR::Instr *, Loop *);
bool CollectMemcopyStElementI(IR::Instr *, Loop *);
bool CollectMemOpLdElementI(IR::Instr *, Loop *);
bool CollectMemcopyLdElementI(IR::Instr *, Loop *);
SymID GetVarSymID(StackSym *);
const InductionVariable* GetInductionVariable(SymID, Loop *);
bool IsSymIDInductionVariable(SymID, Loop *);
bool IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd);

// Validation of collected candidates and emission of the actual memop code.
void ProcessMemOp();
bool InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, struct MemSetEmitData* emitData, bool& errorInInstr);
bool InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, struct MemCopyEmitData* emitData, bool& errorInInstr);
bool ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) struct MemOpEmitData** emitData, int& iEmitData);
void EmitMemop(Loop * loop, LoopCount *loopCount, const struct MemOpEmitData* emitData);
IR::Opnd* GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr = nullptr);
IR::RegOpnd* GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr = nullptr);
LoopCount* GetOrGenerateLoopCountForMemOp(Loop *loop);
IR::Instr* FindUpperBoundsCheckInstr(IR::Instr* instr);
IR::Instr* FindArraySegmentLoadInstr(IR::Instr* instr);
void RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block);
void GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType);
bool HasMemOp(Loop * loop);
private:
// Mutation of a Value's type/info within a block's flow state, with
// compatibility checking between the old and new ValueInfo.
void ChangeValueType(BasicBlock *const block, Value *const value, const ValueType newValueType, const bool preserveSubclassInfo, const bool allowIncompatibleType = false) const;
void ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType = false, const bool compensated = false) const;
bool AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const;
private:
#if DBG
void VerifyArrayValueInfoForTracking(const ValueInfo *const valueInfo, const bool isJsArray, const BasicBlock *const block, const bool ignoreKnownImplicitCalls = false) const;
#endif
// Tracking of values whose info must be invalidated ("killed") when later
// instructions may change the underlying object (new/copied/merged values).
// NOTE(review): exact kill conditions live in the .cpp — verify there.
void TrackNewValueForKills(Value *const value);
void DoTrackNewValueForKills(Value *const value);
void TrackCopiedValueForKills(Value *const value);
void DoTrackCopiedValueForKills(Value *const value);
void TrackMergedValueForKills(Value *const value, GlobOptBlockData *const blockData, BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const;
void DoTrackMergedValueForKills(Value *const value, GlobOptBlockData *const blockData, BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const;
void TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const;
void ProcessValueKills(IR::Instr *const instr);
void ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData);
void ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData);
bool NeedBailOnImplicitCallForLiveValues(BasicBlock const * const block, const bool isForwardPass) const;
// Creation of array bounds-check instructions (lowerBound <= upperBound + offset
// shape is presumed — confirm against the .cpp definition).
IR::Instr* CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func);
IR::Instr* CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func* func);
IR::Instr* AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset);
void OptArraySrc(IR::Instr **const instrRef, Value ** src1Val, Value ** src2Val);
void OptStackArgLenAndConst(IR::Instr* instr, Value** src1Val);
private:
// Bound-check hoisting state cloned/merged across blocks, and induction
// variable bookkeeping used for loop-count-based hoisting.
void TrackIntSpecializedAddSubConstant(IR::Instr *const instr, const AddSubConstantInfo *const addSubConstantInfo, Value *const dstValue, const bool updateSourceBounds);
void CloneBoundCheckHoistBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock, GlobOptBlockData *const fromData);
void MergeBoundCheckHoistBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock, GlobOptBlockData *const fromData);
void DetectUnknownChangesToInductionVariables(GlobOptBlockData *const blockData);
void SetInductionVariableValueNumbers(GlobOptBlockData *const blockData);
void FinalizeInductionVariables(Loop *const loop, GlobOptBlockData *const headerData);
void InvalidateInductionVariables(IR::Instr * instr);
// How a sym's bound relates to its value in the loop landing pad: by a fixed
// OFFSET, by a known constant VALUE, or UNKNOWN.
enum class SymBoundType {OFFSET, VALUE, UNKNOWN};
SymBoundType DetermineSymBoundOffsetOrValueRelativeToLandingPad(StackSym *const sym, const bool landingPadValueIsLowerBound, ValueInfo *const valueInfo, const IntBounds *const bounds, GlobOptBlockData *const landingPadGlobOptBlockData, int *const boundOffsetOrValueRef);
private:
// Loop count computation and materialization (used by bound-check hoisting and memop).
void DetermineDominatingLoopCountableBlock(Loop *const loop, BasicBlock *const headerBlock);
void DetermineLoopCount(Loop *const loop);
void GenerateLoopCount(Loop *const loop, LoopCount *const loopCount);
void GenerateLoopCountPlusOne(Loop *const loop, LoopCount *const loopCount);
void GenerateSecondaryInductionVariableBound(Loop *const loop, StackSym *const inductionVariableSym, LoopCount *const loopCount, const int maxMagnitudeChange, const bool needsMagnitudeAdjustment, StackSym *const boundSym);
private:
void DetermineArrayBoundCheckHoistability(bool needLowerBoundCheck, bool needUpperBoundCheck, ArrayLowerBoundCheckHoistInfo &lowerHoistInfo, ArrayUpperBoundCheckHoistInfo &upperHoistInfo, const bool isJsArray, StackSym *const indexSym, Value *const indexValue, const IntConstantBounds &indexConstantBounds, StackSym *const headSegmentLengthSym, Value *const headSegmentLengthValue, const IntConstantBounds &headSegmentLengthConstantBounds, Loop *const headSegmentLengthInvariantLoop, bool &failedToUpdateCompatibleLowerBoundCheck, bool &failedToUpdateCompatibleUpperBoundCheck);
private:
void CaptureNoImplicitCallUses(IR::Opnd *opnd, const bool usesNoMissingValuesInfo, IR::Instr *const includeCurrentInstr = nullptr);
void InsertNoImplicitCallUses(IR::Instr *const instr);
void PrepareLoopArrayCheckHoist();
public:
JsArrayKills CheckJsArrayKills(IR::Instr *const instr);
// Accessors for the flow state of the block currently being optimized.
GlobOptBlockData const * CurrentBlockData() const;
GlobOptBlockData * CurrentBlockData();
void CommitCapturedValuesCandidate();
private:
bool IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr);
bool NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock const * const block, const bool isForwardPass) const;
private:
// Support for regions where int overflow may be ignored (with DBG verification).
bool PrepareForIgnoringIntOverflow(IR::Instr *const instr);
void VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr);
void PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val);
void ProcessKills(IR::Instr *instr);
void InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
void InsertValueCompensation(BasicBlock *const predecessor, const SymToValueInfoMap &symsRequiringCompensationToMergedValueInfoMap);
// Conversions between the boxed "var" representation and type-specialized
// representations (int32/float64). The BVSparse overloads convert every sym in
// the bit-vector at a block boundary; the IR::Instr overloads convert a single
// operand, inserting the conversion before the given instruction.
IR::Instr * ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val);
void ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
IR::Instr * ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *val, bool needsUpdate);
void ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr = nullptr);
void ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
void ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind = IR::BailOutInvalid, bool lossy = false, IR::Instr *insertBeforeInstr = nullptr);
IR::Instr * ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy);
IR::Instr * ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind);
IR::Instr * ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir,
IRType toType, IR::BailOutKind bailOutKind, bool lossy = false, IR::Instr *insertBeforeInstr = nullptr);
IR::Instr * ToTypeSpecIndex(IR::Instr * instr, IR::RegOpnd * opnd, IR::IndirOpnd * indir);
void ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block);
void ToVarStackSym(StackSym *varSym, BasicBlock *block);
void ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
void ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
void ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
// Branch constant folding and int-range propagation through ops.
void OptConstFoldBr(bool test, IR::Instr *instr, Value * intTypeSpecSrc1Val = nullptr, Value * intTypeSpecSrc2Val = nullptr);
void PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32 * pNewMax);
void PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
int32 min2, int32 max2, int32 *pNewMin, int32 *pNewMax);
// Loop-invariant detection and hoisting (operand / sym / whole-instruction forms).
bool OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives);
bool OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef = nullptr);
bool OptDstIsInvariant(IR::RegOpnd *dst);
bool OptIsInvariant(IR::Instr *instr, BasicBlock *block, Loop *loop, Value *src1Val, Value *src2Val, bool isNotTypeSpecConv, const bool forceInvariantHoisting = false);
void OptHoistInvariant(IR::Instr *instr, BasicBlock *block, Loop *loop, Value *dstVal, Value *const src1Val, Value *const src2Value,
bool isNotTypeSpecConv, bool lossy = false, IR::BailOutKind bailoutKind = IR::BailOutInvalid);
bool TryHoistInvariant(IR::Instr *instr, BasicBlock *block, Value *dstVal, Value *src1Val, Value *src2Val, bool isNotTypeSpecConv,
const bool lossy = false, const bool forceInvariantHoisting = false, IR::BailOutKind bailoutKind = IR::BailOutInvalid);
void HoistInvariantValueInfo(ValueInfo *const invariantValueInfoToHoist, Value *const valueToUpdate, BasicBlock *const targetBlock);
void OptHoistUpdateValueType(Loop* loop, IR::Instr* instr, IR::Opnd** srcOpndPtr, Value *const srcVal);
public:
// Static phase queries: whether a given optimization is enabled for 'func'
// (presumably consulting phase switches and profile info — see the .cpp).
static bool IsTypeSpecPhaseOff(Func const * func);
static bool DoAggressiveIntTypeSpec(Func const * func);
static bool DoLossyIntTypeSpec(Func const * func);
static bool DoFloatTypeSpec(Func const * func);
static bool DoStringTypeSpec(Func const * func);
static bool DoArrayCheckHoist(Func const * const func);
static bool DoArrayMissingValueCheckHoist(Func const * const func);
static bool DoArraySegmentHoist(const ValueType baseValueType, Func const * const func);
static bool DoArrayLengthHoist(Func const * const func);
static bool DoEliminateArrayAccessHelperCall(Func* func);
static bool DoTypedArrayTypeSpec(Func const * func);
static bool DoNativeArrayTypeSpec(Func const * func);
static bool IsSwitchOptEnabled(Func const * func);
static bool IsSwitchOptEnabledForIntTypeSpec(Func const * func);
static bool DoInlineArgsOpt(Func const * func);
static bool IsPREInstrCandidateLoad(Js::OpCode opcode);
static bool IsPREInstrSequenceCandidateLoad(Js::OpCode opcode);
static bool IsPREInstrCandidateStore(Js::OpCode opcode);
static bool ImplicitCallFlagsAllowOpts(Loop * loop);
static bool ImplicitCallFlagsAllowOpts(Func const * func);
private:
// Instance-level counterparts of the queries above. The inline one-liners
// simply forward to the static query with this->func; the rest are defined in
// the .cpp (some with extra per-loop/per-instruction context).
bool DoConstFold() const;
bool DoTypeSpec() const;
bool DoAggressiveIntTypeSpec() const;
bool DoAggressiveMulIntTypeSpec() const;
bool DoDivIntTypeSpec() const;
bool DoLossyIntTypeSpec() const;
bool DoFloatTypeSpec() const;
bool DoStringTypeSpec() const { return GlobOpt::DoStringTypeSpec(this->func); }
bool DoArrayCheckHoist() const;
bool DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr const * const instr = nullptr) const;
bool DoArrayMissingValueCheckHoist() const;
bool DoArraySegmentHoist(const ValueType baseValueType) const;
bool DoTypedArraySegmentLengthHoist(Loop *const loop) const;
bool DoArrayLengthHoist() const;
bool DoEliminateArrayAccessHelperCall() const;
bool DoTypedArrayTypeSpec() const { return GlobOpt::DoTypedArrayTypeSpec(this->func); }
bool DoNativeArrayTypeSpec() const { return GlobOpt::DoNativeArrayTypeSpec(this->func); }
bool DoLdLenIntSpec(IR::Instr * const instr, const ValueType baseValueType);
bool IsSwitchOptEnabled() const { return GlobOpt::IsSwitchOptEnabled(this->func); }
bool IsSwitchOptEnabledForIntTypeSpec() const { return GlobOpt::IsSwitchOptEnabledForIntTypeSpec(this->func); }
bool DoPathDependentValues() const;
bool DoTrackRelativeIntBounds() const;
bool DoBoundCheckElimination() const;
bool DoBoundCheckHoist() const;
bool DoLoopCountBasedBoundCheckHoist() const;
bool DoPowIntIntTypeSpec() const;
bool DoTagChecks() const;
  715. template <class Fn>
  716. void TrackByteCodeUsesForInstrAddedInOptInstr(IR::Instr * trackByteCodeUseOnInstr, Fn fn)
  717. {
  718. BVSparse<JitArenaAllocator> *currentBytecodeUses = this->byteCodeUses;
  719. PropertySym * currentPropertySymUse = this->propertySymUse;
  720. PropertySym * tempPropertySymUse = NULL;
  721. this->byteCodeUses = NULL;
  722. BVSparse<JitArenaAllocator> *tempByteCodeUse = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  723. #if DBG
  724. BVSparse<JitArenaAllocator> *currentBytecodeUsesBeforeOpt = this->byteCodeUsesBeforeOpt;
  725. this->byteCodeUsesBeforeOpt = tempByteCodeUse;
  726. #endif
  727. this->propertySymUse = NULL;
  728. GlobOpt::TrackByteCodeSymUsed(trackByteCodeUseOnInstr, tempByteCodeUse, &tempPropertySymUse);
  729. fn();
  730. this->byteCodeUses = currentBytecodeUses;
  731. this->propertySymUse = currentPropertySymUse;
  732. #if DBG
  733. this->byteCodeUsesBeforeOpt = currentBytecodeUsesBeforeOpt;
  734. #endif
  735. }
private:
// GlobOptBailout.cpp
// Byte-code symbol use tracking and bailout info capture: records which syms a
// bailout would need to restore, and builds the BailOutInfo for instructions
// that can bail out back to the interpreter.
bool MayNeedBailOut(Loop * loop) const;
static void TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySymUse);
static void TrackByteCodeSymUsed(IR::RegOpnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed);
static void TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed);
void CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator>* argsToCapture);
void CaptureValuesFromScratch(
BasicBlock * block,
SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter, SListBase<CopyPropSyms>::EditingIterator & bailOutCopyPropIter,
BVSparse<JitArenaAllocator>* argsToCapture);
void CaptureValuesIncremental(
BasicBlock * block,
SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
SListBase<CopyPropSyms>::EditingIterator & bailOutCopyPropIter, BVSparse<JitArenaAllocator>* argsToCapture);
void CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter);
void CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator);
void CaptureByteCodeSymUses(IR::Instr * instr);
IR::ByteCodeUsesInstr * InsertByteCodeUses(IR::Instr * instr, bool includeDef = false);
// Inlinee call-site tracking (frame info needed if a bailout happens inside an inlinee).
void ProcessInlineeEnd(IR::Instr * instr);
void TrackCalls(IR::Instr * instr);
void RecordInlineeFrameInfo(IR::Instr* instr);
void EndTrackCall(IR::Instr * instr);
void EndTrackingOfArgObjSymsForInlinee();
void FillBailOutInfo(BasicBlock *block, BailOutInfo *bailOutInfo);
void FillBailOutInfo(BasicBlock *block, _In_ IR::Instr * instr);
static void MarkNonByteCodeUsed(IR::Instr * instr);
static void MarkNonByteCodeUsed(IR::Opnd * opnd);
// Whether an implicit-call bailout must be attached to the given instruction,
// given the current values and (for the second overload) block-level state.
bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const;
bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass) const;
static bool IsTypeCheckProtected(const IR::Instr * instr);
static bool MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val);
static bool MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val);
void GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind);
public:
void GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind);
private:
IR::Instr * EnsureBailTarget(Loop * loop);
// GlobOptFields.cpp
// Field (property) value tracking: kills, copy-prop, and object-type
// specialization with equivalent-type-set checks.
void ProcessFieldKills(IR::Instr * instr);
void KillLiveFields(StackSym * stackSym, BVSparse<JitArenaAllocator> * bv);
void KillLiveFields(PropertySym * propertySym, BVSparse<JitArenaAllocator> * bv);
void KillLiveFields(BVSparse<JitArenaAllocator> *const fieldsToKill, BVSparse<JitArenaAllocator> *const bv) const;
void KillLiveElems(IR::IndirOpnd * indirOpnd, BVSparse<JitArenaAllocator> * bv, bool inGlobOpt, Func *func);
void KillAllFields(BVSparse<JitArenaAllocator> * bv);
void SetAnyPropertyMayBeWrittenTo();
void AddToPropertiesWrittenTo(Js::PropertyId propertyId);
// Per-function / per-loop enablement queries for the field optimizations.
bool DoFieldCopyProp() const;
bool DoFieldCopyProp(Loop * loop) const;
bool DoFunctionFieldCopyProp() const;
bool DoObjTypeSpec() const;
bool DoObjTypeSpec(Loop * loop) const;
bool DoFieldRefOpts() const { return DoObjTypeSpec(); }
bool DoFieldRefOpts(Loop * loop) const { return DoObjTypeSpec(loop); }
bool DoFieldOpts(Loop * loop) const;
bool DoFieldPRE() const;
bool DoFieldPRE(Loop *loop) const;
Value * CreateFieldSrcValue(PropertySym * sym, PropertySym * originalSym, IR::Opnd **ppOpnd, IR::Instr * instr);
bool NeedBailOnImplicitCallWithFieldOpts(Loop *loop, bool hasLiveFields) const;
IR::Instr * EnsureDisableImplicitCallRegion(Loop * loop);
void UpdateObjPtrValueType(IR::Opnd * opnd, IR::Instr * instr);
bool TrackArgumentsObject();
void CannotAllocateArgumentsObjectOnStack(Func * curFunc);
#if DBG
bool IsPropertySymId(SymID symId) const;
static void AssertCanCopyPropOrCSEFieldLoad(IR::Instr * instr);
void EmitIntRangeChecks(IR::Instr* instr);
void EmitIntRangeChecks(IR::Instr* instr, IR::Opnd* opnd);
#endif
// Object-type specialization: type syms, property write guards, and
// type-check sequences over equivalent type sets.
StackSym * EnsureObjectTypeSym(StackSym * objectSym);
PropertySym * EnsurePropertyWriteGuardSym(PropertySym * propertySym);
void PreparePropertySymForTypeCheckSeq(PropertySym *propertySym);
bool IsPropertySymPreparedForTypeCheckSeq(PropertySym *propertySym);
bool PreparePropertySymOpndForTypeCheckSeq(IR::PropertySymOpnd *propertySymOpnd, IR::Instr * instr, Loop *loop);
static bool AreTypeSetsIdentical(Js::EquivalentTypeSet * leftTypeSet, Js::EquivalentTypeSet * rightTypeSet);
static bool IsSubsetOf(Js::EquivalentTypeSet * leftTypeSet, Js::EquivalentTypeSet * rightTypeSet);
static bool CompareCurrentTypesWithExpectedTypes(JsTypeValueInfo *valueInfo, IR::PropertySymOpnd * propertySymOpnd);
bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd);
bool CheckIfInstrInTypeCheckSeqEmitsTypeCheck(IR::Instr* instr, IR::PropertySymOpnd *opnd);
// makeChanges=false performs a dry run (query only); see the two wrappers above.
template<bool makeChanges>
bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd, BasicBlock* block, bool updateExistingValue, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr, bool *isObjTypeChecked = nullptr);
StackSym * EnsureAuxSlotPtrSym(IR::PropertySymOpnd *opnd);
void KillAuxSlotPtrSyms(IR::PropertySymOpnd *opnd, BasicBlock *block, bool isObjTypeSpecialized);
template<class Fn>
bool MapObjectHeaderInlinedTypeSymsUntil(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId, Fn fn);
void KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = SymID_Invalid);
bool HasLiveObjectHeaderInlinedTypeSym(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = SymID_Invalid);
void ValueNumberObjectType(IR::Opnd *dstOpnd, IR::Instr *instr);
void SetSingleTypeOnObjectTypeValue(Value* value, const JITTypeHolder type);
void SetTypeSetOnObjectTypeValue(Value* value, Js::EquivalentTypeSet* typeSet);
void UpdateObjectTypeValue(Value* value, const JITTypeHolder type, bool setType, Js::EquivalentTypeSet* typeSet, bool setTypeSet);
void SetObjectTypeFromTypeSym(StackSym *typeSym, Value* value, BasicBlock* block = nullptr);
void SetObjectTypeFromTypeSym(StackSym *typeSym, const JITTypeHolder type, Js::EquivalentTypeSet * typeSet, BasicBlock* block = nullptr, bool updateExistingValue = false);
void SetObjectTypeFromTypeSym(StackSym *typeSym, const JITTypeHolder type, Js::EquivalentTypeSet * typeSet, GlobOptBlockData *blockData, bool updateExistingValue = false);
void KillObjectType(StackSym *objectSym, BVSparse<JitArenaAllocator>* liveFields = nullptr);
void KillAllObjectTypes(BVSparse<JitArenaAllocator>* liveFields = nullptr);
void EndFieldLifetime(IR::SymOpnd *symOpnd);
PropertySym * CopyPropPropertySymObj(IR::SymOpnd *opnd, IR::Instr *instr);
static bool NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind);
// Peephole cleanup, exception-handling edge maintenance, and CSE.
IR::Instr * PreOptPeep(IR::Instr *instr);
IR::Instr * OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val);
void OptimizeIndirUses(IR::IndirOpnd *indir, IR::Instr * *pInstr, Value **indirIndexValRef);
void RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr);
void ProcessTryHandler(IR::Instr* instr);
bool ProcessExceptionHandlingEdges(IR::Instr* instr);
void InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd);
void RemoveFlowEdgeToCatchBlock(IR::Instr * instr);
bool RemoveFlowEdgeToFinallyOnExceptionBlock(IR::Instr * instr);
void CSEAddInstr(BasicBlock *block, IR::Instr *instr, Value *dstVal, Value *src1Val, Value *src2Val, Value *dstIndirIndexVal, Value *src1IndirIndexVal);
void OptimizeChecks(IR::Instr * const instr);
bool CSEOptimize(BasicBlock *block, IR::Instr * *const instrRef, Value **pSrc1Val, Value **pSrc2Val, Value **pSrc1IndirIndexVal, bool intMathExprOnly = false);
bool GetHash(IR::Instr *instr, Value *src1Val, Value *src2Val, ExprAttributes exprAttributes, ExprHash *pHash);
void ProcessArrayValueKills(IR::Instr *instr);
static bool NeedBailOnImplicitCallForCSE(BasicBlock const *block, bool isForwardPass);
bool DoCSE();
bool CanCSEArrayStore(IR::Instr *instr);
#if DBG_DUMP
// Debug-only tracing/dumping of the optimizer state.
void Dump() const;
void DumpSymToValueMap() const;
void DumpSymToValueMap(BasicBlock const * block) const;
void DumpSymVal(int index);
void Trace(BasicBlock * basicBlock, bool before) const;
void TraceSettings() const;
#endif
bool IsWorthSpecializingToInt32Branch(IR::Instr const * instr, Value const * src1Val, Value const * src2Val) const;
// Constant folding of branches when the int ranges of the operands decide the
// comparison outright; each returns whether the fold was performed.
bool TryOptConstFoldBrFalse(IR::Instr *const instr, Value *const srcValue, const int32 min, const int32 max);
bool TryOptConstFoldBrEqual(IR::Instr *const instr, const bool branchOnEqual, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrGreaterThan(IR::Instr *const instr, const bool branchOnGreaterThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrGreaterThanOrEqual(IR::Instr *const instr, const bool branchOnGreaterThanOrEqual, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrUnsignedLessThan(IR::Instr *const instr, const bool branchOnLessThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrUnsignedGreaterThan(IR::Instr *const instr, const bool branchOnGreaterThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
// Refinement of int bounds implied by taking (or not taking) a comparison branch.
void UpdateIntBoundsForEqualBranch(Value *const src1Value, Value *const src2Value, const int32 src2ConstantValue = 0);
void UpdateIntBoundsForNotEqualBranch(Value *const src1Value, Value *const src2Value, const int32 src2ConstantValue = 0);
void UpdateIntBoundsForGreaterThanOrEqualBranch(Value *const src1Value, Value *const src2Value);
void UpdateIntBoundsForGreaterThanBranch(Value *const src1Value, Value *const src2Value);
void UpdateIntBoundsForLessThanOrEqualBranch(Value *const src1Value, Value *const src2Value);
void UpdateIntBoundsForLessThanBranch(Value *const src1Value, Value *const src2Value);
IntBounds * GetIntBoundsToUpdate(const ValueInfo *const valueInfo, const IntConstantBounds &constantBounds, const bool isSettingNewBound, const bool isBoundConstant, const bool isSettingUpperBound, const bool isExplicit);
ValueInfo * UpdateIntBoundsForEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForNotEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForGreaterThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForGreaterThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const int boundOffset, const bool isExplicit);
ValueInfo * UpdateIntBoundsForGreaterThan(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForLessThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForLessThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const int boundOffset, const bool isExplicit);
ValueInfo * UpdateIntBoundsForLessThan(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
// Path-dependent value info (facts valid only along one branch direction).
void SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info);
PathDependentInfoToRestore UpdatePathDependentInfo(PathDependentInfo *const info);
void RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore);
// Mark-temp-object tracking for bailout correctness.
IR::Instr * TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrEnd);
void TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd);
IR::Instr * GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst);
friend class InvariantBlockBackwardIterator;
};
  890. class GlobOpt::PRE
  891. {
  892. public:
  893. PRE(GlobOpt * globOpt) : globOpt(globOpt) {}
  894. void FieldPRE(Loop *loop);
  895. private:
  896. void FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc);
  897. void PreloadPRECandidates(Loop *loop);
  898. BOOL PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate);
  899. IR::Instr * InsertPropertySymPreloadInLandingPad(IR::Instr * origLdInstr, Loop * loop, PropertySym * propertySym);
  900. void InsertInstrInLandingPad(IR::Instr * instr, Loop * loop);
  901. bool InsertSymDefinitionInLandingPad(StackSym * sym, Loop * loop, Sym ** objPtrCopyPropSym);
  902. void MakePropertySymLiveOnBackEdges(PropertySym * propertySym, Loop * loop, Value * valueToAdd);
  903. void RemoveOverlyOptimisticInitialValues(Loop * loop);
  904. #if DBG_DUMP
  905. void TraceFailedPreloadInLandingPad(const Loop *const loop, PropertySym * propSym, const char16* reason) const;
  906. #endif
  907. private:
  908. GlobOpt * globOpt;
  909. PRECandidates * candidates;
  910. };