GlobOpt.h 61 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. class BackwardPass;
  6. class LoopCount;
  7. class GlobOpt;
#if ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP

// Tracing helpers for the global optimizer. They emit output only when the
// GlobOpt phase trace flag is enabled for this function, and (for the GOPT_*
// variants) not during a loop prepass, which would otherwise trace the same
// instructions twice.
// NOTE(review): these expand to unbraced `if` statements (and GOPT_TRACE_BLOCK
// to two bare statements), so they are not safe directly under an unbraced
// `if`/`else` at a call site -- brace the call site.

// Dump an operand, then print a printf-style message.
#define GOPT_TRACE_OPND(opnd, ...) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        Output::Print(_u("TRACE: ")); \
        opnd->Dump(); \
        Output::Print(_u(" : ")); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }

// Print a printf-style trace message.
#define GOPT_TRACE(...) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        Output::Print(_u("TRACE: ")); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }

// Dump an instruction with no extra message.
#define GOPT_TRACE_INSTRTRACE(instr) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        instr->Dump(); \
        Output::Flush(); \
    }

// Print a message, then dump the instruction it refers to.
#define GOPT_TRACE_INSTR(instr, ...) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        Output::Print(_u("TRACE: ")); \
        Output::Print(__VA_ARGS__); \
        instr->Dump(); \
        Output::Flush(); \
    }

// Trace a basic block before/after optimization (delegates to this->Trace).
#define GOPT_TRACE_BLOCK(block, before) \
    this->Trace(block, before); \
    Output::Flush();

// TODO: OOP JIT, add back line number
// Trace an instruction for an arbitrary phase, prefixed with the function
// (and the inlinee, when the instruction belongs to one) it came from.
#define TRACE_PHASE_INSTR(phase, instr, ...) \
    if(PHASE_TRACE(phase, this->func)) \
    { \
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
        Output::Print( \
            _u("Function %s (%s)"), \
            this->func->GetJITFunctionBody()->GetDisplayName(), \
            this->func->GetDebugNumberSet(debugStringBuffer)); \
        if(this->func->IsLoopBody()) \
        { \
            Output::Print(_u(", loop %u"), this->func->GetWorkItem()->GetLoopNumber()); \
        } \
        if(instr->m_func != this->func) \
        { \
            Output::Print( \
                _u(", Inlinee %s (%s)"), \
                instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
                instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
        } \
        Output::Print(_u(" - %s\n "), Js::PhaseNames[phase]); \
        instr->Dump(); \
        Output::Print(_u(" ")); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }

// Same as TRACE_PHASE_INSTR, but only when the Verbose flag is set.
#define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...) \
    if(CONFIG_FLAG(Verbose)) \
    { \
        TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
    }

// Emit both the regular phase trace and the test trace for an instruction.
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
    TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);

#else // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP

// Retail builds: the trace macros compile away to nothing; only the test
// trace (which has its own retail-capable macro) survives.
#define GOPT_TRACE(...)
#define GOPT_TRACE_OPND(opnd, ...)
#define GOPT_TRACE_INSTRTRACE(instr)
#define GOPT_TRACE_INSTR(instr, ...)
#define GOPT_TRACE_BLOCK(block, before)
#define TRACE_PHASE_INSTR(phase, instr, ...)
#define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...)
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
#endif // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
// Packs the "ignored int overflow" and "ignored negative zero" flags of an
// integer math expression into ExprAttributes bit storage (bits 0 and 1).
class IntMathExprAttributes : public ExprAttributes
{
private:
    // Bit positions within the ExprAttributes storage.
    static const uint IgnoredIntOverflowIndex = 0;
    static const uint IgnoredNegativeZeroIndex = 1;

public:
    // Reinterpret generic attributes as int-math attributes.
    IntMathExprAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
    {
    }

    // Build the attributes from the two flags.
    IntMathExprAttributes(const bool ignoredIntOverflow, const bool ignoredNegativeZero)
    {
        SetBitAttribute(IgnoredIntOverflowIndex, ignoredIntOverflow);
        SetBitAttribute(IgnoredNegativeZeroIndex, ignoredNegativeZero);
    }
};
// Packs the signedness of a conversion's destination and source into
// ExprAttributes bit storage (bits 0 and 1).
class ConvAttributes : public ExprAttributes
{
private:
    // Bit positions within the ExprAttributes storage.
    static const uint DstUnsignedIndex = 0;
    static const uint SrcUnsignedIndex = 1;

public:
    // Reinterpret generic attributes as conversion attributes.
    ConvAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
    {
    }

    // Build the attributes from the two signedness flags.
    ConvAttributes(const bool isDstUnsigned, const bool isSrcUnsigned)
    {
        SetBitAttribute(DstUnsignedIndex, isDstUnsigned);
        SetBitAttribute(SrcUnsignedIndex, isSrcUnsigned);
    }
};
// Packs the "destination is int only" / "destination is number only" flags
// into ExprAttributes bit storage (bits 0 and 1).
class DstIsIntOrNumberAttributes : public ExprAttributes
{
private:
    // Bit positions within the ExprAttributes storage.
    static const uint DstIsIntOnlyIndex = 0;
    static const uint DstIsNumberOnlyIndex = 1;

public:
    // Reinterpret generic attributes as dst-is-int-or-number attributes.
    DstIsIntOrNumberAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
    {
    }

    // Build the attributes from the two flags.
    DstIsIntOrNumberAttributes(const bool dstIsIntOnly, const bool dstIsNumberOnly)
    {
        SetBitAttribute(DstIsIntOnlyIndex, dstIsIntOnly);
        SetBitAttribute(DstIsNumberOnlyIndex, dstIsNumberOnly);
    }
};
// Comparison relationship known to hold between two values along a
// control-flow path (see PathDependentInfo). Backed by uint8 to keep the
// containing structures small.
enum class PathDependentRelationship : uint8
{
    Equal,
    NotEqual,
    GreaterThanOrEqual,
    GreaterThan,
    LessThanOrEqual,
    LessThan
};
  140. class PathDependentInfo
  141. {
  142. private:
  143. Value *leftValue, *rightValue;
  144. int32 rightConstantValue;
  145. PathDependentRelationship relationship;
  146. public:
  147. PathDependentInfo(const PathDependentRelationship relationship, Value *const leftValue, Value *const rightValue)
  148. : relationship(relationship), leftValue(leftValue), rightValue(rightValue)
  149. {
  150. Assert(leftValue);
  151. Assert(rightValue);
  152. }
  153. PathDependentInfo(
  154. const PathDependentRelationship relationship,
  155. Value *const leftValue,
  156. Value *const rightValue,
  157. const int32 rightConstantValue)
  158. : relationship(relationship), leftValue(leftValue), rightValue(rightValue), rightConstantValue(rightConstantValue)
  159. {
  160. Assert(leftValue);
  161. }
  162. public:
  163. bool HasInfo() const
  164. {
  165. return !!leftValue;
  166. }
  167. PathDependentRelationship Relationship() const
  168. {
  169. Assert(HasInfo());
  170. return relationship;
  171. }
  172. Value *LeftValue() const
  173. {
  174. Assert(HasInfo());
  175. return leftValue;
  176. }
  177. Value *RightValue() const
  178. {
  179. Assert(HasInfo());
  180. return rightValue;
  181. }
  182. int32 RightConstantValue() const
  183. {
  184. Assert(!RightValue());
  185. return rightConstantValue;
  186. }
  187. };
  188. class PathDependentInfoToRestore
  189. {
  190. private:
  191. ValueInfo *leftValueInfo, *rightValueInfo;
  192. public:
  193. PathDependentInfoToRestore() : leftValueInfo(nullptr), rightValueInfo(nullptr)
  194. {
  195. }
  196. PathDependentInfoToRestore(ValueInfo *const leftValueInfo, ValueInfo *const rightValueInfo)
  197. : leftValueInfo(leftValueInfo), rightValueInfo(rightValueInfo)
  198. {
  199. }
  200. public:
  201. ValueInfo *LeftValueInfo() const
  202. {
  203. return leftValueInfo;
  204. }
  205. ValueInfo *RightValueInfo() const
  206. {
  207. return rightValueInfo;
  208. }
  209. public:
  210. void Clear()
  211. {
  212. leftValueInfo = nullptr;
  213. rightValueInfo = nullptr;
  214. }
  215. };
// Arena-allocated container aliases used throughout the optimizer.
typedef JsUtil::List<IR::Opnd *, JitArenaAllocator> OpndList;
typedef JsUtil::BaseDictionary<Sym *, ValueInfo *, JitArenaAllocator> SymToValueInfoMap;
typedef JsUtil::BaseDictionary<SymID, IR::Instr *, JitArenaAllocator> SymIdToInstrMap;
// Set of Values keyed by their value number.
typedef JsUtil::BaseHashSet<Value *, JitArenaAllocator, PowerOf2SizePolicy, ValueNumber> ValueSetByValueNumber;
typedef JsUtil::BaseDictionary<SymID, StackSym *, JitArenaAllocator> SymIdToStackSymMap;
// Pair of value numbers, used as a dictionary key below.
typedef JsUtil::Pair<ValueNumber, ValueNumber> ValueNumberPair;
typedef JsUtil::BaseDictionary<ValueNumberPair, Value *, JitArenaAllocator> ValueNumberPairToValueMap;
namespace JsUtil
{
    // Hash-table value entry specialization for StackLiteralInitFldData.
    template <>
    class ValueEntry<StackLiteralInitFldData> : public BaseValueEntry<StackLiteralInitFldData>
    {
    public:
        void Clear()
        {
            // On debug builds, poison the cleared entry (null propIds, -1
            // count) so stale reuse is easy to spot; retail clearing is a
            // no-op.
#if DBG
            this->value.propIds = nullptr;
            this->value.currentInitFldCount = (uint)-1;
#endif
        }
    };
};
// Maps from a constant (int, int64, address, or string) to the single
// StackSym / Value the optimizer created for it, so equal constants share
// one sym/value.
typedef JsUtil::BaseDictionary<IntConstType, StackSym *, JitArenaAllocator> IntConstantToStackSymMap;
typedef JsUtil::BaseDictionary<int32, Value *, JitArenaAllocator> IntConstantToValueMap;
typedef JsUtil::BaseDictionary<int64, Value *, JitArenaAllocator> Int64ConstantToValueMap;
typedef JsUtil::BaseDictionary<Js::Var, Value *, JitArenaAllocator> AddrConstantToValueMap;
typedef JsUtil::BaseDictionary<Js::InternalString, Value *, JitArenaAllocator> StringConstantToValueMap;
  243. class JsArrayKills
  244. {
  245. private:
  246. union
  247. {
  248. struct
  249. {
  250. bool killsAllArrays : 1;
  251. bool killsArraysWithNoMissingValues : 1;
  252. bool killsNativeArrays : 1;
  253. bool killsArrayHeadSegments : 1;
  254. bool killsArrayHeadSegmentLengths : 1;
  255. bool killsArrayLengths : 1;
  256. };
  257. byte bits;
  258. };
  259. public:
  260. JsArrayKills() : bits(0)
  261. {
  262. }
  263. private:
  264. JsArrayKills(const byte bits) : bits(bits)
  265. {
  266. }
  267. public:
  268. bool KillsAllArrays() const { return killsAllArrays; }
  269. void SetKillsAllArrays() { killsAllArrays = true; }
  270. bool KillsArraysWithNoMissingValues() const { return killsArraysWithNoMissingValues; }
  271. void SetKillsArraysWithNoMissingValues() { killsArraysWithNoMissingValues = true; }
  272. bool KillsNativeArrays() const { return killsNativeArrays; }
  273. void SetKillsNativeArrays() { killsNativeArrays = true; }
  274. bool KillsArrayHeadSegments() const { return killsArrayHeadSegments; }
  275. void SetKillsArrayHeadSegments() { killsArrayHeadSegments = true; }
  276. bool KillsArrayHeadSegmentLengths() const { return killsArrayHeadSegmentLengths; }
  277. void SetKillsArrayHeadSegmentLengths() { killsArrayHeadSegmentLengths = true; }
  278. bool KillsTypedArrayHeadSegmentLengths() const { return KillsAllArrays(); }
  279. bool KillsArrayLengths() const { return killsArrayLengths; }
  280. void SetKillsArrayLengths() { killsArrayLengths = true; }
  281. public:
  282. bool KillsValueType(const ValueType valueType) const
  283. {
  284. Assert(valueType.IsArrayOrObjectWithArray());
  285. return
  286. killsAllArrays ||
  287. (killsArraysWithNoMissingValues && valueType.HasNoMissingValues()) ||
  288. (killsNativeArrays && !valueType.HasVarElements());
  289. }
  290. bool AreSubsetOf(const JsArrayKills &other) const
  291. {
  292. return (bits & other.bits) == bits;
  293. }
  294. JsArrayKills Merge(const JsArrayKills &other)
  295. {
  296. return bits | other.bits;
  297. }
  298. };
// Walks basic blocks backward from just before exclusiveBeginBlock, tracking
// the value of a given invariant sym in each block visited. Definitions live
// in the GlobOpt implementation files.
class InvariantBlockBackwardIterator
{
private:
    GlobOpt *const globOpt;
    BasicBlock *const exclusiveEndBlock;        // presumably the walk stops at (without visiting) this block -- named "exclusive"
    StackSym *const invariantSym;               // sym whose value is tracked per block
    const ValueNumber invariantSymValueNumber;  // optional expected value number; InvalidValueNumber by default
    BasicBlock *block;                          // current block of the walk
    Value *invariantSymValue;                   // invariantSym's value in the current block
#if DBG
    BasicBlock *const inclusiveEndBlock;        // debug-only bookkeeping of the inclusive end of the range
#endif

public:
    InvariantBlockBackwardIterator(GlobOpt *const globOpt, BasicBlock *const exclusiveBeginBlock, BasicBlock *const inclusiveEndBlock, StackSym *const invariantSym, const ValueNumber invariantSymValueNumber = InvalidValueNumber);

public:
    bool IsValid() const;              // whether the iterator still points at a block
    void MoveNext();                   // step backward to the previous block
    BasicBlock *Block() const;
    Value *InvariantSymValue() const;

    PREVENT_ASSIGN(InvariantBlockBackwardIterator);
};
  320. class FlowGraph;
  321. class GlobOpt
  322. {
  323. private:
  324. class AddSubConstantInfo;
  325. class ArrayLowerBoundCheckHoistInfo;
  326. class ArrayUpperBoundCheckHoistInfo;
  327. friend BackwardPass;
  328. #if DBG
  329. friend class ObjectTempVerify;
  330. #endif
  331. friend class GlobOptBlockData;
  332. friend class BasicBlock;
  333. private:
  334. SparseArray<Value> * byteCodeConstantValueArray;
  335. // Global bitvectors
  336. BVSparse<JitArenaAllocator> * byteCodeConstantValueNumbersBv;
  337. // Global bitvectors
  338. IntConstantToStackSymMap * intConstantToStackSymMap;
  339. IntConstantToValueMap* intConstantToValueMap;
  340. Int64ConstantToValueMap* int64ConstantToValueMap;
  341. AddrConstantToValueMap * addrConstantToValueMap;
  342. StringConstantToValueMap * stringConstantToValueMap;
  343. #if DBG
  344. // We can still track the finished stack literal InitFld lexically.
  345. BVSparse<JitArenaAllocator> * finishedStackLiteralInitFld;
  346. #endif
  347. BVSparse<JitArenaAllocator> * byteCodeUses;
  348. BVSparse<JitArenaAllocator> * tempBv; // Bit vector for temporary uses
  349. BVSparse<JitArenaAllocator> * objectTypeSyms;
  350. BVSparse<JitArenaAllocator> * prePassCopyPropSym; // Symbols that were copy prop'd during loop prepass
  351. // Symbols that refer to slots in the stack frame. We still use currentBlock->liveFields to tell us
  352. // which of these slots are live; this bit-vector just identifies which entries in liveFields represent
  353. // slots, so we can zero them all out quickly.
  354. BVSparse<JitArenaAllocator> * slotSyms;
  355. PropertySym * propertySymUse;
  356. BVSparse<JitArenaAllocator> * lengthEquivBv;
  357. BVSparse<JitArenaAllocator> * argumentsEquivBv;
  358. BVSparse<JitArenaAllocator> * callerEquivBv;
  359. BVSparse<JitArenaAllocator> * changedSymsAfterIncBailoutCandidate;
  360. JitArenaAllocator * alloc;
  361. JitArenaAllocator * tempAlloc;
  362. Func * func;
  363. ValueNumber currentValue;
  364. BasicBlock * currentBlock;
  365. Region * currentRegion;
  366. IntOverflowDoesNotMatterRange *intOverflowDoesNotMatterRange;
  367. Loop * prePassLoop;
  368. Loop * rootLoopPrePass;
  369. uint instrCountSinceLastCleanUp;
  370. SymIdToInstrMap * prePassInstrMap;
  371. SymID maxInitialSymID;
  372. bool isCallHelper: 1;
  373. bool intOverflowCurrentlyMattersInRange : 1;
  374. bool ignoredIntOverflowForCurrentInstr : 1;
  375. bool ignoredNegativeZeroForCurrentInstr : 1;
  376. bool inInlinedBuiltIn : 1;
  377. bool isRecursiveCallOnLandingPad : 1;
  378. bool updateInductionVariableValueNumber : 1;
  379. bool isPerformingLoopBackEdgeCompensation : 1;
  380. bool doTypeSpec : 1;
  381. bool doAggressiveIntTypeSpec : 1;
  382. bool doAggressiveMulIntTypeSpec : 1;
  383. bool doDivIntTypeSpec : 1;
  384. bool doLossyIntTypeSpec : 1;
  385. bool doFloatTypeSpec : 1;
  386. bool doArrayCheckHoist : 1;
  387. bool doArrayMissingValueCheckHoist : 1;
  388. bool doArraySegmentHoist : 1;
  389. bool doJsArraySegmentHoist : 1;
  390. bool doArrayLengthHoist : 1;
  391. bool doEliminateArrayAccessHelperCall : 1;
  392. bool doTrackRelativeIntBounds : 1;
  393. bool doBoundCheckElimination : 1;
  394. bool doBoundCheckHoist : 1;
  395. bool doLoopCountBasedBoundCheckHoist : 1;
  396. bool doPowIntIntTypeSpec : 1;
  397. bool isAsmJSFunc : 1;
  398. bool doTagChecks : 1;
  399. OpndList * noImplicitCallUsesToInsert;
  400. ValueSetByValueNumber * valuesCreatedForClone;
  401. ValueNumberPairToValueMap *valuesCreatedForMerge;
  402. #if DBG
  403. BVSparse<JitArenaAllocator> * byteCodeUsesBeforeOpt;
  404. #endif
  405. public:
  406. GlobOpt(Func * func);
  407. void Optimize();
  408. // Function used by the backward pass as well.
  409. // GlobOptBailout.cpp
  410. static void TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym);
  411. // GlobOptFields.cpp
  412. void ProcessFieldKills(IR::Instr *instr, BVSparse<JitArenaAllocator> * bv, bool inGlobOpt);
  413. static bool DoFieldHoisting(Loop * loop);
  414. IR::ByteCodeUsesInstr * ConvertToByteCodeUses(IR::Instr * isntr);
  415. bool GetIsAsmJSFunc()const{ return isAsmJSFunc; };
  416. private:
  417. bool IsLoopPrePass() const { return this->prePassLoop != nullptr; }
  418. void OptBlock(BasicBlock *block);
  419. void BackwardPass(Js::Phase tag);
  420. void ForwardPass();
  421. void OptLoops(Loop *loop);
  422. void TailDupPass();
  423. bool TryTailDup(IR::BranchInstr *tailBranch);
  424. PRECandidatesList * FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc);
  425. PRECandidatesList * FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc);
  426. void PreloadPRECandidates(Loop *loop, PRECandidatesList *candidates);
  427. BOOL PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate);
  428. void SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym);
  429. void FieldPRE(Loop *loop);
  430. void CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock);
  431. void CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
  432. void TryReplaceLdLen(IR::Instr *& instr);
  433. IR::Instr * OptInstr(IR::Instr *&instr, bool* isInstrCleared);
  434. Value* OptDst(IR::Instr **pInstr, Value *dstVal, Value *src1Val, Value *src2Val, Value *dstIndirIndexVal, Value *src1IndirIndexVal);
  435. void CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val);
  436. Value * OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef = nullptr, IR::IndirOpnd *parentIndirOpnd = nullptr);
  437. void MarkArgumentsUsedForBranch(IR::Instr *inst);
  438. bool OptTagChecks(IR::Instr *instr);
  439. void TryOptimizeInstrWithFixedDataProperty(IR::Instr * * const pInstr);
  440. bool CheckIfPropOpEmitsTypeCheck(IR::Instr *instr, IR::PropertySymOpnd *opnd);
  441. IR::PropertySymOpnd * CreateOpndForTypeCheckOnly(IR::PropertySymOpnd* opnd, Func* func);
  442. bool FinishOptPropOp(IR::Instr *instr, IR::PropertySymOpnd *opnd, BasicBlock* block = nullptr, bool updateExistingValue = false, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr);
  443. void FinishOptHoistedPropOps(Loop * loop);
  444. IR::Instr * SetTypeCheckBailOut(IR::Opnd *opnd, IR::Instr *instr, BailOutInfo *bailOutInfo);
  445. void OptArguments(IR::Instr *Instr);
  446. void TrackInstrsForScopeObjectRemoval(IR::Instr * instr);
  447. bool AreFromSameBytecodeFunc(IR::RegOpnd const* src1, IR::RegOpnd const* dst) const;
  448. Value * ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val);
  449. Value * ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal);
  450. ValueType GetPrepassValueTypeForDst(const ValueType desiredValueType, IR::Instr *const instr, Value *const src1Value, Value *const src2Value, bool *const isValueInfoPreciseRef = nullptr) const;
  451. bool IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue) const;
  452. Value * CreateDstUntransferredIntValue(const int32 min, const int32 max, IR::Instr *const instr, Value *const src1Value, Value *const src2Value);
  453. Value * CreateDstUntransferredValue(const ValueType desiredValueType, IR::Instr *const instr, Value *const src1Value, Value *const src2Value);
  454. Value * ValueNumberTransferDst(IR::Instr *const instr, Value *src1Val);
  455. bool IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue);
  456. Value * ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val);
  457. IR::Opnd * CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd = nullptr);
  458. IR::Opnd * CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd = nullptr);
  459. ValueNumber NewValueNumber();
  460. Value * NewValue(ValueInfo *const valueInfo);
  461. Value * NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo);
  462. Value * CopyValue(Value const *const value);
  463. Value * CopyValue(Value const *const value, const ValueNumber valueNumber);
  464. Value * NewGenericValue(const ValueType valueType);
  465. Value * NewGenericValue(const ValueType valueType, IR::Opnd *const opnd);
  466. Value * NewGenericValue(const ValueType valueType, Sym *const sym);
  467. Value * GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd = nullptr);
  468. Value * GetIntConstantValue(const int64 intConst, IR::Instr * instr, IR::Opnd *const opnd = nullptr);
  469. Value * NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable);
  470. Value * NewInt64ConstantValue(const int64 intConst, IR::Instr * instr);
  471. ValueInfo * NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout);
  472. ValueInfo * NewIntRangeValueInfo(const ValueInfo *const originalValueInfo, const int32 min, const int32 max) const;
  473. Value * NewIntRangeValue(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout, IR::Opnd *const opnd = nullptr);
  474. IntBoundedValueInfo * NewIntBoundedValueInfo(const ValueInfo *const originalValueInfo, const IntBounds *const bounds) const;
  475. Value * NewIntBoundedValue(const ValueType valueType, const IntBounds *const bounds, const bool wasNegativeZeroPreventedByBailout, IR::Opnd *const opnd = nullptr);
  476. Value * NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd = nullptr);
  477. Value * GetVarConstantValue(IR::AddrOpnd *addrOpnd);
  478. Value * NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString);
  479. Value * HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value);
  480. Value * NewFixedFunctionValue(Js::JavascriptFunction *functionValue, IR::AddrOpnd *addrOpnd);
  481. StackSym * GetTaggedIntConstantStackSym(const int32 intConstantValue) const;
  482. StackSym * GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const;
  483. Sym * SetSymStore(ValueInfo *valueInfo, Sym *sym);
  484. void SetSymStoreDirect(ValueInfo *valueInfo, Sym *sym);
  485. IR::Instr * TypeSpecialization(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, bool *redoTypeSpecRef, bool *const forceInvariantHoistingRef);
  486. #ifdef ENABLE_SIMDJS
  487. // SIMD_JS
  488. bool TypeSpecializeSimd128(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal);
  489. bool Simd128DoTypeSpec(IR::Instr *instr, const Value *src1Val, const Value *src2Val, const Value *dstVal);
  490. bool Simd128DoTypeSpecLoadStore(IR::Instr *instr, const Value *src1Val, const Value *src2Val, const Value *dstVal, const ThreadContext::SimdFuncSignature *simdFuncSignature);
  491. bool Simd128CanTypeSpecOpnd(const ValueType opndType, const ValueType expectedType);
  492. bool Simd128ValidateIfLaneIndex(const IR::Instr * instr, IR::Opnd * opnd, uint argPos);
  493. void UpdateBoundCheckHoistInfoForSimd(ArrayUpperBoundCheckHoistInfo &upperHoistInfo, ValueType arrValueType, const IR::Instr *instr);
  494. void Simd128SetIndirOpndType(IR::IndirOpnd *indirOpnd, Js::OpCode opcode);
  495. #endif
  496. IRType GetIRTypeFromValueType(const ValueType &valueType);
  497. ValueType GetValueTypeFromIRType(const IRType &type);
  498. IR::BailOutKind GetBailOutKindFromValueType(const ValueType &valueType);
  499. IR::Instr * GetExtendedArg(IR::Instr *instr);
  500. int GetBoundCheckOffsetForSimd(ValueType arrValueType, const IR::Instr *instr, const int oldOffset = -1);
  501. IR::Instr * OptNewScObject(IR::Instr** instrPtr, Value* srcVal);
  502. template <typename T>
  503. bool OptConstFoldBinaryWasm(IR::Instr * *pInstr, const Value* src1, const Value* src2, Value **pDstVal);
  504. template <typename T>
  505. IR::Opnd* ReplaceWConst(IR::Instr **pInstr, T value, Value **pDstVal);
  506. bool OptConstFoldBinary(IR::Instr * *pInstr, const IntConstantBounds &src1IntConstantBounds, const IntConstantBounds &src2IntConstantBounds, Value **pDstVal);
  507. bool OptConstFoldUnary(IR::Instr * *pInstr, const int32 intConstantValue, const bool isUsingOriginalSrc1Value, Value **pDstVal);
  508. bool OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *vInfo);
  509. bool OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal);
  510. Js::Var GetConstantVar(IR::Opnd *opnd, Value *val);
  511. bool IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val);
  512. bool IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst);
  513. bool IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val = nullptr);
  514. bool TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal);
  515. bool TypeSpecializeIntUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, int32 min, int32 max, Value *const src1OriginalVal, bool *redoTypeSpecRef, bool skipDst = false);
  516. bool TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst = false);
  517. void TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef);
  518. void TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal);
  519. void TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal);
  520. bool TypeSpecializeUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef, bool *const forceInvariantHoistingRef);
  521. bool TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef);
  522. bool TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst = false);
  523. bool TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal);
  524. void TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal);
  525. bool TypeSpecializeLdLen(IR::Instr * *const instrRef, Value * *const src1ValueRef, Value * *const dstValueRef, bool *const forceInvariantHoistingRef);
  526. void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
  527. void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
  528. void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
  529. bool TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal);
  530. bool TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal);
  531. bool TypeSpecializeStElem(IR::Instr **pInstr, Value *src1Val, Value **pDstVal);
  532. bool ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd);
  533. ValueType GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize);
  534. bool IsInstrInvalidForMemOp(IR::Instr *, Loop *, Value *, Value *);
  535. bool CollectMemOpInfo(IR::Instr *, IR::Instr *, Value *, Value *);
  536. bool CollectMemOpStElementI(IR::Instr *, Loop *);
  537. bool CollectMemsetStElementI(IR::Instr *, Loop *);
  538. bool CollectMemcopyStElementI(IR::Instr *, Loop *);
  539. bool CollectMemOpLdElementI(IR::Instr *, Loop *);
  540. bool CollectMemcopyLdElementI(IR::Instr *, Loop *);
  541. SymID GetVarSymID(StackSym *);
  542. const InductionVariable* GetInductionVariable(SymID, Loop *);
  543. bool IsSymIDInductionVariable(SymID, Loop *);
  544. bool IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd);
  545. void ProcessMemOp();
  546. bool InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, struct MemSetEmitData* emitData, bool& errorInInstr);
  547. bool InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, struct MemCopyEmitData* emitData, bool& errorInInstr);
  548. bool ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) struct MemOpEmitData** emitData, int& iEmitData);
  549. void EmitMemop(Loop * loop, LoopCount *loopCount, const struct MemOpEmitData* emitData);
  550. IR::Opnd* GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr = nullptr);
  551. IR::RegOpnd* GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr = nullptr);
  552. LoopCount* GetOrGenerateLoopCountForMemOp(Loop *loop);
  553. IR::Instr* FindUpperBoundsCheckInstr(IR::Instr* instr);
  554. IR::Instr* FindArraySegmentLoadInstr(IR::Instr* instr);
  555. void RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block);
  556. void GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType);
  557. bool HasMemOp(Loop * loop);
  558. private:
  559. void ChangeValueType(BasicBlock *const block, Value *const value, const ValueType newValueType, const bool preserveSubclassInfo, const bool allowIncompatibleType = false) const;
  560. void ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType = false, const bool compensated = false) const;
  561. bool AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const;
  562. private:
  563. #if DBG
  564. void VerifyArrayValueInfoForTracking(const ValueInfo *const valueInfo, const bool isJsArray, const BasicBlock *const block, const bool ignoreKnownImplicitCalls = false) const;
  565. #endif
  566. void TrackNewValueForKills(Value *const value);
  567. void DoTrackNewValueForKills(Value *const value);
  568. void TrackCopiedValueForKills(Value *const value);
  569. void DoTrackCopiedValueForKills(Value *const value);
  570. void TrackMergedValueForKills(Value *const value, GlobOptBlockData *const blockData, BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const;
  571. void DoTrackMergedValueForKills(Value *const value, GlobOptBlockData *const blockData, BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const;
  572. void TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const;
  573. void ProcessValueKills(IR::Instr *const instr);
  574. void ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData);
  575. void ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData);
  576. bool NeedBailOnImplicitCallForLiveValues(BasicBlock const * const block, const bool isForwardPass) const;
  577. IR::Instr* CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func);
  578. IR::Instr* CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func* func);
  579. IR::Instr* AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset);
  580. void OptArraySrc(IR::Instr * *const instrRef);
  581. private:
  582. void TrackIntSpecializedAddSubConstant(IR::Instr *const instr, const AddSubConstantInfo *const addSubConstantInfo, Value *const dstValue, const bool updateSourceBounds);
  583. void CloneBoundCheckHoistBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock, GlobOptBlockData *const fromData);
  584. void MergeBoundCheckHoistBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock, GlobOptBlockData *const fromData);
  585. void DetectUnknownChangesToInductionVariables(GlobOptBlockData *const blockData);
  586. void SetInductionVariableValueNumbers(GlobOptBlockData *const blockData);
  587. void FinalizeInductionVariables(Loop *const loop, GlobOptBlockData *const headerData);
  588. enum class SymBoundType {OFFSET, VALUE, UNKNOWN};
  589. SymBoundType DetermineSymBoundOffsetOrValueRelativeToLandingPad(StackSym *const sym, const bool landingPadValueIsLowerBound, ValueInfo *const valueInfo, const IntBounds *const bounds, GlobOptBlockData *const landingPadGlobOptBlockData, int *const boundOffsetOrValueRef);
  590. private:
  591. void DetermineDominatingLoopCountableBlock(Loop *const loop, BasicBlock *const headerBlock);
  592. void DetermineLoopCount(Loop *const loop);
  593. void GenerateLoopCount(Loop *const loop, LoopCount *const loopCount);
  594. void GenerateLoopCountPlusOne(Loop *const loop, LoopCount *const loopCount);
  595. void GenerateSecondaryInductionVariableBound(Loop *const loop, StackSym *const inductionVariableSym, const LoopCount *const loopCount, const int maxMagnitudeChange, StackSym *const boundSym);
  596. private:
  597. void DetermineArrayBoundCheckHoistability(bool needLowerBoundCheck, bool needUpperBoundCheck, ArrayLowerBoundCheckHoistInfo &lowerHoistInfo, ArrayUpperBoundCheckHoistInfo &upperHoistInfo, const bool isJsArray, StackSym *const indexSym, Value *const indexValue, const IntConstantBounds &indexConstantBounds, StackSym *const headSegmentLengthSym, Value *const headSegmentLengthValue, const IntConstantBounds &headSegmentLengthConstantBounds, Loop *const headSegmentLengthInvariantLoop, bool &failedToUpdateCompatibleLowerBoundCheck, bool &failedToUpdateCompatibleUpperBoundCheck);
  598. private:
  599. void CaptureNoImplicitCallUses(IR::Opnd *opnd, const bool usesNoMissingValuesInfo, IR::Instr *const includeCurrentInstr = nullptr);
  600. void InsertNoImplicitCallUses(IR::Instr *const instr);
  601. void PrepareLoopArrayCheckHoist();
  602. public:
  603. JsArrayKills CheckJsArrayKills(IR::Instr *const instr);
  604. GlobOptBlockData const * CurrentBlockData() const;
  605. GlobOptBlockData * CurrentBlockData();
  606. private:
  607. bool IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr);
  608. bool NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock const * const block, const bool isForwardPass) const;
  609. private:
  610. bool PrepareForIgnoringIntOverflow(IR::Instr *const instr);
  611. void VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr);
  612. void PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val);
  613. void ProcessKills(IR::Instr *instr);
  614. void InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
  615. void InsertValueCompensation(BasicBlock *const predecessor, const SymToValueInfoMap &symsRequiringCompensationToMergedValueInfoMap);
  616. IR::Instr * ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val);
  617. void ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
  618. IR::Instr * ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *val, bool needsUpdate);
  619. void ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr = nullptr);
  620. void ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
  621. void ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind = IR::BailOutInvalid, bool lossy = false, IR::Instr *insertBeforeInstr = nullptr);
  622. IR::Instr * ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy);
  623. IR::Instr * ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind);
  624. IR::Instr * ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir,
  625. IRType toType, IR::BailOutKind bailOutKind, bool lossy = false, IR::Instr *insertBeforeInstr = nullptr);
  626. void ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block);
  627. void ToVarStackSym(StackSym *varSym, BasicBlock *block);
  628. void ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
  629. void ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
  630. void ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
  631. #ifdef ENABLE_SIMDJS
  632. // SIMD_JS
  633. void TypeSpecializeSimd128Dst(IRType type, IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value **pDstVal);
  634. void ToSimd128Dst(IRType toType, IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
  635. #endif
  636. void OptConstFoldBr(bool test, IR::Instr *instr, Value * intTypeSpecSrc1Val = nullptr, Value * intTypeSpecSrc2Val = nullptr);
  637. void PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32 * pNewMax);
  638. void PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
  639. int32 min2, int32 max2, int32 *pNewMin, int32 *pNewMax);
  640. bool OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives);
  641. bool OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef = nullptr);
  642. bool OptDstIsInvariant(IR::RegOpnd *dst);
  643. bool OptIsInvariant(IR::Instr *instr, BasicBlock *block, Loop *loop, Value *src1Val, Value *src2Val, bool isNotTypeSpecConv, const bool forceInvariantHoisting = false);
  644. void OptHoistInvariant(IR::Instr *instr, BasicBlock *block, Loop *loop, Value *dstVal, Value *const src1Val, Value *const src2Value,
  645. bool isNotTypeSpecConv, bool lossy = false, IR::BailOutKind bailoutKind = IR::BailOutInvalid);
  646. bool TryHoistInvariant(IR::Instr *instr, BasicBlock *block, Value *dstVal, Value *src1Val, Value *src2Val, bool isNotTypeSpecConv,
  647. const bool lossy = false, const bool forceInvariantHoisting = false, IR::BailOutKind bailoutKind = IR::BailOutInvalid);
  648. void HoistInvariantValueInfo(ValueInfo *const invariantValueInfoToHoist, Value *const valueToUpdate, BasicBlock *const targetBlock);
  649. void OptHoistUpdateValueType(Loop* loop, IR::Instr* instr, IR::Opnd* srcOpnd, Value *const srcVal);
  650. public:
  651. static bool IsTypeSpecPhaseOff(Func const * func);
  652. static bool DoAggressiveIntTypeSpec(Func const * func);
  653. static bool DoLossyIntTypeSpec(Func const * func);
  654. static bool DoFloatTypeSpec(Func const * func);
  655. static bool DoStringTypeSpec(Func const * func);
  656. static bool DoArrayCheckHoist(Func const * const func);
  657. static bool DoArrayMissingValueCheckHoist(Func const * const func);
  658. static bool DoArraySegmentHoist(const ValueType baseValueType, Func const * const func);
  659. static bool DoArrayLengthHoist(Func const * const func);
  660. static bool DoEliminateArrayAccessHelperCall(Func* func);
  661. static bool DoTypedArrayTypeSpec(Func const * func);
  662. static bool DoNativeArrayTypeSpec(Func const * func);
  663. static bool IsSwitchOptEnabled(Func const * func);
  664. static bool DoInlineArgsOpt(Func const * func);
  665. static bool IsPREInstrCandidateLoad(Js::OpCode opcode);
  666. static bool IsPREInstrCandidateStore(Js::OpCode opcode);
  667. static bool ImplicitCallFlagsAllowOpts(Loop * loop);
  668. static bool ImplicitCallFlagsAllowOpts(Func const * func);
  669. private:
  670. bool DoConstFold() const;
  671. bool DoTypeSpec() const;
  672. bool DoAggressiveIntTypeSpec() const;
  673. bool DoAggressiveMulIntTypeSpec() const;
  674. bool DoDivIntTypeSpec() const;
  675. bool DoLossyIntTypeSpec() const;
  676. bool DoFloatTypeSpec() const;
  677. bool DoStringTypeSpec() const { return GlobOpt::DoStringTypeSpec(this->func); }
  678. bool DoArrayCheckHoist() const;
  679. bool DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr const * const instr = nullptr) const;
  680. bool DoArrayMissingValueCheckHoist() const;
  681. bool DoArraySegmentHoist(const ValueType baseValueType) const;
  682. bool DoTypedArraySegmentLengthHoist(Loop *const loop) const;
  683. bool DoArrayLengthHoist() const;
  684. bool DoEliminateArrayAccessHelperCall() const;
  685. bool DoTypedArrayTypeSpec() const { return GlobOpt::DoTypedArrayTypeSpec(this->func); }
  686. bool DoNativeArrayTypeSpec() const { return GlobOpt::DoNativeArrayTypeSpec(this->func); }
  687. bool DoLdLenIntSpec(IR::Instr * const instr, const ValueType baseValueType);
  688. bool IsSwitchOptEnabled() const { return GlobOpt::IsSwitchOptEnabled(this->func); }
  689. bool DoPathDependentValues() const;
  690. bool DoTrackRelativeIntBounds() const;
  691. bool DoBoundCheckElimination() const;
  692. bool DoBoundCheckHoist() const;
  693. bool DoLoopCountBasedBoundCheckHoist() const;
  694. bool DoPowIntIntTypeSpec() const;
  695. bool DoTagChecks() const;
  696. template <class Fn>
  697. void TrackByteCodeUsesForInstrAddedInOptInstr(IR::Instr * trackByteCodeUseOnInstr, Fn fn)
  698. {
  699. BVSparse<JitArenaAllocator> *currentBytecodeUses = this->byteCodeUses;
  700. PropertySym * currentPropertySymUse = this->propertySymUse;
  701. PropertySym * tempPropertySymUse = NULL;
  702. this->byteCodeUses = NULL;
  703. BVSparse<JitArenaAllocator> *tempByteCodeUse = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  704. #if DBG
  705. BVSparse<JitArenaAllocator> *currentBytecodeUsesBeforeOpt = this->byteCodeUsesBeforeOpt;
  706. this->byteCodeUsesBeforeOpt = tempByteCodeUse;
  707. #endif
  708. this->propertySymUse = NULL;
  709. GlobOpt::TrackByteCodeSymUsed(trackByteCodeUseOnInstr, tempByteCodeUse, &tempPropertySymUse);
  710. fn();
  711. this->byteCodeUses = currentBytecodeUses;
  712. this->propertySymUse = currentPropertySymUse;
  713. #if DBG
  714. this->byteCodeUsesBeforeOpt = currentBytecodeUsesBeforeOpt;
  715. #endif
  716. }
  717. private:
  718. // GlobOptBailout.cpp
  719. bool MayNeedBailOut(Loop * loop) const;
  720. static void TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySymUse);
  721. static void TrackByteCodeSymUsed(IR::RegOpnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed);
  722. static void TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed);
  723. void CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo);
  724. void CaptureValuesFromScratch(
  725. BasicBlock * block,
  726. SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
  727. SListBase<CopyPropSyms>::EditingIterator & bailOutCopyPropIter);
  728. void CaptureValuesIncremental(
  729. BasicBlock * block,
  730. SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
  731. SListBase<CopyPropSyms>::EditingIterator & bailOutCopyPropIter);
  732. void CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter);
  733. void CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator);
  734. void CaptureByteCodeSymUses(IR::Instr * instr);
  735. IR::ByteCodeUsesInstr * InsertByteCodeUses(IR::Instr * instr, bool includeDef = false);
  736. void TrackCalls(IR::Instr * instr);
  737. void RecordInlineeFrameInfo(IR::Instr* instr);
  738. void EndTrackCall(IR::Instr * instr);
  739. void EndTrackingOfArgObjSymsForInlinee();
  740. void FillBailOutInfo(BasicBlock *block, BailOutInfo *bailOutInfo);
  741. static void MarkNonByteCodeUsed(IR::Instr * instr);
  742. static void MarkNonByteCodeUsed(IR::Opnd * opnd);
  743. bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const;
  744. bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass) const;
  745. static bool IsTypeCheckProtected(const IR::Instr * instr);
  746. static bool MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val);
  747. static bool MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val);
  748. void GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind);
  749. public:
  750. void GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind);
  751. private:
  752. IR::Instr * EnsureBailTarget(Loop * loop);
  753. // GlobOptFields.cpp
  754. void ProcessFieldKills(IR::Instr * instr);
  755. void KillLiveFields(StackSym * stackSym, BVSparse<JitArenaAllocator> * bv);
  756. void KillLiveFields(PropertySym * propertySym, BVSparse<JitArenaAllocator> * bv);
  757. void KillLiveFields(BVSparse<JitArenaAllocator> *const fieldsToKill, BVSparse<JitArenaAllocator> *const bv) const;
  758. void KillLiveElems(IR::IndirOpnd * indirOpnd, BVSparse<JitArenaAllocator> * bv, bool inGlobOpt, Func *func);
  759. void KillAllFields(BVSparse<JitArenaAllocator> * bv);
  760. void SetAnyPropertyMayBeWrittenTo();
  761. void AddToPropertiesWrittenTo(Js::PropertyId propertyId);
  762. bool DoFieldCopyProp() const;
  763. bool DoFieldCopyProp(Loop * loop) const;
  764. bool DoFunctionFieldCopyProp() const;
  765. bool DoFieldHoisting() const;
  766. bool DoObjTypeSpec() const;
  767. bool DoObjTypeSpec(Loop * loop) const;
  768. bool DoFieldRefOpts() const { return DoObjTypeSpec(); }
  769. bool DoFieldRefOpts(Loop * loop) const { return DoObjTypeSpec(loop); }
  770. bool DoFieldOpts(Loop * loop) const;
  771. bool DoFieldPRE() const;
  772. bool DoFieldPRE(Loop *loop) const;
  773. bool FieldHoistOptSrc(IR::Opnd *opnd, IR::Instr *instr, PropertySym * propertySym);
  774. void FieldHoistOptDst(IR::Instr * instr, PropertySym * propertySym, Value * src1Val);
  775. bool TrackHoistableFields() const;
  776. void PreparePrepassFieldHoisting(Loop * loop);
  777. void PrepareFieldHoisting(Loop * loop);
  778. void CheckFieldHoistCandidate(IR::Instr * instr, PropertySym * sym);
  779. Loop * FindFieldHoistStackSym(Loop * startLoop, SymID propertySymId, StackSym ** copySym, IR::Instr * instrToHoist = nullptr) const;
  780. bool CopyPropHoistedFields(PropertySym * sym, IR::Opnd ** ppOpnd, IR::Instr * instr);
  781. void HoistFieldLoad(PropertySym * sym, Loop * loop, IR::Instr * instr, Value * oldValue, Value * newValue);
  782. void HoistNewFieldLoad(PropertySym * sym, Loop * loop, IR::Instr * instr, Value * oldValue, Value * newValue);
  783. void GenerateHoistFieldLoad(PropertySym * sym, Loop * loop, IR::Instr * instr, StackSym * newStackSym, Value * oldValue, Value * newValue);
  784. void HoistFieldLoadValue(Loop * loop, Value * newValue, SymID symId, Js::OpCode opcode, IR::Opnd * srcOpnd);
  785. void ReloadFieldHoistStackSym(IR::Instr * instr, PropertySym * propertySym);
  786. void CopyStoreFieldHoistStackSym(IR::Instr * storeFldInstr, PropertySym * sym, Value * src1Val);
  787. Value * CreateFieldSrcValue(PropertySym * sym, PropertySym * originalSym, IR::Opnd **ppOpnd, IR::Instr * instr);
  788. static bool HasHoistableFields(BasicBlock const * block);
  789. static bool HasHoistableFields(GlobOptBlockData const * globOptData);
  790. bool IsHoistablePropertySym(SymID symId) const;
  791. bool NeedBailOnImplicitCallWithFieldOpts(Loop *loop, bool hasLiveFields) const;
  792. IR::Instr * EnsureDisableImplicitCallRegion(Loop * loop);
  793. void UpdateObjPtrValueType(IR::Opnd * opnd, IR::Instr * instr);
  794. bool TrackArgumentsObject();
  795. void CannotAllocateArgumentsObjectOnStack();
  796. #if DBG
  797. bool IsPropertySymId(SymID symId) const;
  798. bool IsHoistedPropertySym(PropertySym * sym) const;
  799. bool IsHoistedPropertySym(SymID symId, Loop * loop) const;
  800. static void AssertCanCopyPropOrCSEFieldLoad(IR::Instr * instr);
  801. #endif
  802. StackSym * EnsureObjectTypeSym(StackSym * objectSym);
  803. PropertySym * EnsurePropertyWriteGuardSym(PropertySym * propertySym);
  804. void PreparePropertySymForTypeCheckSeq(PropertySym *propertySym);
  805. bool IsPropertySymPreparedForTypeCheckSeq(PropertySym *propertySym);
  806. bool PreparePropertySymOpndForTypeCheckSeq(IR::PropertySymOpnd *propertySymOpnd, IR::Instr * instr, Loop *loop);
  807. static bool AreTypeSetsIdentical(Js::EquivalentTypeSet * leftTypeSet, Js::EquivalentTypeSet * rightTypeSet);
  808. static bool IsSubsetOf(Js::EquivalentTypeSet * leftTypeSet, Js::EquivalentTypeSet * rightTypeSet);
  809. static bool CompareCurrentTypesWithExpectedTypes(JsTypeValueInfo *valueInfo, IR::PropertySymOpnd * propertySymOpnd);
  810. bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd);
  811. bool CheckIfInstrInTypeCheckSeqEmitsTypeCheck(IR::Instr* instr, IR::PropertySymOpnd *opnd);
  812. template<bool makeChanges>
  813. bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd, BasicBlock* block, bool updateExistingValue, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr, bool *isObjTypeChecked = nullptr);
  814. void KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = (SymID)-1);
  815. void ValueNumberObjectType(IR::Opnd *dstOpnd, IR::Instr *instr);
  816. void SetSingleTypeOnObjectTypeValue(Value* value, const JITTypeHolder type);
  817. void SetTypeSetOnObjectTypeValue(Value* value, Js::EquivalentTypeSet* typeSet);
  818. void UpdateObjectTypeValue(Value* value, const JITTypeHolder type, bool setType, Js::EquivalentTypeSet* typeSet, bool setTypeSet);
  819. void SetObjectTypeFromTypeSym(StackSym *typeSym, Value* value, BasicBlock* block = nullptr);
  820. void SetObjectTypeFromTypeSym(StackSym *typeSym, const JITTypeHolder type, Js::EquivalentTypeSet * typeSet, BasicBlock* block = nullptr, bool updateExistingValue = false);
  821. void SetObjectTypeFromTypeSym(StackSym *typeSym, const JITTypeHolder type, Js::EquivalentTypeSet * typeSet, GlobOptBlockData *blockData, bool updateExistingValue = false);
  822. void KillObjectType(StackSym *objectSym, BVSparse<JitArenaAllocator>* liveFields = nullptr);
  823. void KillAllObjectTypes(BVSparse<JitArenaAllocator>* liveFields = nullptr);
  824. void EndFieldLifetime(IR::SymOpnd *symOpnd);
  825. PropertySym * CopyPropPropertySymObj(IR::SymOpnd *opnd, IR::Instr *instr);
  826. static bool NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind);
  827. IR::Instr * PreOptPeep(IR::Instr *instr);
  828. IR::Instr * OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val);
  829. void OptimizeIndirUses(IR::IndirOpnd *indir, IR::Instr * *pInstr, Value **indirIndexValRef);
  830. void RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr);
  831. void ProcessTryHandler(IR::Instr* instr);
  832. bool ProcessExceptionHandlingEdges(IR::Instr* instr);
  833. void InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd);
  834. void RemoveFlowEdgeToCatchBlock(IR::Instr * instr);
  835. bool RemoveFlowEdgeToFinallyOnExceptionBlock(IR::Instr * instr);
  836. void CSEAddInstr(BasicBlock *block, IR::Instr *instr, Value *dstVal, Value *src1Val, Value *src2Val, Value *dstIndirIndexVal, Value *src1IndirIndexVal);
  837. void OptimizeChecks(IR::Instr * const instr);
  838. bool CSEOptimize(BasicBlock *block, IR::Instr * *const instrRef, Value **pSrc1Val, Value **pSrc2Val, Value **pSrc1IndirIndexVal, bool intMathExprOnly = false);
  839. bool GetHash(IR::Instr *instr, Value *src1Val, Value *src2Val, ExprAttributes exprAttributes, ExprHash *pHash);
  840. void ProcessArrayValueKills(IR::Instr *instr);
  841. static bool NeedBailOnImplicitCallForCSE(BasicBlock const *block, bool isForwardPass);
  842. bool DoCSE();
  843. bool CanCSEArrayStore(IR::Instr *instr);
  844. #if DBG_DUMP
  845. void Dump() const;
  846. void DumpSymToValueMap() const;
  847. void DumpSymToValueMap(BasicBlock const * block) const;
  848. void DumpSymVal(int index);
  849. void Trace(BasicBlock * basicBlock, bool before) const;
  850. void TraceSettings() const;
  851. #endif
  852. bool IsWorthSpecializingToInt32Branch(IR::Instr const * instr, Value const * src1Val, Value const * src2Val) const;
  853. bool TryOptConstFoldBrFalse(IR::Instr *const instr, Value *const srcValue, const int32 min, const int32 max);
  854. bool TryOptConstFoldBrEqual(IR::Instr *const instr, const bool branchOnEqual, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
  855. bool TryOptConstFoldBrGreaterThan(IR::Instr *const instr, const bool branchOnGreaterThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
  856. bool TryOptConstFoldBrGreaterThanOrEqual(IR::Instr *const instr, const bool branchOnGreaterThanOrEqual, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
  857. bool TryOptConstFoldBrUnsignedLessThan(IR::Instr *const instr, const bool branchOnLessThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
  858. bool TryOptConstFoldBrUnsignedGreaterThan(IR::Instr *const instr, const bool branchOnGreaterThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
  859. void UpdateIntBoundsForEqualBranch(Value *const src1Value, Value *const src2Value, const int32 src2ConstantValue = 0);
  860. void UpdateIntBoundsForNotEqualBranch(Value *const src1Value, Value *const src2Value, const int32 src2ConstantValue = 0);
  861. void UpdateIntBoundsForGreaterThanOrEqualBranch(Value *const src1Value, Value *const src2Value);
  862. void UpdateIntBoundsForGreaterThanBranch(Value *const src1Value, Value *const src2Value);
  863. void UpdateIntBoundsForLessThanOrEqualBranch(Value *const src1Value, Value *const src2Value);
  864. void UpdateIntBoundsForLessThanBranch(Value *const src1Value, Value *const src2Value);
  865. IntBounds * GetIntBoundsToUpdate(const ValueInfo *const valueInfo, const IntConstantBounds &constantBounds, const bool isSettingNewBound, const bool isBoundConstant, const bool isSettingUpperBound, const bool isExplicit);
  866. ValueInfo * UpdateIntBoundsForEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
  867. ValueInfo * UpdateIntBoundsForNotEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
  868. ValueInfo * UpdateIntBoundsForGreaterThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
  869. ValueInfo * UpdateIntBoundsForGreaterThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const int boundOffset, const bool isExplicit);
  870. ValueInfo * UpdateIntBoundsForGreaterThan(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
  871. ValueInfo * UpdateIntBoundsForLessThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
  872. ValueInfo * UpdateIntBoundsForLessThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const int boundOffset, const bool isExplicit);
  873. ValueInfo * UpdateIntBoundsForLessThan(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
  874. void SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info);
  875. PathDependentInfoToRestore UpdatePathDependentInfo(PathDependentInfo *const info);
  876. void RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore);
  877. IR::Instr * TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrEnd);
  878. void TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd);
  879. IR::Instr * GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst);
  880. friend class InvariantBlockBackwardIterator;
  881. };