// GlobOpt.h
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. class BackwardPass;
  6. class LoopCount;
  7. class GlobOpt;
  8. #if ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  9. #define PRINT_GOPT_TRACE_HEADER \
  10. Output::Print(_u("TRACE ")); \
  11. if (this->IsLoopPrePass()) \
  12. { \
  13. Output::Print(_u("[%d, %d]"), this->rootLoopPrePass->loopNumber - 1, this->prePassLoop->loopNumber - 1); \
  14. } \
  15. Output::Print(_u(": ")); \
  16. #define PRINT_VALUENUMBER_TRACE_HEADER \
  17. Output::Print(_u("VALUE NUMBERING TRACE ")); \
  18. if (this->IsLoopPrePass()) \
  19. { \
  20. Output::Print(_u("[%d, %d]"), this->rootLoopPrePass->loopNumber - 1, this->prePassLoop->loopNumber - 1); \
  21. } \
  22. Output::Print(_u(": ")); \
  23. #define GOPT_TRACE_VALUENUMBER(opndHeader, opnd, ...) \
  24. if (PHASE_TRACE(Js::ValueNumberingPhase, this->func)) \
  25. { \
  26. PRINT_VALUENUMBER_TRACE_HEADER; \
  27. Output::Print(opndHeader); \
  28. opnd->Dump(IRDumpFlags_None, this->func); \
  29. Output::Print(_u(" : ")); \
  30. Output::Print(__VA_ARGS__); \
  31. Output::Print(_u("\n")); \
  32. Output::Flush(); \
  33. }
  34. #define GOPT_TRACE_OPND(opnd, ...) \
  35. if (PHASE_TRACE(Js::GlobOptPhase, this->func)) \
  36. { \
  37. PRINT_GOPT_TRACE_HEADER; \
  38. opnd->Dump(); \
  39. Output::Print(_u(" : ")); \
  40. Output::Print(__VA_ARGS__); \
  41. Output::Flush(); \
  42. }
  43. #define GOPT_TRACE(...) \
  44. if (PHASE_TRACE(Js::GlobOptPhase, this->func)) \
  45. { \
  46. PRINT_GOPT_TRACE_HEADER; \
  47. Output::Print(__VA_ARGS__); \
  48. Output::Flush(); \
  49. }
  50. #define GOPT_TRACE_INSTRTRACE(instr) \
  51. if (PHASE_TRACE(Js::GlobOptPhase, this->func) || PHASE_TRACE(Js::ValueNumberingPhase, this->func)) \
  52. { \
  53. if (this->IsLoopPrePass()) \
  54. { \
  55. Output::Print(_u("[%d, %d]: "), this->rootLoopPrePass->loopNumber - 1, this->prePassLoop->loopNumber - 1); \
  56. } \
  57. instr->Dump(); \
  58. Output::Flush(); \
  59. }
  60. #define GOPT_TRACE_INSTR(instr, ...) \
  61. if (PHASE_TRACE(Js::GlobOptPhase, this->func)) \
  62. { \
  63. PRINT_GOPT_TRACE_HEADER; \
  64. Output::Print(__VA_ARGS__); \
  65. instr->Dump(); \
  66. Output::Flush(); \
  67. }
  68. #define GOPT_TRACE_BLOCK(block, before) \
  69. this->Trace(block, before); \
  70. Output::Flush();
  71. // TODO: OOP JIT, add back line number
  72. #define TRACE_PHASE_INSTR(phase, instr, ...) \
  73. if(PHASE_TRACE(phase, this->func)) \
  74. { \
  75. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
  76. Output::Print( \
  77. _u("Function %s (%s)"), \
  78. this->func->GetJITFunctionBody()->GetDisplayName(), \
  79. this->func->GetDebugNumberSet(debugStringBuffer)); \
  80. if(this->func->IsLoopBody()) \
  81. { \
  82. Output::Print(_u(", loop %u"), this->func->GetWorkItem()->GetLoopNumber()); \
  83. } \
  84. if(instr->m_func != this->func) \
  85. { \
  86. Output::Print( \
  87. _u(", Inlinee %s (%s)"), \
  88. instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
  89. instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
  90. } \
  91. Output::Print(_u(" - %s\n "), Js::PhaseNames[phase]); \
  92. instr->Dump(); \
  93. Output::Print(_u(" ")); \
  94. Output::Print(__VA_ARGS__); \
  95. Output::Flush(); \
  96. }
  97. #define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...) \
  98. if(CONFIG_FLAG(Verbose)) \
  99. { \
  100. TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
  101. }
  102. #else // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  103. #define GOPT_TRACE(...)
  104. #define GOPT_TRACE_VALUENUMBER(opnd, ...)
  105. #define GOPT_TRACE_OPND(opnd, ...)
  106. #define GOPT_TRACE_INSTRTRACE(instr)
  107. #define GOPT_TRACE_INSTR(instr, ...)
  108. #define GOPT_TRACE_BLOCK(block, before)
  109. #define TRACE_PHASE_INSTR(phase, instr, ...)
  110. #define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...)
  111. #endif // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  112. class IntMathExprAttributes : public ExprAttributes
  113. {
  114. private:
  115. static const uint IgnoredIntOverflowIndex = 0;
  116. static const uint IgnoredNegativeZeroIndex = 1;
  117. public:
  118. IntMathExprAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
  119. {
  120. }
  121. IntMathExprAttributes(const bool ignoredIntOverflow, const bool ignoredNegativeZero)
  122. {
  123. SetBitAttribute(IgnoredIntOverflowIndex, ignoredIntOverflow);
  124. SetBitAttribute(IgnoredNegativeZeroIndex, ignoredNegativeZero);
  125. }
  126. };
  127. class ConvAttributes : public ExprAttributes
  128. {
  129. private:
  130. static const uint DstUnsignedIndex = 0;
  131. static const uint SrcUnsignedIndex = 1;
  132. public:
  133. ConvAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
  134. {
  135. }
  136. ConvAttributes(const bool isDstUnsigned, const bool isSrcUnsigned)
  137. {
  138. SetBitAttribute(DstUnsignedIndex, isDstUnsigned);
  139. SetBitAttribute(SrcUnsignedIndex, isSrcUnsigned);
  140. }
  141. };
  142. class DstIsIntOrNumberAttributes : public ExprAttributes
  143. {
  144. private:
  145. static const uint DstIsIntOnlyIndex = 0;
  146. static const uint DstIsNumberOnlyIndex = 1;
  147. public:
  148. DstIsIntOrNumberAttributes(const ExprAttributes &exprAttributes) : ExprAttributes(exprAttributes)
  149. {
  150. }
  151. DstIsIntOrNumberAttributes(const bool dstIsIntOnly, const bool dstIsNumberOnly)
  152. {
  153. SetBitAttribute(DstIsIntOnlyIndex, dstIsIntOnly);
  154. SetBitAttribute(DstIsNumberOnlyIndex, dstIsNumberOnly);
  155. }
  156. };
  157. enum class PathDependentRelationship : uint8
  158. {
  159. Equal,
  160. NotEqual,
  161. GreaterThanOrEqual,
  162. GreaterThan,
  163. LessThanOrEqual,
  164. LessThan
  165. };
  166. class PathDependentInfo
  167. {
  168. private:
  169. Value *leftValue, *rightValue;
  170. int32 rightConstantValue;
  171. PathDependentRelationship relationship;
  172. public:
  173. PathDependentInfo(const PathDependentRelationship relationship, Value *const leftValue, Value *const rightValue)
  174. : leftValue(leftValue), rightValue(rightValue), rightConstantValue(0), relationship(relationship)
  175. {
  176. Assert(leftValue);
  177. Assert(rightValue);
  178. }
  179. PathDependentInfo(
  180. const PathDependentRelationship relationship,
  181. Value *const leftValue,
  182. Value *const rightValue,
  183. const int32 rightConstantValue)
  184. : leftValue(leftValue), rightValue(rightValue), rightConstantValue(rightConstantValue), relationship(relationship)
  185. {
  186. Assert(leftValue);
  187. }
  188. public:
  189. bool HasInfo() const
  190. {
  191. return !!leftValue;
  192. }
  193. PathDependentRelationship Relationship() const
  194. {
  195. Assert(HasInfo());
  196. return relationship;
  197. }
  198. Value *LeftValue() const
  199. {
  200. Assert(HasInfo());
  201. return leftValue;
  202. }
  203. Value *RightValue() const
  204. {
  205. Assert(HasInfo());
  206. return rightValue;
  207. }
  208. int32 RightConstantValue() const
  209. {
  210. Assert(!RightValue());
  211. return rightConstantValue;
  212. }
  213. };
  214. class PathDependentInfoToRestore
  215. {
  216. private:
  217. ValueInfo *leftValueInfo, *rightValueInfo;
  218. public:
  219. PathDependentInfoToRestore() : leftValueInfo(nullptr), rightValueInfo(nullptr)
  220. {
  221. }
  222. PathDependentInfoToRestore(ValueInfo *const leftValueInfo, ValueInfo *const rightValueInfo)
  223. : leftValueInfo(leftValueInfo), rightValueInfo(rightValueInfo)
  224. {
  225. }
  226. public:
  227. ValueInfo *LeftValueInfo() const
  228. {
  229. return leftValueInfo;
  230. }
  231. ValueInfo *RightValueInfo() const
  232. {
  233. return rightValueInfo;
  234. }
  235. public:
  236. void Clear()
  237. {
  238. leftValueInfo = nullptr;
  239. rightValueInfo = nullptr;
  240. }
  241. };
  242. typedef JsUtil::List<IR::Opnd *, JitArenaAllocator> OpndList;
  243. typedef JsUtil::BaseDictionary<Sym *, ValueInfo *, JitArenaAllocator> SymToValueInfoMap;
  244. typedef JsUtil::BaseDictionary<SymID, IR::Instr *, JitArenaAllocator> SymIdToInstrMap;
  245. typedef JsUtil::BaseHashSet<Value *, JitArenaAllocator, PowerOf2SizePolicy, ValueNumber> ValueSetByValueNumber;
  246. typedef JsUtil::BaseDictionary<SymID, StackSym *, JitArenaAllocator> SymIdToStackSymMap;
  247. typedef JsUtil::Pair<ValueNumber, ValueNumber> ValueNumberPair;
  248. typedef JsUtil::BaseDictionary<ValueNumberPair, Value *, JitArenaAllocator> ValueNumberPairToValueMap;
  249. namespace JsUtil
  250. {
  251. template <>
  252. inline void ClearValue<StackLiteralInitFldData>::Clear(StackLiteralInitFldData* value)
  253. {
  254. #if DBG
  255. value->propIds = nullptr;
  256. value->currentInitFldCount = (uint)-1;
  257. #endif
  258. }
  259. }
  260. typedef JsUtil::BaseDictionary<IntConstType, StackSym *, JitArenaAllocator> IntConstantToStackSymMap;
  261. typedef JsUtil::BaseDictionary<int32, Value *, JitArenaAllocator> IntConstantToValueMap;
  262. typedef JsUtil::BaseDictionary<int64, Value *, JitArenaAllocator> Int64ConstantToValueMap;
  263. typedef JsUtil::BaseDictionary<Js::Var, Value *, JitArenaAllocator> AddrConstantToValueMap;
  264. typedef JsUtil::BaseDictionary<Js::InternalString, Value *, JitArenaAllocator> StringConstantToValueMap;
  265. class JsArrayKills
  266. {
  267. private:
  268. union
  269. {
  270. struct
  271. {
  272. bool killsAllArrays : 1;
  273. bool killsArraysWithNoMissingValues : 1;
  274. bool killsObjectArraysWithNoMissingValues : 1;
  275. bool killsNativeArrays : 1;
  276. bool killsArrayHeadSegments : 1;
  277. bool killsArrayHeadSegmentLengths : 1;
  278. bool killsArrayLengths : 1;
  279. };
  280. byte bits;
  281. };
  282. public:
  283. JsArrayKills() : bits(0)
  284. {
  285. }
  286. private:
  287. JsArrayKills(const byte bits) : bits(bits)
  288. {
  289. }
  290. public:
  291. bool KillsAllArrays() const { return killsAllArrays; }
  292. void SetKillsAllArrays() { killsAllArrays = true; }
  293. bool KillsArraysWithNoMissingValues() const { return killsArraysWithNoMissingValues; }
  294. void SetKillsArraysWithNoMissingValues() { killsArraysWithNoMissingValues = true; }
  295. bool KillsObjectArraysWithNoMissingValues() const { return killsObjectArraysWithNoMissingValues; }
  296. void SetKillsObjectArraysWithNoMissingValues() { killsObjectArraysWithNoMissingValues = true; }
  297. bool KillsNativeArrays() const { return killsNativeArrays; }
  298. void SetKillsNativeArrays() { killsNativeArrays = true; }
  299. bool KillsArrayHeadSegments() const { return killsArrayHeadSegments; }
  300. void SetKillsArrayHeadSegments() { killsArrayHeadSegments = true; }
  301. bool KillsArrayHeadSegmentLengths() const { return killsArrayHeadSegmentLengths; }
  302. void SetKillsArrayHeadSegmentLengths() { killsArrayHeadSegmentLengths = true; }
  303. bool KillsTypedArrayHeadSegmentLengths() const { return KillsAllArrays(); }
  304. bool KillsArrayLengths() const { return killsArrayLengths; }
  305. void SetKillsArrayLengths() { killsArrayLengths = true; }
  306. public:
  307. bool KillsValueType(const ValueType valueType) const
  308. {
  309. Assert(valueType.IsArrayOrObjectWithArray() || valueType.IsOptimizedVirtualTypedArray());
  310. return
  311. killsAllArrays ||
  312. (valueType.IsArrayOrObjectWithArray() &&
  313. (
  314. (killsArraysWithNoMissingValues && valueType.HasNoMissingValues()) ||
  315. (killsObjectArraysWithNoMissingValues && !valueType.IsArray() && valueType.HasNoMissingValues()) ||
  316. (killsNativeArrays && !valueType.HasVarElements())
  317. )
  318. );
  319. }
  320. bool AreSubsetOf(const JsArrayKills &other) const
  321. {
  322. return (bits & other.bits) == bits;
  323. }
  324. JsArrayKills Merge(const JsArrayKills &other)
  325. {
  326. return bits | other.bits;
  327. }
  328. };
  329. class InvariantBlockBackwardIterator
  330. {
  331. private:
  332. GlobOpt *const globOpt;
  333. BasicBlock *const exclusiveEndBlock;
  334. StackSym *const invariantSym;
  335. const ValueNumber invariantSymValueNumber;
  336. BasicBlock *block;
  337. Value *invariantSymValue;
  338. BVSparse<JitArenaAllocator> blockBV;
  339. bool followFlow;
  340. #if DBG
  341. BasicBlock *const inclusiveEndBlock;
  342. #endif
  343. bool UpdatePredBlockBV();
  344. public:
  345. InvariantBlockBackwardIterator(GlobOpt *const globOpt, BasicBlock *const exclusiveBeginBlock, BasicBlock *const inclusiveEndBlock, StackSym *const invariantSym, const ValueNumber invariantSymValueNumber = InvalidValueNumber, bool followFlow = false);
  346. public:
  347. bool IsValid() const;
  348. void MoveNext();
  349. BasicBlock *Block() const;
  350. Value *InvariantSymValue() const;
  351. PREVENT_ASSIGN(InvariantBlockBackwardIterator);
  352. };
  353. class FlowGraph;
  354. class GlobOpt
  355. {
  356. private:
  357. class AddSubConstantInfo;
  358. class ArrayLowerBoundCheckHoistInfo;
  359. class ArrayUpperBoundCheckHoistInfo;
  360. class ArraySrcOpt;
  361. class PRE;
  362. friend BackwardPass;
  363. #if DBG
  364. friend class ObjectTempVerify;
  365. #endif
  366. friend class GlobOptBlockData;
  367. friend class BasicBlock;
  368. private:
  369. SparseArray<Value> * byteCodeConstantValueArray;
  370. // Global bitvectors
  371. BVSparse<JitArenaAllocator> * byteCodeConstantValueNumbersBv;
  372. // Global bitvectors
  373. IntConstantToStackSymMap * intConstantToStackSymMap;
  374. IntConstantToValueMap* intConstantToValueMap;
  375. Int64ConstantToValueMap* int64ConstantToValueMap;
  376. AddrConstantToValueMap * addrConstantToValueMap;
  377. StringConstantToValueMap * stringConstantToValueMap;
  378. #if DBG
  379. // We can still track the finished stack literal InitFld lexically.
  380. BVSparse<JitArenaAllocator> * finishedStackLiteralInitFld;
  381. #endif
  382. BVSparse<JitArenaAllocator> * byteCodeUses;
  383. BVSparse<JitArenaAllocator> * tempBv; // Bit vector for temporary uses
  384. BVSparse<JitArenaAllocator> * objectTypeSyms;
  385. BVSparse<JitArenaAllocator> * prePassCopyPropSym; // Symbols that were copy prop'd during loop prepass
  386. // Symbols that refer to slots in the stack frame. We still use currentBlock->liveFields to tell us
  387. // which of these slots are live; this bit-vector just identifies which entries in liveFields represent
  388. // slots, so we can zero them all out quickly.
  389. BVSparse<JitArenaAllocator> * slotSyms;
  390. PropertySym * propertySymUse;
  391. BVSparse<JitArenaAllocator> * lengthEquivBv;
  392. BVSparse<JitArenaAllocator> * argumentsEquivBv;
  393. BVSparse<JitArenaAllocator> * callerEquivBv;
  394. BVSparse<JitArenaAllocator> * changedSymsAfterIncBailoutCandidate;
  395. BVSparse<JitArenaAllocator> * auxSlotPtrSyms;
  396. JitArenaAllocator * alloc;
  397. JitArenaAllocator * tempAlloc;
  398. Func * func;
  399. ValueNumber currentValue;
  400. BasicBlock * currentBlock;
  401. Region * currentRegion;
  402. IntOverflowDoesNotMatterRange *intOverflowDoesNotMatterRange;
  403. Loop * prePassLoop;
  404. Loop * rootLoopPrePass;
  405. uint instrCountSinceLastCleanUp;
  406. SymIdToInstrMap * prePassInstrMap;
  407. SymID maxInitialSymID;
  408. bool isCallHelper: 1;
  409. bool intOverflowCurrentlyMattersInRange : 1;
  410. bool ignoredIntOverflowForCurrentInstr : 1;
  411. bool ignoredNegativeZeroForCurrentInstr : 1;
  412. bool inInlinedBuiltIn : 1;
  413. bool isRecursiveCallOnLandingPad : 1;
  414. bool updateInductionVariableValueNumber : 1;
  415. bool isPerformingLoopBackEdgeCompensation : 1;
  416. bool doTypeSpec : 1;
  417. bool doAggressiveIntTypeSpec : 1;
  418. bool doAggressiveMulIntTypeSpec : 1;
  419. bool doDivIntTypeSpec : 1;
  420. bool doLossyIntTypeSpec : 1;
  421. bool doFloatTypeSpec : 1;
  422. bool doArrayCheckHoist : 1;
  423. bool doArrayMissingValueCheckHoist : 1;
  424. bool doArraySegmentHoist : 1;
  425. bool doJsArraySegmentHoist : 1;
  426. bool doArrayLengthHoist : 1;
  427. bool doEliminateArrayAccessHelperCall : 1;
  428. bool doTrackRelativeIntBounds : 1;
  429. bool doBoundCheckElimination : 1;
  430. bool doBoundCheckHoist : 1;
  431. bool doLoopCountBasedBoundCheckHoist : 1;
  432. bool doPowIntIntTypeSpec : 1;
  433. bool isAsmJSFunc : 1;
  434. bool doTagChecks : 1;
  435. OpndList * noImplicitCallUsesToInsert;
  436. ValueSetByValueNumber * valuesCreatedForClone;
  437. ValueNumberPairToValueMap *valuesCreatedForMerge;
  438. #if DBG
  439. BVSparse<JitArenaAllocator> * byteCodeUsesBeforeOpt;
  440. #endif
  441. public:
  442. GlobOpt(Func * func);
  443. void Optimize();
  444. // Function used by the backward pass as well.
  445. // GlobOptBailout.cpp
  446. static void TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym);
  447. // GlobOptFields.cpp
  448. void ProcessFieldKills(IR::Instr *instr, BVSparse<JitArenaAllocator> * bv, bool inGlobOpt);
  449. IR::ByteCodeUsesInstr * ConvertToByteCodeUses(IR::Instr * isntr);
  450. bool GetIsAsmJSFunc()const{ return isAsmJSFunc; };
  451. bool IsLoopPrePass() const { return this->prePassLoop != nullptr; }
  452. private:
  453. void OptBlock(BasicBlock *block);
  454. void BackwardPass(Js::Phase tag);
  455. void ForwardPass();
  456. void OptLoops(Loop *loop);
  457. void TailDupPass();
  458. bool TryTailDup(IR::BranchInstr *tailBranch);
  459. void FieldPRE(Loop *loop);
  460. void SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym);
  461. PRECandidates * FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc);
  462. void CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock);
  463. void CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
  464. void TryReplaceLdLen(IR::Instr *& instr);
  465. IR::Instr * OptInstr(IR::Instr *&instr, bool* isInstrCleared);
  466. Value* OptDst(IR::Instr **pInstr, Value *dstVal, Value *src1Val, Value *src2Val, Value *dstIndirIndexVal, Value *src1IndirIndexVal);
  467. void CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val);
  468. Value * OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef = nullptr, IR::IndirOpnd *parentIndirOpnd = nullptr);
  469. void MarkArgumentsUsedForBranch(IR::Instr *inst);
  470. bool OptTagChecks(IR::Instr *instr);
  471. void TryOptimizeInstrWithFixedDataProperty(IR::Instr * * const pInstr);
  472. bool CheckIfPropOpEmitsTypeCheck(IR::Instr *instr, IR::PropertySymOpnd *opnd);
  473. IR::PropertySymOpnd * CreateOpndForTypeCheckOnly(IR::PropertySymOpnd* opnd, Func* func);
  474. bool FinishOptPropOp(IR::Instr *instr, IR::PropertySymOpnd *opnd, BasicBlock* block = nullptr, bool updateExistingValue = false, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr);
  475. IR::Instr * SetTypeCheckBailOut(IR::Opnd *opnd, IR::Instr *instr, BailOutInfo *bailOutInfo);
  476. void OptArguments(IR::Instr *Instr);
  477. void TrackInstrsForScopeObjectRemoval(IR::Instr * instr);
  478. bool AreFromSameBytecodeFunc(IR::RegOpnd const* src1, IR::RegOpnd const* dst) const;
  479. Value * ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val);
  480. Value * ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal);
  481. ValueType GetPrepassValueTypeForDst(const ValueType desiredValueType, IR::Instr *const instr, Value *const src1Value, Value *const src2Value, bool const isValueInfoPreciseRef = false, bool const isSafeToTransferInPrepass = false) const;
  482. bool IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue, bool * canTransferValueNumberToDst = nullptr) const;
  483. bool IsPrepassSrcValueInfoPrecise(IR::Instr *const instr, Value *const src1Value, Value *const src2Value, bool * canTransferValueNumberToDst = nullptr) const;
  484. bool IsSafeToTransferInPrepass(StackSym * const sym, ValueInfo *const srcValueInfo) const;
  485. bool SafeToCopyPropInPrepass(StackSym * const originalSym, StackSym * const copySym, Value *const value) const;
  486. Value * CreateDstUntransferredIntValue(const int32 min, const int32 max, IR::Instr *const instr, Value *const src1Value, Value *const src2Value);
  487. Value * CreateDstUntransferredValue(const ValueType desiredValueType, IR::Instr *const instr, Value *const src1Value, Value *const src2Value);
  488. Value * ValueNumberTransferDst(IR::Instr *const instr, Value *src1Val);
  489. bool IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue);
  490. Value * ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val);
  491. IR::Opnd * CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd = nullptr);
  492. IR::Opnd * CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd = nullptr);
  493. ValueNumber NewValueNumber();
  494. Value * NewValue(ValueInfo *const valueInfo);
  495. Value * NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo);
  496. Value * CopyValue(Value const *const value);
  497. Value * CopyValue(Value const *const value, const ValueNumber valueNumber);
  498. Value * NewGenericValue(const ValueType valueType);
  499. Value * NewGenericValue(const ValueType valueType, IR::Opnd *const opnd);
  500. Value * NewGenericValue(const ValueType valueType, Sym *const sym);
  501. Value * GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd = nullptr);
  502. Value * GetIntConstantValue(const int64 intConst, IR::Instr * instr, IR::Opnd *const opnd = nullptr);
  503. Value * NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable);
  504. Value * NewInt64ConstantValue(const int64 intConst, IR::Instr * instr);
  505. ValueInfo * NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout);
  506. ValueInfo * NewIntRangeValueInfo(const ValueInfo *const originalValueInfo, const int32 min, const int32 max) const;
  507. Value * NewIntRangeValue(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout, IR::Opnd *const opnd = nullptr);
  508. IntBoundedValueInfo * NewIntBoundedValueInfo(const ValueInfo *const originalValueInfo, const IntBounds *const bounds) const;
  509. Value * NewIntBoundedValue(const ValueType valueType, const IntBounds *const bounds, const bool wasNegativeZeroPreventedByBailout, IR::Opnd *const opnd = nullptr);
  510. Value * NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd = nullptr);
  511. Value * GetVarConstantValue(IR::AddrOpnd *addrOpnd);
  512. Value * NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString);
  513. Value * HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value);
  514. Value * NewFixedFunctionValue(Js::JavascriptFunction *functionValue, IR::AddrOpnd *addrOpnd);
  515. StackSym * GetTaggedIntConstantStackSym(const int32 intConstantValue) const;
  516. StackSym * GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const;
  517. Sym * SetSymStore(ValueInfo *valueInfo, Sym *sym);
  518. void SetSymStoreDirect(ValueInfo *valueInfo, Sym *sym);
  519. IR::Instr * TypeSpecialization(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, bool *redoTypeSpecRef, bool *const forceInvariantHoistingRef);
  520. IR::Instr * GetExtendedArg(IR::Instr *instr);
  521. void OptNewScObject(IR::Instr** instrPtr, Value* srcVal);
  522. template <typename T>
  523. bool OptConstFoldBinaryWasm(IR::Instr * *pInstr, const Value* src1, const Value* src2, Value **pDstVal);
  524. template <typename T>
  525. IR::Opnd* ReplaceWConst(IR::Instr **pInstr, T value, Value **pDstVal);
  526. bool OptConstFoldBinary(IR::Instr * *pInstr, const IntConstantBounds &src1IntConstantBounds, const IntConstantBounds &src2IntConstantBounds, Value **pDstVal);
  527. bool OptConstFoldUnary(IR::Instr * *pInstr, const int32 intConstantValue, const bool isUsingOriginalSrc1Value, Value **pDstVal);
  528. bool OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *vInfo);
  529. bool CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2Val, Js::Var src1Var, Js::Var src2Var, bool *result);
  530. bool OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal);
  531. Js::Var GetConstantVar(IR::Opnd *opnd, Value *val);
  532. bool IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val);
  533. bool IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst);
  534. bool IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val = nullptr);
  535. bool TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal);
  536. bool TypeSpecializeIntUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, int32 min, int32 max, Value *const src1OriginalVal, bool *redoTypeSpecRef, bool skipDst = false);
  537. bool TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst = false);
  538. void TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef);
  539. void TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal);
  540. void TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal);
  541. bool TypeSpecializeUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef, bool *const forceInvariantHoistingRef);
  542. bool TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef);
  543. bool TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst = false);
  544. bool TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal);
  545. void TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal);
  546. bool TypeSpecializeLdLen(IR::Instr * *const instrRef, Value * *const src1ValueRef, Value * *const dstValueRef, bool *const forceInvariantHoistingRef);
  547. void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
  548. void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
  549. void TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo = nullptr);
  550. bool TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal);
  551. bool TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal);
  552. bool TypeSpecializeStElem(IR::Instr **pInstr, Value *src1Val, Value **pDstVal);
  553. bool ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd);
  554. ValueType GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize);
// -- MemOp: recognition and emission of memset/memcopy loop patterns -------
// Candidate collection while walking a loop body:
bool IsInstrInvalidForMemOp(IR::Instr *, Loop *, Value *, Value *);
bool CollectMemOpInfo(IR::Instr *, IR::Instr *, Value *, Value *);
bool CollectMemOpStElementI(IR::Instr *, Loop *);
bool CollectMemsetStElementI(IR::Instr *, Loop *);
bool CollectMemcopyStElementI(IR::Instr *, Loop *);
bool CollectMemOpLdElementI(IR::Instr *, Loop *);
bool CollectMemcopyLdElementI(IR::Instr *, Loop *);
SymID GetVarSymID(StackSym *);
const InductionVariable* GetInductionVariable(SymID, Loop *);
bool IsSymIDInductionVariable(SymID, Loop *);
bool IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd);
// Validation and code emission once candidates have been collected:
void ProcessMemOp();
bool InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, struct MemSetEmitData* emitData, bool& errorInInstr);
bool InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, struct MemCopyEmitData* emitData, bool& errorInInstr);
bool ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) struct MemOpEmitData** emitData, int& iEmitData);
void EmitMemop(Loop * loop, LoopCount *loopCount, const struct MemOpEmitData* emitData);
IR::Opnd* GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr = nullptr);
IR::RegOpnd* GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr = nullptr);
LoopCount* GetOrGenerateLoopCountForMemOp(Loop *loop);
IR::Instr* FindUpperBoundsCheckInstr(IR::Instr* instr);
IR::Instr* FindArraySegmentLoadInstr(IR::Instr* instr);
void RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block);
void GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType);
bool HasMemOp(Loop * loop);
private:
// -- Value mutation --------------------------------------------------------
// Replace the type/info of an existing value within 'block';
// allowIncompatibleType relaxes the old-vs-new compatibility check.
void ChangeValueType(BasicBlock *const block, Value *const value, const ValueType newValueType, const bool preserveSubclassInfo, const bool allowIncompatibleType = false) const;
void ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType = false, const bool compensated = false) const;
bool AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const;
private:
#if DBG
void VerifyArrayValueInfoForTracking(const ValueInfo *const valueInfo, const bool isJsArray, const BasicBlock *const block, const bool ignoreKnownImplicitCalls = false) const;
#endif
// -- Kill tracking: record values whose info may later be invalidated ------
// (applied by the ProcessValueKills overloads below)
void TrackNewValueForKills(Value *const value);
void DoTrackNewValueForKills(Value *const value);
void TrackCopiedValueForKills(Value *const value);
void DoTrackCopiedValueForKills(Value *const value);
void TrackMergedValueForKills(Value *const value, GlobOptBlockData *const blockData, BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const;
void DoTrackMergedValueForKills(Value *const value, GlobOptBlockData *const blockData, BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const;
void TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const;
void ProcessValueKills(IR::Instr *const instr);
void ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData);
void ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData);
bool NeedBailOnImplicitCallForLiveValues(BasicBlock const * const block, const bool isForwardPass) const;
// -- Array bounds checks and array source optimization ---------------------
IR::Instr* CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func);
IR::Instr* CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func* func);
IR::Instr* AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset);
void OptArraySrc(IR::Instr **const instrRef, Value ** src1Val, Value ** src2Val);
void OptStackArgLenAndConst(IR::Instr* instr, Value** src1Val);
private:
// -- Induction variables and bound-check hoisting state --------------------
void TrackIntSpecializedAddSubConstant(IR::Instr *const instr, const AddSubConstantInfo *const addSubConstantInfo, Value *const dstValue, const bool updateSourceBounds);
void CloneBoundCheckHoistBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock, GlobOptBlockData *const fromData);
void MergeBoundCheckHoistBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock, GlobOptBlockData *const fromData);
void DetectUnknownChangesToInductionVariables(GlobOptBlockData *const blockData);
void SetInductionVariableValueNumbers(GlobOptBlockData *const blockData);
void FinalizeInductionVariables(Loop *const loop, GlobOptBlockData *const headerData);
void InvalidateInductionVariables(IR::Instr * instr);
// How a sym's bound relates to its value in the loop landing pad.
enum class SymBoundType {OFFSET, VALUE, UNKNOWN};
SymBoundType DetermineSymBoundOffsetOrValueRelativeToLandingPad(StackSym *const sym, const bool landingPadValueIsLowerBound, ValueInfo *const valueInfo, const IntBounds *const bounds, GlobOptBlockData *const landingPadGlobOptBlockData, int *const boundOffsetOrValueRef);
private:
// -- Loop count computation and generation ---------------------------------
void DetermineDominatingLoopCountableBlock(Loop *const loop, BasicBlock *const headerBlock);
void DetermineLoopCount(Loop *const loop);
void GenerateLoopCount(Loop *const loop, LoopCount *const loopCount);
void GenerateLoopCountPlusOne(Loop *const loop, LoopCount *const loopCount);
void GenerateSecondaryInductionVariableBound(Loop *const loop, StackSym *const inductionVariableSym, LoopCount *const loopCount, const int maxMagnitudeChange, const bool needsMagnitudeAdjustment, StackSym *const boundSym);
private:
// Decides whether the lower/upper array bounds checks can be hoisted out of
// the loop; results are returned through the two HoistInfo out-params.
void DetermineArrayBoundCheckHoistability(bool needLowerBoundCheck, bool needUpperBoundCheck, ArrayLowerBoundCheckHoistInfo &lowerHoistInfo, ArrayUpperBoundCheckHoistInfo &upperHoistInfo, const bool isJsArray, StackSym *const indexSym, Value *const indexValue, const IntConstantBounds &indexConstantBounds, StackSym *const headSegmentLengthSym, Value *const headSegmentLengthValue, const IntConstantBounds &headSegmentLengthConstantBounds, Loop *const headSegmentLengthInvariantLoop, bool &failedToUpdateCompatibleLowerBoundCheck, bool &failedToUpdateCompatibleUpperBoundCheck);
private:
// -- No-implicit-call uses and JS array kill analysis ----------------------
void CaptureNoImplicitCallUses(IR::Opnd *opnd, const bool usesNoMissingValuesInfo, IR::Instr *const includeCurrentInstr = nullptr);
void InsertNoImplicitCallUses(IR::Instr *const instr);
void ProcessNoImplicitCallArrayUses(IR::RegOpnd * baseOpnd, IR::ArrayRegOpnd * baseArrayOpnd, IR::Instr * instr, bool isLikelyJsArray, bool useNoMissingValues);
void PrepareLoopArrayCheckHoist();
public:
// Which kinds of array state (e.g. value types) 'instr' may kill.
JsArrayKills CheckJsArrayKills(IR::Instr *const instr);
// Accessors for the optimizer state of the block currently being processed.
GlobOptBlockData const * CurrentBlockData() const;
GlobOptBlockData * CurrentBlockData();
void CommitCapturedValuesCandidate();
private:
bool IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr);
bool NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock const * const block, const bool isForwardPass) const;
private:
// -- Int-overflow handling, type conversions, and invariant hoisting -------
bool PrepareForIgnoringIntOverflow(IR::Instr *const instr);
void VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr);
void PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val);
void ProcessKills(IR::Instr *instr);
void InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData);
void InsertValueCompensation(BasicBlock *const predecessor, BasicBlock *const successor, const SymToValueInfoMap *symsRequiringCompensationToMergedValueInfoMap);
// Conversions between var and specialized (int32/float64) representations.
// The BVSparse overloads convert every sym in the bit-vector; the IR::Instr
// overloads convert a single operand, returning the (possibly new) instr.
IR::Instr * ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val);
void ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IR::Instr* insertBeforeInstr = nullptr);
IR::Instr * ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *val, bool needsUpdate);
void ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr = nullptr);
void ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block);
void ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind = IR::BailOutInvalid, bool lossy = false, IR::Instr *insertBeforeInstr = nullptr);
IR::Instr * ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy);
IR::Instr * ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind);
IR::Instr * ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir,
IRType toType, IR::BailOutKind bailOutKind, bool lossy = false, IR::Instr *insertBeforeInstr = nullptr);
IR::Instr * ToTypeSpecIndex(IR::Instr * instr, IR::RegOpnd * opnd, IR::IndirOpnd * indir);
void ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block);
void ToVarStackSym(StackSym *varSym, BasicBlock *block);
void ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
void ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
void ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block);
// Branch constant folding and integer range propagation.
void OptConstFoldBr(bool test, IR::Instr *instr, Value * intTypeSpecSrc1Val = nullptr, Value * intTypeSpecSrc2Val = nullptr);
void PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32 * pNewMax);
void PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
int32 min2, int32 max2, int32 *pNewMin, int32 *pNewMax);
// Loop-invariant detection and hoisting.
bool OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives);
bool OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef = nullptr);
bool OptDstIsInvariant(IR::RegOpnd *dst);
bool OptIsInvariant(IR::Instr *instr, BasicBlock *block, Loop *loop, Value *src1Val, Value *src2Val, bool isNotTypeSpecConv, const bool forceInvariantHoisting = false);
void OptHoistInvariant(IR::Instr *instr, BasicBlock *block, Loop *loop, Value *dstVal, Value *const src1Val, Value *const src2Value,
bool isNotTypeSpecConv, bool lossy = false, IR::BailOutKind bailoutKind = IR::BailOutInvalid);
bool TryHoistInvariant(IR::Instr *instr, BasicBlock *block, Value *dstVal, Value *src1Val, Value *src2Val, bool isNotTypeSpecConv,
const bool lossy = false, const bool forceInvariantHoisting = false, IR::BailOutKind bailoutKind = IR::BailOutInvalid);
void HoistInvariantValueInfo(ValueInfo *const invariantValueInfoToHoist, Value *const valueToUpdate, BasicBlock *const targetBlock);
void OptHoistUpdateValueType(Loop* loop, IR::Instr* instr, IR::Opnd** srcOpndPtr, Value *const srcVal);
bool IsNonNumericRegOpnd(IR::RegOpnd *opnd, bool inGlobOpt, bool *isSafeToTransferInPrepass = nullptr) const;
public:
// -- Static phase-enablement queries (per Func, usable without a GlobOpt) --
static bool IsTypeSpecPhaseOff(Func const * func);
static bool DoAggressiveIntTypeSpec(Func const * func);
static bool DoLossyIntTypeSpec(Func const * func);
static bool DoFloatTypeSpec(Func const * func);
static bool DoStringTypeSpec(Func const * func);
static bool DoArrayCheckHoist(Func const * const func);
static bool DoArrayMissingValueCheckHoist(Func const * const func);
static bool DoArraySegmentHoist(const ValueType baseValueType, Func const * const func);
static bool DoArrayLengthHoist(Func const * const func);
static bool DoEliminateArrayAccessHelperCall(Func* func);
static bool DoTypedArrayTypeSpec(Func const * func);
static bool DoNativeArrayTypeSpec(Func const * func);
static bool IsSwitchOptEnabled(Func const * func);
static bool IsSwitchOptEnabledForIntTypeSpec(Func const * func);
static bool DoInlineArgsOpt(Func const * func);
static bool IsPREInstrCandidateLoad(Js::OpCode opcode);
static bool IsPREInstrSequenceCandidateLoad(Js::OpCode opcode);
static bool IsPREInstrCandidateStore(Js::OpCode opcode);
static bool ImplicitCallFlagsAllowOpts(Loop * loop);
static bool ImplicitCallFlagsAllowOpts(Func const * func);
private:
// -- Per-instance phase queries; inline ones forward to the static overload
// on this->func, the rest are defined in the .cpp -------------------------
bool DoConstFold() const;
bool DoTypeSpec() const;
bool DoAggressiveIntTypeSpec() const;
bool DoAggressiveMulIntTypeSpec() const;
bool DoDivIntTypeSpec() const;
bool DoLossyIntTypeSpec() const;
bool DoFloatTypeSpec() const;
bool DoStringTypeSpec() const { return GlobOpt::DoStringTypeSpec(this->func); }
bool DoArrayCheckHoist() const;
bool DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr const * const instr = nullptr) const;
bool DoArrayMissingValueCheckHoist() const;
bool DoArraySegmentHoist(const ValueType baseValueType) const;
bool DoTypedArraySegmentLengthHoist(Loop *const loop) const;
bool DoArrayLengthHoist() const;
bool DoEliminateArrayAccessHelperCall() const;
bool DoTypedArrayTypeSpec() const { return GlobOpt::DoTypedArrayTypeSpec(this->func); }
bool DoNativeArrayTypeSpec() const { return GlobOpt::DoNativeArrayTypeSpec(this->func); }
bool DoLdLenIntSpec(IR::Instr * const instr, const ValueType baseValueType);
bool IsSwitchOptEnabled() const { return GlobOpt::IsSwitchOptEnabled(this->func); }
bool IsSwitchOptEnabledForIntTypeSpec() const { return GlobOpt::IsSwitchOptEnabledForIntTypeSpec(this->func); }
bool DoPathDependentValues() const;
bool DoTrackRelativeIntBounds() const;
bool DoBoundCheckElimination() const;
bool DoBoundCheckHoist() const;
bool DoLoopCountBasedBoundCheckHoist() const;
bool DoPowIntIntTypeSpec() const;
bool DoTagChecks() const;
  721. template <class Fn>
  722. void TrackByteCodeUsesForInstrAddedInOptInstr(IR::Instr * trackByteCodeUseOnInstr, Fn fn)
  723. {
  724. BVSparse<JitArenaAllocator> *currentBytecodeUses = this->byteCodeUses;
  725. PropertySym * currentPropertySymUse = this->propertySymUse;
  726. PropertySym * tempPropertySymUse = NULL;
  727. this->byteCodeUses = NULL;
  728. BVSparse<JitArenaAllocator> *tempByteCodeUse = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  729. #if DBG
  730. BVSparse<JitArenaAllocator> *currentBytecodeUsesBeforeOpt = this->byteCodeUsesBeforeOpt;
  731. this->byteCodeUsesBeforeOpt = tempByteCodeUse;
  732. #endif
  733. this->propertySymUse = NULL;
  734. GlobOpt::TrackByteCodeSymUsed(trackByteCodeUseOnInstr, tempByteCodeUse, &tempPropertySymUse);
  735. fn();
  736. this->byteCodeUses = currentBytecodeUses;
  737. this->propertySymUse = currentPropertySymUse;
  738. #if DBG
  739. this->byteCodeUsesBeforeOpt = currentBytecodeUsesBeforeOpt;
  740. #endif
  741. }
private:
// GlobOptBailout.cpp
// -- Bailout generation: capturing values/byte-code uses for deopt ---------
bool MayNeedBailOut(Loop * loop) const;
static void TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySymUse);
static void TrackByteCodeSymUsed(IR::RegOpnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed);
static void TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed);
// Capture the current values of syms so the interpreter can be restarted at
// the bailout point; "FromScratch" rebuilds, "Incremental" reuses prior
// captures.
void CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator>* argsToCapture);
void CaptureValuesFromScratch(
BasicBlock * block,
SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter, SListBase<CopyPropSyms>::EditingIterator & bailOutCopyPropIter,
BVSparse<JitArenaAllocator>* argsToCapture);
void CaptureValuesIncremental(
BasicBlock * block,
SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
SListBase<CopyPropSyms>::EditingIterator & bailOutCopyPropIter, BVSparse<JitArenaAllocator>* argsToCapture);
void CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter);
void CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator);
void CaptureByteCodeSymUses(IR::Instr * instr);
IR::ByteCodeUsesInstr * InsertByteCodeUses(IR::Instr * instr, bool includeDef = false);
// -- Inlinee call tracking -------------------------------------------------
void ProcessInlineeEnd(IR::Instr * instr);
void TrackCalls(IR::Instr * instr);
void RecordInlineeFrameInfo(IR::Instr* instr);
void ClearInlineeFrameInfo(IR::Instr* instr);
void EndTrackCall(IR::Instr * instr);
void EndTrackingOfArgObjSymsForInlinee();
void FillBailOutInfo(BasicBlock *block, BailOutInfo *bailOutInfo);
void FillBailOutInfo(BasicBlock *block, _In_ IR::Instr * instr);
static void MarkNonByteCodeUsed(IR::Instr * instr);
static void MarkNonByteCodeUsed(IR::Opnd * opnd);
// -- Deciding which bailout kinds an instruction needs ---------------------
void GenerateLazyBailOut(IR::Instr *& instr);
bool IsLazyBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, bool isHoisted) const;
bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const;
bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val,
BasicBlock const * block, bool hasLiveFields,
bool mayNeedImplicitCallBailOut, bool isForwardPass, bool mayNeedLazyBailOut = false) const;
static bool IsTypeCheckProtected(const IR::Instr * instr);
static bool MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val);
static bool MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val);
void GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind);
public:
void GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind);
private:
IR::Instr * EnsureBailTarget(Loop * loop);
// GlobOptFields.cpp
// -- Field (property) kill tracking and field copy-prop --------------------
void ProcessFieldKills(IR::Instr * instr);
void KillLiveFields(StackSym * stackSym, BVSparse<JitArenaAllocator> * bv);
void KillLiveFields(PropertySym * propertySym, BVSparse<JitArenaAllocator> * bv);
void KillLiveFields(BVSparse<JitArenaAllocator> *const fieldsToKill, BVSparse<JitArenaAllocator> *const bv) const;
void KillLiveElems(IR::IndirOpnd * indirOpnd, IR::Opnd * valueOpnd, BVSparse<JitArenaAllocator> * bv, bool inGlobOpt, Func *func);
void KillAllFields(BVSparse<JitArenaAllocator> * bv);
void SetAnyPropertyMayBeWrittenTo();
void AddToPropertiesWrittenTo(Js::PropertyId propertyId);
// Phase queries for field copy-prop / object type specialization.
bool DoFieldCopyProp() const;
bool DoFieldCopyProp(Loop * loop) const;
bool DoFunctionFieldCopyProp() const;
bool DoObjTypeSpec() const;
bool DoObjTypeSpec(Loop * loop) const;
bool DoFieldRefOpts() const { return DoObjTypeSpec(); }
bool DoFieldRefOpts(Loop * loop) const { return DoObjTypeSpec(loop); }
bool DoFieldOpts(Loop * loop) const;
bool DoFieldPRE() const;
bool DoFieldPRE(Loop *loop) const;
Value * CreateFieldSrcValue(PropertySym * sym, PropertySym * originalSym, IR::Opnd **ppOpnd, IR::Instr * instr);
bool NeedBailOnImplicitCallWithFieldOpts(Loop *loop, bool hasLiveFields) const;
IR::Instr * EnsureDisableImplicitCallRegion(Loop * loop);
void UpdateObjPtrValueType(IR::Opnd * opnd, IR::Instr * instr);
bool TrackArgumentsObject();
void CannotAllocateArgumentsObjectOnStack(Func * curFunc);
#if DBG
bool IsPropertySymId(SymID symId) const;
static void AssertCanCopyPropOrCSEFieldLoad(IR::Instr * instr);
void EmitIntRangeChecks(IR::Instr* instr);
void EmitIntRangeChecks(IR::Instr* instr, IR::Opnd* opnd);
#endif
// -- Object type specialization: type syms and type-check sequences --------
StackSym * EnsureObjectTypeSym(StackSym * objectSym);
PropertySym * EnsurePropertyWriteGuardSym(PropertySym * propertySym);
void PreparePropertySymForTypeCheckSeq(PropertySym *propertySym);
bool IsPropertySymPreparedForTypeCheckSeq(PropertySym *propertySym);
bool PreparePropertySymOpndForTypeCheckSeq(IR::PropertySymOpnd *propertySymOpnd, IR::Instr * instr, Loop *loop);
static bool AreTypeSetsIdentical(Js::EquivalentTypeSet * leftTypeSet, Js::EquivalentTypeSet * rightTypeSet);
static bool IsSubsetOf(Js::EquivalentTypeSet * leftTypeSet, Js::EquivalentTypeSet * rightTypeSet);
static bool CompareCurrentTypesWithExpectedTypes(JsTypeValueInfo *valueInfo, IR::PropertySymOpnd * propertySymOpnd);
bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd);
bool CheckIfInstrInTypeCheckSeqEmitsTypeCheck(IR::Instr* instr, IR::PropertySymOpnd *opnd);
// Template parameter selects analysis-only (false) vs. mutating (true) mode;
// the out-params report what the sequence processing decided.
template<bool makeChanges>
bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd, BasicBlock* block, bool updateExistingValue, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr, bool *isObjTypeChecked = nullptr);
StackSym * EnsureAuxSlotPtrSym(IR::PropertySymOpnd *opnd);
void KillAuxSlotPtrSyms(IR::PropertySymOpnd *opnd, BasicBlock *block, bool isObjTypeSpecialized);
template<class Fn>
bool MapObjectHeaderInlinedTypeSymsUntil(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId, Fn fn);
void KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = SymID_Invalid);
bool HasLiveObjectHeaderInlinedTypeSym(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = SymID_Invalid);
// Value numbering of object-type values (single type or equivalent-type set).
void ValueNumberObjectType(IR::Opnd *dstOpnd, IR::Instr *instr);
void SetSingleTypeOnObjectTypeValue(Value* value, const JITTypeHolder type);
void SetTypeSetOnObjectTypeValue(Value* value, Js::EquivalentTypeSet* typeSet);
void UpdateObjectTypeValue(Value* value, const JITTypeHolder type, bool setType, Js::EquivalentTypeSet* typeSet, bool setTypeSet);
void SetObjectTypeFromTypeSym(StackSym *typeSym, Value* value, BasicBlock* block = nullptr);
void SetObjectTypeFromTypeSym(StackSym *typeSym, const JITTypeHolder type, Js::EquivalentTypeSet * typeSet, BasicBlock* block = nullptr, bool updateExistingValue = false);
void SetObjectTypeFromTypeSym(StackSym *typeSym, const JITTypeHolder type, Js::EquivalentTypeSet * typeSet, GlobOptBlockData *blockData, bool updateExistingValue = false);
void KillObjectType(StackSym *objectSym, BVSparse<JitArenaAllocator>* liveFields = nullptr);
void KillAllObjectTypes(BVSparse<JitArenaAllocator>* liveFields = nullptr);
void EndFieldLifetime(IR::SymOpnd *symOpnd);
PropertySym * CopyPropPropertySymObj(IR::SymOpnd *opnd, IR::Instr *instr);
static bool NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind);
// -- Peephole opts, EH edges, and common subexpression elimination ---------
IR::Instr * PreOptPeep(IR::Instr *instr);
IR::Instr * OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val);
void OptimizeIndirUses(IR::IndirOpnd *indir, IR::Instr * *pInstr, Value **indirIndexValRef);
void RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr);
void ProcessTryHandler(IR::Instr* instr);
bool ProcessExceptionHandlingEdges(IR::Instr* instr);
void InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd);
void RemoveFlowEdgeToCatchBlock(IR::Instr * instr);
bool RemoveFlowEdgeToFinallyOnExceptionBlock(IR::Instr * instr);
// CSE: expressions are hashed (GetHash) and recorded per block (CSEAddInstr)
// so later matching instructions can be replaced (CSEOptimize).
void CSEAddInstr(BasicBlock *block, IR::Instr *instr, Value *dstVal, Value *src1Val, Value *src2Val, Value *dstIndirIndexVal, Value *src1IndirIndexVal);
void OptimizeChecks(IR::Instr * const instr);
bool CSEOptimize(BasicBlock *block, IR::Instr * *const instrRef, Value **pSrc1Val, Value **pSrc2Val, Value **pSrc1IndirIndexVal, bool intMathExprOnly = false);
bool GetHash(IR::Instr *instr, Value *src1Val, Value *src2Val, ExprAttributes exprAttributes, ExprHash *pHash);
void ProcessArrayValueKills(IR::Instr *instr);
static bool NeedBailOnImplicitCallForCSE(BasicBlock const *block, bool isForwardPass);
bool DoCSE();
bool CanCSEArrayStore(IR::Instr *instr);
#if DBG_DUMP
// -- Debug-only dump/trace helpers -----------------------------------------
void Dump() const;
void DumpSymToValueMap() const;
void DumpSymToValueMap(BasicBlock const * block) const;
void DumpSymVal(int index);
void Trace(BasicBlock * basicBlock, bool before) const;
void TraceSettings() const;
#endif
// -- Branch constant folding on known int ranges ---------------------------
bool IsWorthSpecializingToInt32Branch(IR::Instr const * instr, Value const * src1Val, Value const * src2Val) const;
bool TryOptConstFoldBrFalse(IR::Instr *const instr, Value *const srcValue, const int32 min, const int32 max);
bool TryOptConstFoldBrEqual(IR::Instr *const instr, const bool branchOnEqual, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrGreaterThan(IR::Instr *const instr, const bool branchOnGreaterThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrGreaterThanOrEqual(IR::Instr *const instr, const bool branchOnGreaterThanOrEqual, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrUnsignedLessThan(IR::Instr *const instr, const bool branchOnLessThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
bool TryOptConstFoldBrUnsignedGreaterThan(IR::Instr *const instr, const bool branchOnGreaterThan, Value *const src1Value, const int32 min1, const int32 max1, Value *const src2Value, const int32 min2, const int32 max2);
// -- Refining int bounds along the taken/not-taken edges of a branch -------
void UpdateIntBoundsForEqualBranch(Value *const src1Value, Value *const src2Value, const int32 src2ConstantValue = 0);
void UpdateIntBoundsForNotEqualBranch(Value *const src1Value, Value *const src2Value, const int32 src2ConstantValue = 0);
void UpdateIntBoundsForGreaterThanOrEqualBranch(Value *const src1Value, Value *const src2Value);
void UpdateIntBoundsForGreaterThanBranch(Value *const src1Value, Value *const src2Value);
void UpdateIntBoundsForLessThanOrEqualBranch(Value *const src1Value, Value *const src2Value);
void UpdateIntBoundsForLessThanBranch(Value *const src1Value, Value *const src2Value);
IntBounds * GetIntBoundsToUpdate(const ValueInfo *const valueInfo, const IntConstantBounds &constantBounds, const bool isSettingNewBound, const bool isBoundConstant, const bool isSettingUpperBound, const bool isExplicit);
ValueInfo * UpdateIntBoundsForEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForNotEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForGreaterThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForGreaterThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const int boundOffset, const bool isExplicit);
ValueInfo * UpdateIntBoundsForGreaterThan(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForLessThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
ValueInfo * UpdateIntBoundsForLessThanOrEqual(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const int boundOffset, const bool isExplicit);
ValueInfo * UpdateIntBoundsForLessThan(Value *const value, const IntConstantBounds &constantBounds, Value *const boundValue, const IntConstantBounds &boundConstantBounds, const bool isExplicit);
// -- Path-dependent info (facts valid only along one branch edge) ----------
void SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info);
PathDependentInfoToRestore UpdatePathDependentInfo(PathDependentInfo *const info);
void RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore);
// -- Mark-temp-object tracking ---------------------------------------------
IR::Instr * TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrEnd);
void TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd);
IR::Instr * GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst);
friend class InvariantBlockBackwardIterator;
  900. };
  901. class GlobOpt::PRE
  902. {
  903. public:
  904. PRE(GlobOpt * globOpt) : globOpt(globOpt) {}
  905. void FieldPRE(Loop *loop);
  906. private:
  907. void FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc);
  908. void PreloadPRECandidates(Loop *loop);
  909. BOOL PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate);
  910. IR::Instr * InsertPropertySymPreloadInLandingPad(IR::Instr * origLdInstr, Loop * loop, PropertySym * propertySym);
  911. void InsertInstrInLandingPad(IR::Instr * instr, Loop * loop);
  912. bool InsertSymDefinitionInLandingPad(StackSym * sym, Loop * loop, Sym ** objPtrCopyPropSym);
  913. void MakePropertySymLiveOnBackEdges(PropertySym * propertySym, Loop * loop, Value * valueToAdd);
  914. void RemoveOverlyOptimisticInitialValues(Loop * loop);
  915. #if DBG_DUMP
  916. void TraceFailedPreloadInLandingPad(const Loop *const loop, PropertySym * propSym, const char16* reason) const;
  917. #endif
  918. private:
  919. GlobOpt * globOpt;
  920. PRECandidates * candidates;
  921. };