// GlobOptBailOut.cpp
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"
  6. void
  7. GlobOpt::CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
  8. {
  9. if (!sym->IsStackSym())
  10. {
  11. return;
  12. }
  13. StackSym * copyPropSym = this->GetCopyPropSym(block, sym, val);
  14. if (copyPropSym != nullptr)
  15. {
  16. bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), copyPropSym);
  17. }
  18. }
// Builds the constant-value and copy-prop-sym lists for a bailout point from
// scratch (used when this block has no previously captured values to start from).
// Results are appended through the two editing iterators.
void
GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
{
    Sym * sym = nullptr;
    Value * value = nullptr;
    ValueInfo * valueInfo = nullptr;

    // Pass 1: rebuild changedSyms as the set of capturable syms — stack syms
    // with a byte-code register whose value has either a sym store or an int
    // constant. Everything else cannot be restored at bailout.
    block->globOptData.changedSyms->ClearAll();

    FOREACH_GLOBHASHTABLE_ENTRY(bucket, block->globOptData.symToValueMap)
    {
        value = bucket.element;
        valueInfo = value->GetValueInfo();

        // Nothing to restore from: no sym store and no int constant.
        if (valueInfo->GetSymStore() == nullptr && !valueInfo->HasIntConstantValue())
        {
            continue;
        }

        sym = bucket.value;
        // Only byte-code-visible stack syms matter for bailout restoration.
        if (sym == nullptr || !sym->IsStackSym() || !(sym->AsStackSym()->HasByteCodeRegSlot()))
        {
            continue;
        }

        block->globOptData.changedSyms->Set(sym->m_id);
    }
    NEXT_GLOBHASHTABLE_ENTRY;

    // Pass 2: for each sym collected above, capture either its constant value
    // (int or var) or, failing that, a copy-prop sym to restore from.
    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        HashBucket<Sym*, Value*> * bucket = block->globOptData.symToValueMap->GetBucket(symId);
        StackSym * stackSym = bucket->value->AsStackSym();
        value = bucket->element;
        valueInfo = value->GetValueInfo();

        int intConstantValue;
        if (valueInfo->TryGetIntConstantValue(&intConstantValue))
        {
            // Known int constant: the restore needs no live sym at all.
            BailoutConstantValue constValue;
            constValue.InitIntConstValue(intConstantValue);
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else if (valueInfo->IsVarConstant())
        {
            // Known var constant (e.g. a fixed object/address).
            BailoutConstantValue constValue;
            constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else
        {
            // Not a constant — try to capture a copy-prop sym instead.
            CaptureCopyPropValue(block, stackSym, value, bailOutCopySymsIter);
        }
    }
    NEXT_BITSET_IN_SPARSEBV
}
// Builds the bailout capture lists incrementally: syms that have NOT changed
// since the block's previous capture (block->globOptData.capturedValues) are
// copied over; syms in changedSyms are re-captured from the current value map.
// Both the previously captured lists and the changedSyms bitvector are walked
// in ascending sym-id order, merge-style.
void
GlobOpt::CaptureValuesIncremental(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
{
    CapturedValues * currCapturedValues = block->globOptData.capturedValues;
    SListBase<ConstantStackSymValue>::Iterator iterConst(currCapturedValues ? &currCapturedValues->constantValues : nullptr);
    SListBase<CopyPropSyms>::Iterator iterCopyPropSym(currCapturedValues ? &currCapturedValues->copyPropSyms : nullptr);
    bool hasConstValue = currCapturedValues ? iterConst.Next() : false;
    bool hasCopyPropSym = currCapturedValues ? iterCopyPropSym.Next() : false;

    // Sentinel: guarantees one final loop iteration with symId == InvalidSymID
    // so any old captured entries with ids larger than every changed sym still
    // get flushed by the "copy unchanged" while-loops below.
    block->globOptData.changedSyms->Set(Js::Constants::InvalidSymID);

    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        Sym * sym = hasConstValue ? iterConst.Data().Key() : nullptr;
        Value * val = nullptr;
        HashBucket<Sym *, Value *> * symIdBucket = nullptr;

        // copy unchanged sym to new capturedValues
        while (sym && sym->m_id < symId)
        {
            Assert(sym->IsStackSym());

            // Arg-slot syms are excluded from the carried-over constants.
            if (!sym->AsStackSym()->HasArgSlotNum())
            {
                bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), iterConst.Data().Value());
            }

            hasConstValue = iterConst.Next();
            sym = hasConstValue ? iterConst.Data().Key() : nullptr;
        }
        // Skip the old entry for this (changed) sym; it will be re-captured below.
        if (sym && sym->m_id == symId)
        {
            hasConstValue = iterConst.Next();
        }
        if (symId != Js::Constants::InvalidSymID)
        {
            // recapture changed constant sym
            symIdBucket = block->globOptData.symToValueMap->GetBucket(symId);
            if (symIdBucket == nullptr)
            {
                // Sym has no value in this block anymore — nothing to capture.
                continue;
            }

            Sym * symIdSym = symIdBucket->value;
            Assert(symIdSym->IsStackSym() && (symIdSym->AsStackSym()->HasByteCodeRegSlot() || symIdSym->AsStackSym()->HasArgSlotNum()));

            val = symIdBucket->element;
            ValueInfo* valueInfo = val->GetValueInfo();

            if (valueInfo->GetSymStore() != nullptr)
            {
                int32 intConstValue;
                BailoutConstantValue constValue;

                if (valueInfo->TryGetIntConstantValue(&intConstValue))
                {
                    constValue.InitIntConstValue(intConstValue);
                    bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    // Constant captured; skip copy-prop processing for this sym.
                    continue;
                }
                else if(valueInfo->IsVarConstant())
                {
                    constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
                    bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    // Constant captured; skip copy-prop processing for this sym.
                    continue;
                }
            }
            else if (!valueInfo->HasIntConstantValue())
            {
                // No sym store and no int constant — not restorable; skip.
                continue;
            }
        }

        sym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;

        // process unchanged sym, but copy sym might have changed
        while (sym && sym->m_id < symId)
        {
            StackSym * copyPropSym = iterCopyPropSym.Data().Value();

            Assert(sym->IsStackSym());

            if (!block->globOptData.changedSyms->Test(copyPropSym->m_id))
            {
                // Copy-prop source itself is unchanged — carry the pair over.
                if (!sym->AsStackSym()->HasArgSlotNum())
                {
                    bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), copyPropSym);
                }
            }
            else
            {
                // Copy-prop source changed — re-derive a copy-prop sym from the
                // current value of this (unchanged) sym.
                if (!sym->AsStackSym()->HasArgSlotNum())
                {
                    val = FindValue(sym);
                    if (val != nullptr)
                    {
                        CaptureCopyPropValue(block, sym, val, bailOutCopySymsIter);
                    }
                }
            }

            hasCopyPropSym = iterCopyPropSym.Next();
            sym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        }
        // Skip the old copy-prop entry for this (changed) sym.
        if (sym && sym->m_id == symId)
        {
            hasCopyPropSym = iterCopyPropSym.Next();
        }
        if (symId != Js::Constants::InvalidSymID)
        {
            // recapture changed copy prop sym
            symIdBucket = block->globOptData.symToValueMap->GetBucket(symId);
            if (symIdBucket != nullptr)
            {
                Sym * symIdSym = symIdBucket->value;
                val = FindValue(symIdSym);
                if (val != nullptr)
                {
                    CaptureCopyPropValue(block, symIdSym, val, bailOutCopySymsIter);
                }
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV
}
// Captures the constant values and copy-prop syms live at a bailout point and
// attaches them to bailOutInfo. Chooses the from-scratch or incremental path
// depending on whether the block already has a captured-values baseline.
void
GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // TODO[generators][ianhall]: Enable constprop and copyprop for generator functions; see GlobOpt::CopyProp()
        // Even though CopyProp is disabled for generator functions we must also not put the copy-prop sym into the
        // bailOutInfo so that the bailOutInfo keeps track of the key sym in its byteCodeUpwardExposed list.
        return;
    }

    // Build the lists into a local CapturedValues first, then splice them into
    // bailOutInfo below.
    CapturedValues capturedValues;
    SListBase<ConstantStackSymValue>::EditingIterator bailOutConstValuesIter(&capturedValues.constantValues);
    SListBase<CopyPropSyms>::EditingIterator bailOutCopySymsIter(&capturedValues.copyPropSyms);

    // Advance each editing iterator past the (empty) head so InsertNodeBefore
    // appends at the tail in encounter order.
    bailOutConstValuesIter.Next();
    bailOutCopySymsIter.Next();

    if (!block->globOptData.capturedValues)
    {
        CaptureValuesFromScratch(block, bailOutConstValuesIter, bailOutCopySymsIter);
    }
    else
    {
        CaptureValuesIncremental(block, bailOutConstValuesIter, bailOutCopySymsIter);
    }

    // attach capturedValues to bailOutInfo
    // NOTE(review): order matters here — Clear the destination list, re-point the
    // iterator's tail link at it via SetNext, then shallow-copy the list header.
    bailOutInfo->capturedValues.constantValues.Clear(this->func->m_alloc);
    bailOutConstValuesIter.SetNext(&bailOutInfo->capturedValues.constantValues);
    bailOutInfo->capturedValues.constantValues = capturedValues.constantValues;

    bailOutInfo->capturedValues.copyPropSyms.Clear(this->func->m_alloc);
    bailOutCopySymsIter.SetNext(&bailOutInfo->capturedValues.copyPropSyms);
    bailOutInfo->capturedValues.copyPropSyms = capturedValues.copyPropSyms;

    if (!PHASE_OFF(Js::IncrementalBailoutPhase, func))
    {
        // cache the pointer of current bailout as potential baseline for later bailout in this block
        block->globOptData.capturedValuesCandidate = &bailOutInfo->capturedValues;

        // reset changed syms to track symbols change after the above captured values candidate
        this->changedSymsAfterIncBailoutCandidate->ClearAll();
    }
}
  221. void
  222. GlobOpt::CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator)
  223. {
  224. FOREACH_BITSET_IN_SPARSEBV(id, this->blockData.argObjSyms)
  225. {
  226. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  227. Assert(stackSym != nullptr);
  228. if (!stackSym->HasByteCodeRegSlot())
  229. {
  230. continue;
  231. }
  232. if (!bailOutInfo->capturedValues.argObjSyms)
  233. {
  234. bailOutInfo->capturedValues.argObjSyms = JitAnew(allocator, BVSparse<JitArenaAllocator>, allocator);
  235. }
  236. bailOutInfo->capturedValues.argObjSyms->Set(id);
  237. // Add to BailOutInfo
  238. }
  239. NEXT_BITSET_IN_SPARSEBV
  240. }
  241. void
  242. GlobOpt::TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  243. {
  244. IR::Opnd * src = instr->GetSrc1();
  245. if (src)
  246. {
  247. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  248. src = instr->GetSrc2();
  249. if (src)
  250. {
  251. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  252. }
  253. }
  254. #if DBG
  255. // There should be no more than one property sym used.
  256. PropertySym *propertySymFromSrc = *pPropertySym;
  257. #endif
  258. IR::Opnd * dst = instr->GetDst();
  259. if (dst)
  260. {
  261. StackSym *stackSym = dst->GetStackSym();
  262. // We want stackSym uses: IndirOpnd and SymOpnds of propertySyms.
  263. // RegOpnd and SymOPnd of StackSyms are stack sym defs.
  264. if (stackSym == NULL)
  265. {
  266. TrackByteCodeSymUsed(dst, instrByteCodeStackSymUsed, pPropertySym);
  267. }
  268. }
  269. #if DBG
  270. AssertMsg(propertySymFromSrc == NULL || propertySymFromSrc == *pPropertySym,
  271. "Lost a property sym use?");
  272. #endif
  273. }
  274. void
  275. GlobOpt::TrackByteCodeSymUsed(IR::RegOpnd * regOpnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  276. {
  277. // Check JITOptimizedReg to catch case where baseOpnd of indir was optimized.
  278. if (!regOpnd->GetIsJITOptimizedReg())
  279. {
  280. TrackByteCodeSymUsed(regOpnd->m_sym, instrByteCodeStackSymUsed);
  281. }
  282. }
  283. void
  284. GlobOpt::TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  285. {
  286. if (opnd->GetIsJITOptimizedReg())
  287. {
  288. AssertMsg(!opnd->IsIndirOpnd(), "TrackByteCodeSymUsed doesn't expect IndirOpnd with IsJITOptimizedReg turned on");
  289. return;
  290. }
  291. switch(opnd->GetKind())
  292. {
  293. case IR::OpndKindReg:
  294. TrackByteCodeSymUsed(opnd->AsRegOpnd(), instrByteCodeStackSymUsed);
  295. break;
  296. case IR::OpndKindSym:
  297. {
  298. Sym * sym = opnd->AsSymOpnd()->m_sym;
  299. if (sym->IsStackSym())
  300. {
  301. TrackByteCodeSymUsed(sym->AsStackSym(), instrByteCodeStackSymUsed);
  302. }
  303. else
  304. {
  305. TrackByteCodeSymUsed(sym->AsPropertySym()->m_stackSym, instrByteCodeStackSymUsed);
  306. *pPropertySym = sym->AsPropertySym();
  307. }
  308. }
  309. break;
  310. case IR::OpndKindIndir:
  311. TrackByteCodeSymUsed(opnd->AsIndirOpnd()->GetBaseOpnd(), instrByteCodeStackSymUsed);
  312. {
  313. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  314. if (indexOpnd)
  315. {
  316. TrackByteCodeSymUsed(indexOpnd, instrByteCodeStackSymUsed);
  317. }
  318. }
  319. break;
  320. }
  321. }
  322. void
  323. GlobOpt::TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  324. {
  325. // We only care about stack sym that has a corresponding byte code register
  326. if (sym->HasByteCodeRegSlot())
  327. {
  328. if (sym->IsTypeSpec())
  329. {
  330. // It has to have a var version for byte code regs
  331. sym = sym->GetVarEquivSym(nullptr);
  332. }
  333. instrByteCodeStackSymUsed->Set(sym->m_id);
  334. }
  335. }
  336. void
  337. GlobOpt::MarkNonByteCodeUsed(IR::Instr * instr)
  338. {
  339. IR::Opnd * dst = instr->GetDst();
  340. if (dst)
  341. {
  342. MarkNonByteCodeUsed(dst);
  343. }
  344. IR::Opnd * src1 = instr->GetSrc1();
  345. if (src1)
  346. {
  347. MarkNonByteCodeUsed(src1);
  348. IR::Opnd * src2 = instr->GetSrc2();
  349. if (src2)
  350. {
  351. MarkNonByteCodeUsed(src2);
  352. }
  353. }
  354. }
  355. void
  356. GlobOpt::MarkNonByteCodeUsed(IR::Opnd * opnd)
  357. {
  358. switch(opnd->GetKind())
  359. {
  360. case IR::OpndKindReg:
  361. opnd->AsRegOpnd()->SetIsJITOptimizedReg(true);
  362. break;
  363. case IR::OpndKindIndir:
  364. opnd->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  365. {
  366. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  367. if (indexOpnd)
  368. {
  369. indexOpnd->SetIsJITOptimizedReg(true);
  370. }
  371. }
  372. break;
  373. }
  374. }
  375. void
  376. GlobOpt::CaptureByteCodeSymUses(IR::Instr * instr)
  377. {
  378. if (this->byteCodeUses)
  379. {
  380. // We already captured it before.
  381. return;
  382. }
  383. Assert(this->propertySymUse == NULL);
  384. this->byteCodeUses = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  385. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUses, &this->propertySymUse);
  386. AssertMsg(this->byteCodeUses->Equal(this->byteCodeUsesBeforeOpt),
  387. "Instruction edited before capturing the byte code use");
  388. }
// Tracks call-related bookkeeping (call sequences, out-param counts, inlinee
// state) per opcode so bailout info can later reconstruct the interpreter's
// argument stack.
void
GlobOpt::TrackCalls(IR::Instr * instr)
{
    // Keep track of out params for bailout
    switch (instr->m_opcode)
    {
    case Js::OpCode::StartCall:
        Assert(!this->isCallHelper);
        Assert(instr->GetDst()->IsRegOpnd());
        Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);

        // Lazily create the per-block call sequence list.
        if (this->blockData.callSequence == nullptr)
        {
            this->blockData.callSequence = JitAnew(this->alloc, SListBase<IR::Opnd *>);
            this->currentBlock->globOptData.callSequence = this->blockData.callSequence;
        }
        this->blockData.callSequence->Prepend(this->alloc, instr->GetDst());

        this->currentBlock->globOptData.totalOutParamCount += instr->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
        this->currentBlock->globOptData.startCallCount++;
        break;

    case Js::OpCode::BytecodeArgOutCapture:
    {
        // Captured byte-code arg: record it in the call sequence directly.
        this->blockData.callSequence->Prepend(this->alloc, instr->GetDst());
        this->currentBlock->globOptData.argOutCount++;
        break;
    }

    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    {
        IR::Opnd * opnd = instr->GetDst();
        if (opnd->IsSymOpnd())
        {
            Assert(!this->isCallHelper);
            Assert(!this->blockData.callSequence->Empty());
            StackSym* stackSym = opnd->AsSymOpnd()->m_sym->AsStackSym();

            // These scenarios are already tracked using BytecodeArgOutCapture,
            // and we don't want to be tracking ArgOut_A_FixupForStackArgs as these are only visible to the JIT and we should not be restoring them upon bailout.
            if (!stackSym->m_isArgCaptured && instr->m_opcode != Js::OpCode::ArgOut_A_FixupForStackArgs)
            {
                this->blockData.callSequence->Prepend(this->alloc, instr->GetDst());
                this->currentBlock->globOptData.argOutCount++;
            }
            Assert(stackSym->IsArgSlotSym());
            if (stackSym->m_isInlinedArgSlot)
            {
                this->currentBlock->globOptData.inlinedArgOutCount++;
                // We want to update the offsets only once: don't do in prepass.
                if (!this->IsLoopPrePass() && stackSym->m_offset >= 0)
                {
                    Func * currentFunc = instr->m_func;
                    stackSym->FixupStackOffset(currentFunc);
                }
            }
        }
        else
        {
            // It is a reg opnd if it is a helper call
            // It should be all ArgOut until the CallHelper instruction
            Assert(opnd->IsRegOpnd());
            this->isCallHelper = true;
        }

        // After the fixup above, downgrade the opcode to a plain inline ArgOut
        // (only outside the prepass, matching the offset fixup above).
        if (instr->m_opcode == Js::OpCode::ArgOut_A_FixupForStackArgs && !this->IsLoopPrePass())
        {
            instr->m_opcode = Js::OpCode::ArgOut_A_Inline;
        }
        break;
    }

    case Js::OpCode::InlineeStart:
        // Entering an inlinee: switch the current func context to the inlinee.
        Assert(instr->m_func->GetParentFunc() == this->blockData.curFunc);
        Assert(instr->m_func->GetParentFunc());
        this->blockData.curFunc = instr->m_func;
        this->currentBlock->globOptData.curFunc = instr->m_func;

        this->func->UpdateMaxInlineeArgOutCount(this->currentBlock->globOptData.inlinedArgOutCount);
        this->EndTrackCall(instr);

        if (DoInlineArgsOpt(instr->m_func))
        {
            // Snapshot liveness so RecordInlineeFrameInfo can later decide which
            // type-specialized version of each arg sym to record.
            instr->m_func->m_hasInlineArgsOpt = true;
            InlineeFrameInfo* frameInfo = InlineeFrameInfo::New(func->m_alloc);
            instr->m_func->frameInfo = frameInfo;
            frameInfo->floatSyms = currentBlock->globOptData.liveFloat64Syms->CopyNew(this->alloc);
            frameInfo->intSyms = currentBlock->globOptData.liveInt32Syms->MinusNew(currentBlock->globOptData.liveLossyInt32Syms, this->alloc);

            // SIMD_JS
            frameInfo->simd128F4Syms = currentBlock->globOptData.liveSimd128F4Syms->CopyNew(this->alloc);
            frameInfo->simd128I4Syms = currentBlock->globOptData.liveSimd128I4Syms->CopyNew(this->alloc);
        }
        break;

    case Js::OpCode::EndCallForPolymorphicInlinee:
        // Have this opcode mimic the functions of both InlineeStart and InlineeEnd in the bailout block of a polymorphic call inlined using fixed methods.
        this->EndTrackCall(instr);
        break;

    case Js::OpCode::CallHelper:
    case Js::OpCode::IsInst:
        // End of a helper-call ArgOut run started in the ArgOut cases above.
        Assert(this->isCallHelper);
        this->isCallHelper = false;
        break;

    case Js::OpCode::InlineeEnd:
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            RecordInlineeFrameInfo(instr);
        }
        EndTrackingOfArgObjSymsForInlinee();

        Assert(this->currentBlock->globOptData.inlinedArgOutCount >= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false));
        this->currentBlock->globOptData.inlinedArgOutCount -= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false);
        break;

    case Js::OpCode::InlineeMetaArg:
    {
        Assert(instr->GetDst()->IsSymOpnd());
        StackSym * stackSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(stackSym->IsArgSlotSym());

        // InlineeMetaArg has the m_func set as the "inlinee" and not the "inliner"
        // TODO: Review this and fix the m_func of InlineeMetaArg to be "inliner" (as for the rest of the ArgOut's)
        // We want to update the offsets only once: don't do in prepass.
        if (!this->IsLoopPrePass())
        {
            Func * currentFunc = instr->m_func->GetParentFunc();
            stackSym->FixupStackOffset(currentFunc);
        }
        this->currentBlock->globOptData.inlinedArgOutCount++;
        break;
    }

    case Js::OpCode::InlineBuiltInStart:
        this->inInlinedBuiltIn = true;
        break;

    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::InlineBuiltInEnd:
    {
        // If extra bailouts were added for the InlineMathXXX call itself,
        // move InlineeBuiltInStart just above the InlineMathXXX.
        // This is needed so that the function argument has lifetime after all bailouts for InlineMathXXX,
        // otherwise when we bailout we would get wrong function.
        IR::Instr* inlineBuiltInStartInstr = instr->m_prev;
        while (inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
        {
            inlineBuiltInStartInstr = inlineBuiltInStartInstr->m_prev;
        }

        IR::Instr *byteCodeUsesInstr = inlineBuiltInStartInstr->m_prev;
        IR::Instr * insertBeforeInstr = instr->m_prev;
        IR::Instr * tmpInstr = insertBeforeInstr;
        // Find the earliest ByteCodeUses between InlineBuiltInStart and this
        // instruction; the start instruction is re-inserted before it.
        while (tmpInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
        {
            if (tmpInstr->m_opcode == Js::OpCode::ByteCodeUses)
            {
                insertBeforeInstr = tmpInstr;
            }
            tmpInstr = tmpInstr->m_prev;
        }
        inlineBuiltInStartInstr->Unlink();
        if (insertBeforeInstr == instr->m_prev)
        {
            insertBeforeInstr->InsertBefore(inlineBuiltInStartInstr);
        }
        else
        {
            insertBeforeInstr->m_prev->InsertBefore(inlineBuiltInStartInstr);
        }

        // Need to move the byte code uses instructions associated with inline built-in start instruction as well. For instance,
        // copy-prop may have replaced the function sym and inserted a byte code uses for the original sym holding the function.
        // That byte code uses instruction needs to appear after bailouts inserted for the InlinMathXXX instruction since the
        // byte code register holding the function object needs to be restored on bailout.
        IR::Instr *const insertByteCodeUsesAfterInstr = inlineBuiltInStartInstr->m_prev;
        if (byteCodeUsesInstr != insertByteCodeUsesAfterInstr)
        {
            // The InlineBuiltInStart instruction was moved, look for its ByteCodeUses instructions that also need to be moved
            while (
                byteCodeUsesInstr->IsByteCodeUsesInstr() &&
                byteCodeUsesInstr->AsByteCodeUsesInstr()->GetByteCodeOffset() == inlineBuiltInStartInstr->GetByteCodeOffset())
            {
                IR::Instr *const instrToMove = byteCodeUsesInstr;
                byteCodeUsesInstr = byteCodeUsesInstr->m_prev;
                instrToMove->Unlink();
                insertByteCodeUsesAfterInstr->InsertAfter(instrToMove);
            }
        }

        // The following code makes more sense to be processed when we hit InlineeBuiltInStart,
        // but when extra bailouts are added for the InlineMathXXX and InlineArrayPop instructions itself, those bailouts
        // need to know about current bailout record, but since they are added after TrackCalls is called
        // for InlineeBuiltInStart, we can't clear current record when got InlineeBuiltInStart
        // Do not track calls for InlineNonTrackingBuiltInEnd, as it is already tracked for InlineArrayPop
        if (instr->m_opcode == Js::OpCode::InlineBuiltInEnd)
        {
            this->EndTrackCall(instr);
        }

        Assert(this->currentBlock->globOptData.inlinedArgOutCount >= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false));
        this->currentBlock->globOptData.inlinedArgOutCount -= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false);
        this->inInlinedBuiltIn = false;
        break;
    }

    case Js::OpCode::InlineArrayPop:
    {
        // EndTrackCall should be called here as the Post-op BailOutOnImplicitCalls will bail out to the instruction after the Pop function call instr.
        // This bailout shouldn't be tracking the call sequence as it will then erroneously reserve stack space for arguments when the call would have already happened
        // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for InlineArrayPop.
        this->EndTrackCall(instr);
        break;
    }

    default:
        if (OpCodeAttr::CallInstr(instr->m_opcode))
        {
            this->EndTrackCall(instr);
            if (this->inInlinedBuiltIn && instr->m_opcode == Js::OpCode::CallDirect)
            {
                // We can end up in this situation when a built-in apply target is inlined to a CallDirect. We have the following IR:
                //
                // StartCall
                // ArgOut_InlineBuiltIn
                // ArgOut_InlineBuiltIn
                // ArgOut_InlineBuiltIn
                // InlineBuiltInStart
                //      ArgOut_A_InlineSpecialized
                //      ArgOut_A
                //      ArgOut_A
                //      CallDirect
                // InlineNonTrackingBuiltInEnd
                //
                // We need to call EndTrackCall twice for CallDirect in this case. The CallDirect may get a BailOutOnImplicitCalls later,
                // but it should not be tracking the call sequence for the apply call as it is a post op bailout and the call would have
                // happened when we bail out.
                // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for CallDirect.
                this->EndTrackCall(instr);
            }
        }
        break;
    }
}
// Records, at InlineeEnd, the function object and argument values of an
// inlined frame so the frame can be reconstructed on bailout. For each arg it
// stores either a constant or the live (possibly type-specialized or
// copy-propagated) sym. The liveness bitvectors snapshotted at InlineeStart
// are consumed and freed here.
void GlobOpt::RecordInlineeFrameInfo(IR::Instr* inlineeEnd)
{
    // Only record in the real pass; prepass state is not final.
    if (this->IsLoopPrePass())
    {
        return;
    }
    InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
    if (frameInfo->isRecorded)
    {
        Assert(frameInfo->function.type != InlineeFrameInfoValueType_None);
        // Due to Cmp peeps in flow graph - InlineeEnd can be cloned.
        return;
    }
    inlineeEnd->IterateArgInstrs([=] (IR::Instr* argInstr)
    {
        if (argInstr->m_opcode == Js::OpCode::InlineeStart)
        {
            // The InlineeStart's src1 is the inlined function object.
            Assert(frameInfo->function.type == InlineeFrameInfoValueType_None);
            IR::RegOpnd* functionObject = argInstr->GetSrc1()->AsRegOpnd();
            if (functionObject->m_sym->IsConst())
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym->GetConstValueForBailout());
            }
            else
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym);
            }
        }
        else
        {
            Js::ArgSlot argSlot = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
            IR::Opnd* argOpnd = argInstr->GetSrc1();
            InlineFrameInfoValue frameInfoValue;
            StackSym* argSym = argOpnd->GetStackSym();
            if (!argSym)
            {
                // No sym — the operand itself is a constant.
                frameInfoValue = InlineFrameInfoValue(argOpnd->GetConstValue());
            }
            else if (argSym->IsConst())
            {
                frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
            }
            else
            {
                // Prefer the copy-prop source sym when copy prop is enabled.
                if (PHASE_ON(Js::CopyPropPhase, func))
                {
                    Value* value = FindValue(argSym);
                    StackSym * copyPropSym = this->GetCopyPropSym(this->currentBlock, argSym, value);
                    if (copyPropSym)
                    {
                        argSym = copyPropSym;
                    }
                }

                // Pick whichever type-specialized version of the sym is live,
                // based on the liveness snapshot taken at InlineeStart.
                GlobOptBlockData& globOptData = this->currentBlock->globOptData;

                if (frameInfo->intSyms->TestEmpty() && frameInfo->intSyms->Test(argSym->m_id))
                {
                    // Var version of the sym is not live, use the int32 version
                    argSym = argSym->GetInt32EquivSym(nullptr);
                    Assert(argSym);
                }
                else if (frameInfo->floatSyms->TestEmpty() && frameInfo->floatSyms->Test(argSym->m_id))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    argSym = argSym->GetFloat64EquivSym(nullptr);
                    Assert(argSym);
                }
                // SIMD_JS
                else if (frameInfo->simd128F4Syms->TestEmpty() && frameInfo->simd128F4Syms->Test(argSym->m_id))
                {
                    argSym = argSym->GetSimd128F4EquivSym(nullptr);
                }
                else if (frameInfo->simd128I4Syms->TestEmpty() && frameInfo->simd128I4Syms->Test(argSym->m_id))
                {
                    argSym = argSym->GetSimd128I4EquivSym(nullptr);
                }
                else
                {
                    Assert(globOptData.liveVarSyms->Test(argSym->m_id));
                }

                if (argSym->IsConst())
                {
                    frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
                }
                else
                {
                    frameInfoValue = InlineFrameInfoValue(argSym);
                }
            }
            Assert(argSlot >= 1);
            // Arg slots are 1-based; the arguments array is 0-based.
            frameInfo->arguments->SetItem(argSlot - 1, frameInfoValue);
        }
        return false;
    });

    // The liveness snapshots are only needed while recording — free them now.
    JitAdelete(this->alloc, frameInfo->intSyms);
    frameInfo->intSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->floatSyms);
    frameInfo->floatSyms = nullptr;

    // SIMD_JS
    JitAdelete(this->alloc, frameInfo->simd128F4Syms);
    frameInfo->simd128F4Syms = nullptr;
    JitAdelete(this->alloc, frameInfo->simd128I4Syms);
    frameInfo->simd128I4Syms = nullptr;

    frameInfo->isRecorded = true;
}
// Called when leaving an inlinee: reconcile the inlinee function's arguments-object
// sym set with the current block's, then pop curFunc back to the parent function.
void GlobOpt::EndTrackingOfArgObjSymsForInlinee()
{
    Assert(this->blockData.curFunc->GetParentFunc());
    if (this->blockData.curFunc->argObjSyms && TrackArgumentsObject())
    {
        // tempBv = syms tracked on the function but not on the current block.
        BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        tempBv->Minus(this->blockData.curFunc->argObjSyms, this->blockData.argObjSyms);
        if(!tempBv->IsEmpty())
        {
            // This means there are arguments object symbols in the current function which are not in the current block.
            // This could happen when one of the blocks has a throw and arguments object aliased in it and other blocks don't see it.
            // Rare case, abort stack arguments optimization in this case.
            CannotAllocateArgumentsObjectOnStack();
        }
        else
        {
            // The block's set is a superset of the function's (asserted below);
            // remove the inlinee's syms from the block-level set on the way out.
            Assert(this->blockData.argObjSyms->OrNew(this->blockData.curFunc->argObjSyms)->Equal(this->blockData.argObjSyms));
            this->blockData.argObjSyms->Minus(this->blockData.curFunc->argObjSyms);
        }
        JitAdelete(this->tempAlloc, tempBv);
    }
    // Restore tracking to the parent function, mirrored into the block's data.
    this->blockData.curFunc = this->blockData.curFunc->GetParentFunc();
    this->currentBlock->globOptData.curFunc = this->blockData.curFunc;
}
// Pop the tracking state for the call that 'instr' completes: remove its pending
// ArgOut entries and its StartCall entry from the call sequence, and update the
// per-block argOut / out-param / StartCall counters accordingly.
void GlobOpt::EndTrackCall(IR::Instr* instr)
{
    Assert(instr);
    Assert(OpCodeAttr::CallInstr(instr->m_opcode) || instr->m_opcode == Js::OpCode::InlineeStart || instr->m_opcode == Js::OpCode::InlineBuiltInEnd
        || instr->m_opcode == Js::OpCode::InlineArrayPop || instr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee);
    Assert(!this->isCallHelper);
    Assert(!this->blockData.callSequence->Empty());
#if DBG
    uint origArgOutCount = this->currentBlock->globOptData.argOutCount;
#endif
    // Entries with an arg-slot number are this call's ArgOuts; pop them until the
    // StartCall entry (a plain reg opnd) is at the head.
    while (this->blockData.callSequence->Head()->GetStackSym()->HasArgSlotNum())
    {
        this->currentBlock->globOptData.argOutCount--;
        this->blockData.callSequence->RemoveHead(this->alloc);
    }
    StackSym * sym = this->blockData.callSequence->Head()->AsRegOpnd()->m_sym->AsStackSym();
    this->blockData.callSequence->RemoveHead(this->alloc);
#if DBG
    Assert(sym->m_isSingleDef);
    Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);
    // Number of argument set should be the same as indicated at StartCall
    // except NewScObject has an implicit arg1
    Assert((uint)sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true) ==
        origArgOutCount - this->currentBlock->globOptData.argOutCount +
        (instr->m_opcode == Js::OpCode::NewScObject || instr->m_opcode == Js::OpCode::NewScObjArray
        || instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread));
#endif
    this->currentBlock->globOptData.totalOutParamCount -= sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
    this->currentBlock->globOptData.startCallCount--;
}
// Populate 'bailOutInfo' with everything needed to restore interpreter state at
// this point in 'block': which syms are live in which representation, stack-literal
// init-field counts, the tracked arguments object, the in-flight call sequence
// (StartCall/ArgOut chains), and known constant values.
void
GlobOpt::FillBailOutInfo(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");

    // Snapshot the live-sym sets per representation into the function allocator,
    // since the bailout info outlives the glob-opt pass.
    bailOutInfo->liveVarSyms = block->globOptData.liveVarSyms->CopyNew(this->func->m_alloc);
    bailOutInfo->liveFloat64Syms = block->globOptData.liveFloat64Syms->CopyNew(this->func->m_alloc);
    // SIMD_JS
    bailOutInfo->liveSimd128F4Syms = block->globOptData.liveSimd128F4Syms->CopyNew(this->func->m_alloc);
    bailOutInfo->liveSimd128I4Syms = block->globOptData.liveSimd128I4Syms->CopyNew(this->func->m_alloc);

    // The live int32 syms in the bailout info are only the syms resulting from lossless conversion to int. If the int32 value
    // was created from a lossy conversion to int, the original var value cannot be re-materialized from the int32 value. So, the
    // int32 version is considered to be not live for the purposes of bailout, which forces the var or float versions to be used
    // directly for restoring the value during bailout. Otherwise, bailout may try to re-materialize the var value by converting
    // the lossily-converted int value back into a var, restoring the wrong value.
    bailOutInfo->liveLosslessInt32Syms =
        block->globOptData.liveInt32Syms->MinusNew(block->globOptData.liveLossyInt32Syms, this->func->m_alloc);

    // Save the stack literal init field count so we can null out the uninitialized fields
    StackLiteralInitFldDataMap * stackLiteralInitFldDataMap = block->globOptData.stackLiteralInitFldDataMap;
    if (stackLiteralInitFldDataMap != nullptr)
    {
        uint stackLiteralInitFldDataCount = stackLiteralInitFldDataMap->Count();
        if (stackLiteralInitFldDataCount != 0)
        {
            auto stackLiteralBailOutInfo = AnewArray(this->func->m_alloc,
                BailOutInfo::StackLiteralBailOutInfo, stackLiteralInitFldDataCount);
            uint i = 0;
            // Flatten the map into a plain array of (stackSym, initFldCount) records.
            stackLiteralInitFldDataMap->Map(
                [stackLiteralBailOutInfo, stackLiteralInitFldDataCount, &i](StackSym * stackSym, StackLiteralInitFldData const& data)
            {
                Assert(i < stackLiteralInitFldDataCount);
                stackLiteralBailOutInfo[i].stackSym = stackSym;
                stackLiteralBailOutInfo[i].initFldCount = data.currentInitFldCount;
                i++;
            });
            Assert(i == stackLiteralInitFldDataCount);
            bailOutInfo->stackLiteralBailOutInfoCount = stackLiteralInitFldDataCount;
            bailOutInfo->stackLiteralBailOutInfo = stackLiteralBailOutInfo;
        }
    }

    if (TrackArgumentsObject())
    {
        this->CaptureArguments(block, bailOutInfo, this->func->m_alloc);
    }

    if (block->globOptData.callSequence && !block->globOptData.callSequence->Empty())
    {
        uint currentArgOutCount = 0;
        uint startCallNumber = block->globOptData.startCallCount;
        bailOutInfo->startCallInfo = JitAnewArray(this->func->m_alloc, BailOutInfo::StartCallInfo, startCallNumber);
        bailOutInfo->startCallCount = startCallNumber;
        // Save the start call's func to identify the function (inlined) that the call sequence is for
        // We might not have any arg out yet to get the function from
        bailOutInfo->startCallFunc = JitAnewArray(this->func->m_alloc, Func *, startCallNumber);
#ifdef _M_IX86
        bailOutInfo->inlinedStartCall = BVFixed::New(startCallNumber, this->func->m_alloc, false);
#endif
        uint totalOutParamCount = block->globOptData.totalOutParamCount;
        bailOutInfo->totalOutParamCount = totalOutParamCount;
        bailOutInfo->argOutSyms = JitAnewArrayZ(this->func->m_alloc, StackSym *, totalOutParamCount);
        uint argRestoreAdjustCount = 0;
        // Walk the call sequence from the most recent entry backwards: ArgOut entries
        // fill argOutSyms[] from the tail; each StartCall entry closes the arg section
        // of one (possibly nested) call.
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, block->globOptData.callSequence)
        {
            if(opnd->GetStackSym()->HasArgSlotNum())
            {
                StackSym * sym;
                if(opnd->IsSymOpnd())
                {
                    sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
                    Assert(sym->IsArgSlotSym());
                    Assert(sym->m_isSingleDef);
                    Assert(sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Inline
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_InlineBuiltIn
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_SpreadArg
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
                }
                else
                {
                    sym = opnd->GetStackSym();
                    Assert(FindValue(sym));
                    // StackSym args need to be re-captured
                    this->SetChangedSym(sym->m_id);
                }
                Assert(totalOutParamCount != 0);
                Assert(totalOutParamCount > currentArgOutCount);
                currentArgOutCount++;
#pragma prefast(suppress:26000, "currentArgOutCount is never 0");
                bailOutInfo->argOutSyms[totalOutParamCount - currentArgOutCount] = sym;
                // Note that there could be ArgOuts below current bailout instr that belong to current call (currentArgOutCount < argOutCount),
                // in which case we will have nulls in argOutSyms[] in start of section for current call, because we fill from tail.
                // Example: StartCall 3, ArgOut1,.. ArgOut2, Bailout,.. Argout3 -> [NULL, ArgOut1, ArgOut2].
            }
            else
            {
                Assert(opnd->IsRegOpnd());
                StackSym * sym = opnd->AsRegOpnd()->m_sym;
                Assert(!sym->IsArgSlotSym());
                Assert(sym->m_isSingleDef);
                Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);
                Assert(startCallNumber != 0);
                startCallNumber--;
                bailOutInfo->startCallFunc[startCallNumber] = sym->m_instrDef->m_func;
#ifdef _M_IX86
                if (this->currentRegion && this->currentRegion->GetType() == RegionTypeTry)
                {
                    // For a bailout in argument evaluation from an EH region, the esp is offset by the TryCatch helper's frame. So, the argouts are not actually pushed at the
                    // offsets stored in the bailout record, which are relative to ebp. Need to restore the argouts from the actual value of esp before calling the Bailout helper.
                    // For nested calls, argouts for the outer call need to be restored from an offset of stack-adjustment-done-by-the-inner-call from esp.
                    if (startCallNumber + 1 == bailOutInfo->startCallCount)
                    {
                        argRestoreAdjustCount = 0;
                    }
                    else
                    {
                        argRestoreAdjustCount = bailOutInfo->startCallInfo[startCallNumber + 1].argRestoreAdjustCount + bailOutInfo->startCallInfo[startCallNumber + 1].argCount;
                        // Account for the stack-alignment padding required by the inner call's arg block.
                        if ((Math::Align<int32>(bailOutInfo->startCallInfo[startCallNumber + 1].argCount * MachPtr, MachStackAlignment) - (bailOutInfo->startCallInfo[startCallNumber + 1].argCount * MachPtr)) != 0)
                        {
                            argRestoreAdjustCount++;
                        }
                    }
                }
                if (sym->m_isInlinedArgSlot)
                {
                    bailOutInfo->inlinedStartCall->Set(startCallNumber);
                }
#endif
                uint argOutCount = sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
                Assert(totalOutParamCount >= argOutCount);
                Assert(argOutCount >= currentArgOutCount);
                bailOutInfo->RecordStartCallInfo(startCallNumber, argRestoreAdjustCount, sym->m_instrDef);
                totalOutParamCount -= argOutCount;
                currentArgOutCount = 0;
            }
        }
        NEXT_SLISTBASE_ENTRY;
        // The whole sequence must have been consumed exactly.
        Assert(totalOutParamCount == 0);
        Assert(startCallNumber == 0);
        Assert(currentArgOutCount == 0);
    }

    // Save the constant values that we know so we can restore them directly.
    // This allows us to dead store the constant value assign.
    this->CaptureValues(block, bailOutInfo);
}
  917. IR::ByteCodeUsesInstr *
  918. GlobOpt::InsertByteCodeUses(IR::Instr * instr, bool includeDef)
  919. {
  920. IR::ByteCodeUsesInstr * byteCodeUsesInstr = nullptr;
  921. Assert(this->byteCodeUses);
  922. IR::RegOpnd * dstOpnd = nullptr;
  923. if (includeDef)
  924. {
  925. IR::Opnd * opnd = instr->GetDst();
  926. if (opnd && opnd->IsRegOpnd())
  927. {
  928. dstOpnd = opnd->AsRegOpnd();
  929. if (dstOpnd->GetIsJITOptimizedReg() || !dstOpnd->m_sym->HasByteCodeRegSlot())
  930. {
  931. dstOpnd = nullptr;
  932. }
  933. }
  934. }
  935. if (!this->byteCodeUses->IsEmpty() || this->propertySymUse || dstOpnd != nullptr)
  936. {
  937. byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
  938. if (!this->byteCodeUses->IsEmpty())
  939. {
  940. byteCodeUsesInstr->SetBV(byteCodeUses->CopyNew(instr->m_func->m_alloc));
  941. }
  942. if (dstOpnd != nullptr)
  943. {
  944. byteCodeUsesInstr->SetFakeDst(dstOpnd);
  945. }
  946. if (this->propertySymUse)
  947. {
  948. byteCodeUsesInstr->propertySymUse = this->propertySymUse;
  949. }
  950. instr->InsertBefore(byteCodeUsesInstr);
  951. }
  952. JitAdelete(this->alloc, this->byteCodeUses);
  953. this->byteCodeUses = nullptr;
  954. this->propertySymUse = nullptr;
  955. return byteCodeUsesInstr;
  956. }
// Replace a dead/eliminated instruction with a ByteCodeUses instruction that keeps
// its byte-code-visible uses alive for bailout. Returns the new instruction, or
// nullptr if the instruction had no byte-code uses to preserve.
IR::ByteCodeUsesInstr *
GlobOpt::ConvertToByteCodeUses(IR::Instr * instr)
{
#if DBG
    // Record the pre-optimization byte-code uses for later verification.
    PropertySym *propertySymUseBefore = NULL;
    Assert(this->byteCodeUses == nullptr);
    this->byteCodeUsesBeforeOpt->ClearAll();
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
#endif
    // Order matters: capture uses, materialize the ByteCodeUses instr, then remove
    // the original instruction.
    this->CaptureByteCodeSymUses(instr);
    IR::ByteCodeUsesInstr * byteCodeUsesInstr = this->InsertByteCodeUses(instr, true);
    instr->Remove();
    if (byteCodeUsesInstr)
    {
        // Coalesce with neighboring ByteCodeUses instrs (presumably — see
        // ByteCodeUsesInstr::Aggregate for the exact merging behavior).
        byteCodeUsesInstr->Aggregate();
    }
    return byteCodeUsesInstr;
}
  975. bool
  976. GlobOpt::MayNeedBailOut(Loop * loop) const
  977. {
  978. Assert(this->IsLoopPrePass());
  979. return loop->CanHoistInvariants() ||
  980. this->DoFieldCopyProp(loop) || (this->DoFieldHoisting(loop) && !loop->fieldHoistCandidates->IsEmpty());
  981. }
  982. bool
  983. GlobOpt::MaySrcNeedBailOnImplicitCall(IR::Opnd * opnd, Value *val)
  984. {
  985. switch (opnd->GetKind())
  986. {
  987. case IR::OpndKindAddr:
  988. case IR::OpndKindFloatConst:
  989. case IR::OpndKindIntConst:
  990. return false;
  991. case IR::OpndKindReg:
  992. // Only need implicit call if the operation will call ToPrimitive and we haven't prove
  993. // that it is already a primitive
  994. return
  995. !(val && val->GetValueInfo()->IsPrimitive()) &&
  996. !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
  997. !opnd->AsRegOpnd()->m_sym->IsInt32() &&
  998. !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
  999. !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
  1000. !opnd->AsRegOpnd()->m_sym->IsIntConst();
  1001. case IR::OpndKindSym:
  1002. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  1003. {
  1004. IR::PropertySymOpnd* propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
  1005. if (!propertySymOpnd->MayHaveImplicitCall())
  1006. {
  1007. return false;
  1008. }
  1009. }
  1010. return true;
  1011. default:
  1012. return true;
  1013. };
  1014. }
  1015. bool
  1016. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value *src1Val, Value *src2Val)
  1017. {
  1018. Assert(!this->IsLoopPrePass());
  1019. return this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val, this->currentBlock,
  1020. (!this->blockData.liveFields->IsEmpty()), !this->currentBlock->IsLandingPad(), true);
  1021. }
// An implicit-call bailout is required when some active optimization depends on no
// implicit calls happening here (specialized live values, CSE, field opts, or
// array-check hoisting), the instruction may actually make an implicit call, and it
// isn't already covered by a type-check bailout. Accessor calls are excluded.
bool
GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value *src1Val, Value *src2Val, BasicBlock * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass)
{
    if (mayNeedImplicitCallBailOut &&
        !instr->CallsAccessor() &&
        (
            NeedBailOnImplicitCallForLiveValues(block, isForwardPass) ||
            NeedBailOnImplicitCallForCSE(block, isForwardPass) ||
            NeedBailOnImplicitCallWithFieldOpts(block->loop, hasLiveFields) ||
            NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass)
        ) &&
        (!instr->HasTypeCheckBailOut() && MayNeedBailOnImplicitCall(instr, src1Val, src2Val)))
    {
        return true;
    }
#if DBG
    // Debug-only stress mode: force an implicit-call bailout wherever one is possible.
    if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryImplicitCallFlag) &&
        !instr->HasBailOutInfo() && MayNeedBailOnImplicitCall(instr, nullptr, nullptr))
    {
        // always add implicit call bailout even if we don't need it, but only on opcode that supports it
        return true;
    }
#endif
    return false;
}
  1047. bool
  1048. GlobOpt::IsTypeCheckProtected(const IR::Instr * instr)
  1049. {
  1050. #if DBG
  1051. IR::Opnd* dst = instr->GetDst();
  1052. IR::Opnd* src1 = instr->GetSrc1();
  1053. IR::Opnd* src2 = instr->GetSrc2();
  1054. AssertMsg(!dst || !dst->IsSymOpnd() || !dst->AsSymOpnd()->IsPropertySymOpnd() ||
  1055. !src1 || !src1->IsSymOpnd() || !src1->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src1 and dst be a PropertySymOpnd.");
  1056. AssertMsg(!src2 || !src2->IsSymOpnd() || !src2->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src2 be a PropertySymOpnd.");
  1057. #endif
  1058. IR::Opnd * opnd = instr->GetDst();
  1059. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1060. {
  1061. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1062. }
  1063. opnd = instr->GetSrc1();
  1064. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1065. {
  1066. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1067. }
  1068. return false;
  1069. }
// Decide whether 'instr' must carry a type-check bailout for this property access.
// On true, *pBailOutKind is set to the appropriate failed-type-check kind.
// On false, *pBailOutKind is BailOutInvalid, and *pIsTypeCheckProtected is written
// only on the path where an upstream type check may already protect the access.
bool
GlobOpt::NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind)
{
    if (instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType || instr->m_opcode == Js::OpCode::LdMethodFldPolyInlineMiss)
    {
        return false;
    }
    // CheckFixedFld always requires a type check and bailout either at the instruction or upstream.
    Assert(instr->m_opcode != Js::OpCode::CheckFixedFld || (propertySymOpnd->UsesFixedValue() && propertySymOpnd->MayNeedTypeCheckProtection()));
    if (propertySymOpnd->MayNeedTypeCheckProtection())
    {
        bool isCheckFixedFld = instr->m_opcode == Js::OpCode::CheckFixedFld;
        AssertMsg(!isCheckFixedFld || !PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) ||
            !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed method/data phase disabled?");
        Assert(!isStore || !isCheckFixedFld);
        // We don't share caches between field loads and stores. We should never have a field store involving a proto cache.
        Assert(!isStore || !propertySymOpnd->IsLoadedFromProto());
        if (propertySymOpnd->NeedsTypeCheckAndBailOut())
        {
            // Pick the kind along two axes: equivalent vs. exact (mono) type check,
            // and fixed-field vs. plain property access.
            *pBailOutKind = propertySymOpnd->HasEquivalentTypeSet() && !propertySymOpnd->MustDoMonoCheck() ?
                (isCheckFixedFld ? IR::BailOutFailedEquivalentFixedFieldTypeCheck : IR::BailOutFailedEquivalentTypeCheck) :
                (isCheckFixedFld ? IR::BailOutFailedFixedFieldTypeCheck : IR::BailOutFailedTypeCheck);
            return true;
        }
        else
        {
            // No bailout needed here; report whether an upstream check protects us.
            *pIsTypeCheckProtected = propertySymOpnd->IsTypeCheckProtected();
            *pBailOutKind = IR::BailOutInvalid;
            return false;
        }
    }
    else
    {
        Assert(instr->m_opcode != Js::OpCode::CheckFixedFld);
        *pBailOutKind = IR::BailOutInvalid;
        return false;
    }
}
// Conservatively determine whether 'instr' could trigger an implicit call
// (getter/setter, valueOf/toString, conversion helpers, ...) given what value
// tracking knows about its operands and any bailout kinds already attached.
bool
GlobOpt::MayNeedBailOnImplicitCall(const IR::Instr * instr, Value *src1Val, Value *src2Val)
{
    if (!instr->HasAnyImplicitCalls())
    {
        return false;
    }
    bool isLdElem = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::LdLen_A:
    {
        // LdLen avoids implicit calls when the base is known to be a string or a
        // (non-object-with-array) array, or an irregular-length bailout guards it.
        const ValueType baseValueType(instr->GetSrc1()->GetValueType());
        return
            !(
                baseValueType.IsString() ||
                (baseValueType.IsAnyArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray) ||
                (instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutOnIrregularLength) // guarantees no implicit calls
            );
    }
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::LdMethodElem:
    case Js::OpCode::InlineArrayPop:
        isLdElem = true;
        // fall-through
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::InlineArrayPush:
    {
        if(!instr->HasBailOutInfo())
        {
            return true;
        }
        // The following bailout kinds already prevent implicit calls from happening. Any conditions that could trigger an
        // implicit call result in a pre-op bailout.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        return
            !(
                (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly ||
                bailOutKind & IR::BailOutOnArrayAccessHelperCall ||
                (isLdElem && bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
            );
    }
    default:
        break;
    }
    if (OpCodeAttr::HasImplicitCall(instr->m_opcode))
    {
        // Operation has an implicit call regardless of operand attributes.
        return true;
    }
    // Check the destination: field/indir stores may make implicit calls.
    IR::Opnd * opnd = instr->GetDst();
    if (opnd)
    {
        switch (opnd->GetKind())
        {
        case IR::OpndKindReg:
            break;
        case IR::OpndKindSym:
            // No implicit call if we are just storing to a stack sym. Note that stores to non-configurable root
            // object fields may still need implicit call bailout. That's because a non-configurable field may still
            // become read-only and thus the store field will not take place (or throw in strict mode). Hence, we
            // can't optimize (e.g. copy prop) across such field stores.
            if (opnd->AsSymOpnd()->m_sym->IsStackSym())
            {
                return false;
            }
            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                IR::PropertySymOpnd* propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
                if (!propertySymOpnd->MayHaveImplicitCall())
                {
                    return false;
                }
            }
            return true;
        case IR::OpndKindIndir:
            return true;
        default:
            Assume(UNREACHED);
        }
    }
    // Finally, either source operand may force an implicit call (e.g. ToPrimitive).
    opnd = instr->GetSrc1();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src1Val))
    {
        return true;
    }
    opnd = instr->GetSrc2();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src2Val))
    {
        return true;
    }
    return false;
}
  1202. void
  1203. GlobOpt::GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind)
  1204. {
  1205. Assert(pInstr);
  1206. IR::Instr* instr = *pInstr;
  1207. Assert(instr);
  1208. IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
  1209. uint32 currentOffset = instr->GetByteCodeOffset();
  1210. while (nextInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
  1211. nextInstr->GetByteCodeOffset() == currentOffset)
  1212. {
  1213. nextInstr = nextInstr->GetNextRealInstrOrLabel();
  1214. }
  1215. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(nextInstr, kind);
  1216. if (this->currentBlock->GetLastInstr() == instr)
  1217. {
  1218. this->currentBlock->SetLastInstr(bailOutInstr);
  1219. }
  1220. FillBailOutInfo(this->currentBlock, bailOutInstr->GetBailOutInfo());
  1221. *pInstr = bailOutInstr;
  1222. }
  1223. void
  1224. GlobOpt::GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind)
  1225. {
  1226. Assert(pInstr);
  1227. IR::Instr * instr = *pInstr;
  1228. Assert(instr);
  1229. Assert(instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);
  1230. Assert(bailOutKind != IR::BailOutInvalid);
  1231. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(instr, bailOutKind);
  1232. if (this->currentBlock->GetLastInstr() == instr)
  1233. {
  1234. this->currentBlock->SetLastInstr(bailOutInstr);
  1235. }
  1236. FillBailOutInfo(currentBlock, bailOutInstr->GetBailOutInfo());
  1237. *pInstr = bailOutInstr;
  1238. }
  1239. IR::Instr *
  1240. GlobOpt::EnsureBailTarget(Loop * loop)
  1241. {
  1242. BailOutInfo * bailOutInfo = loop->bailOutInfo;
  1243. IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
  1244. if (bailOutInstr == nullptr)
  1245. {
  1246. bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailTarget, IR::BailOutShared, bailOutInfo, bailOutInfo->bailOutFunc);
  1247. loop->landingPad->InsertAfter(bailOutInstr);
  1248. }
  1249. return bailOutInstr;
  1250. }