// GlobOptBailOut.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. void
  7. GlobOpt::CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
  8. {
  9. if (!sym->IsStackSym())
  10. {
  11. return;
  12. }
  13. StackSym * copyPropSym = block->globOptData.GetCopyPropSym(sym, val);
  14. if (copyPropSym != nullptr)
  15. {
  16. bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), copyPropSym);
  17. }
  18. }
// Build the captured constant-value and copy-prop lists for a bailout when no
// previously captured values exist for this block.
// Pass 1 walks the whole symToValueMap and collects, into changedSyms, every
// stack sym that has a byte code register and whose value either has a sym
// store or is an int constant. Pass 2 then records each collected sym as an
// int constant, a var constant, or a copy-prop pair.
void
GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter,
    BVSparse<JitArenaAllocator>* argsToCapture)
{
    Sym * sym = nullptr;
    Value * value = nullptr;
    ValueInfo * valueInfo = nullptr;

    // changedSyms is reused here as scratch space: it is cleared and then
    // repopulated with the set of syms worth capturing.
    block->globOptData.changedSyms->ClearAll();

    FOREACH_GLOBHASHTABLE_ENTRY(bucket, block->globOptData.symToValueMap)
    {
        value = bucket.element;
        valueInfo = value->GetValueInfo();

        // Skip values that have neither a sym store nor an int constant:
        // there is nothing to restore from for such a value.
        if (valueInfo->GetSymStore() == nullptr && !valueInfo->HasIntConstantValue())
        {
            continue;
        }

        // Only stack syms with a byte code register need restoring on bailout.
        sym = bucket.value;
        if (sym == nullptr || !sym->IsStackSym() || !(sym->AsStackSym()->HasByteCodeRegSlot()))
        {
            continue;
        }

        block->globOptData.changedSyms->Set(sym->m_id);
    }
    NEXT_GLOBHASHTABLE_ENTRY;

    // Additionally capture any explicitly requested argument syms.
    if (argsToCapture)
    {
        block->globOptData.changedSyms->Or(argsToCapture);
    }

    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        // NOTE(review): assumes every sym id in changedSyms (including ones
        // merged from argsToCapture) has a bucket in symToValueMap — confirm.
        HashBucket<Sym*, Value*> * bucket = block->globOptData.symToValueMap->GetBucket(symId);
        StackSym * stackSym = bucket->value->AsStackSym();
        value = bucket->element;
        valueInfo = value->GetValueInfo();

        int intConstantValue;
        if (valueInfo->TryGetIntConstantValue(&intConstantValue))
        {
            // Int-constant value: record the constant itself.
            BailoutConstantValue constValue;
            constValue.InitIntConstValue(intConstantValue);
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else if (valueInfo->IsVarConstant())
        {
            // Var-constant value: record the constant var.
            BailoutConstantValue constValue;
            constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else
        {
            // Not a constant: try to record a copy-prop sym instead.
            CaptureCopyPropValue(block, stackSym, value, bailOutCopySymsIter);
        }
    }
    NEXT_BITSET_IN_SPARSEBV
}
// Build the captured value lists for a bailout by merging the block's
// previously captured values with the set of syms that changed since they
// were captured (block->globOptData.changedSyms).
// The previous constant and copy-prop lists are sorted by sym id, and
// changedSyms is iterated in increasing id order, so this is a merge:
// unchanged entries are copied over, changed ones are recaptured from the
// current symToValueMap.
void
GlobOpt::CaptureValuesIncremental(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter,
    BVSparse<JitArenaAllocator>* argsToCapture)
{
    CapturedValues * currCapturedValues = block->globOptData.capturedValues;
    SListBase<ConstantStackSymValue>::Iterator iterConst(currCapturedValues ? &currCapturedValues->constantValues : nullptr);
    SListBase<CopyPropSyms>::Iterator iterCopyPropSym(currCapturedValues ? &currCapturedValues->copyPropSyms : nullptr);
    bool hasConstValue = currCapturedValues ? iterConst.Next() : false;
    bool hasCopyPropSym = currCapturedValues ? iterCopyPropSym.Next() : false;

    // Sentinel: presumably InvalidSymID sorts after every real sym id, which
    // guarantees a final loop iteration that drains the unchanged entries —
    // TODO confirm against Js::Constants::InvalidSymID's definition.
    block->globOptData.changedSyms->Set(Js::Constants::InvalidSymID);

    if (argsToCapture)
    {
        block->globOptData.changedSyms->Or(argsToCapture);
    }

    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        Value * val = nullptr;

        // First process all unchanged syms with m_id < symId. Then, recapture the current changed sym.

        // copy unchanged const sym to new capturedValues
        Sym * constSym = hasConstValue ? iterConst.Data().Key() : nullptr;
        while (constSym && constSym->m_id < symId)
        {
            Assert(constSym->IsStackSym());
            // Arg-slot syms are not restored from the constant list.
            if (!constSym->AsStackSym()->HasArgSlotNum())
            {
                bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, constSym->AsStackSym(), iterConst.Data().Value());
            }
            hasConstValue = iterConst.Next();
            constSym = hasConstValue ? iterConst.Data().Key() : nullptr;
        }
        // The changed sym's old constant entry is stale: skip it; it is
        // recaptured below from the current value map.
        if (constSym && constSym->m_id == symId)
        {
            hasConstValue = iterConst.Next();
        }

        // process unchanged sym; copy-prop sym might have changed
        Sym * capturedSym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        while (capturedSym && capturedSym->m_id < symId)
        {
            StackSym * capturedCopyPropSym = iterCopyPropSym.Data().Value();
            Assert(capturedSym->IsStackSym());
            if (!block->globOptData.changedSyms->Test(capturedCopyPropSym->m_id))
            {
                // Neither side of the pairing changed: copy the entry as-is.
                if (!capturedSym->AsStackSym()->HasArgSlotNum())
                {
                    bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, capturedSym->AsStackSym(), capturedCopyPropSym);
                }
            }
            else
            {
                // The copy-prop sym itself changed: re-derive the pairing from
                // the sym's current value, if it still has one.
                if (!capturedSym->AsStackSym()->HasArgSlotNum())
                {
                    val = this->currentBlock->globOptData.FindValue(capturedSym);
                    if (val != nullptr)
                    {
                        CaptureCopyPropValue(block, capturedSym, val, bailOutCopySymsIter);
                    }
                }
            }
            hasCopyPropSym = iterCopyPropSym.Next();
            capturedSym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        }
        // Skip the stale entry for the changed sym; it is recaptured below.
        if (capturedSym && capturedSym->m_id == symId)
        {
            hasCopyPropSym = iterCopyPropSym.Next();
        }

        // recapture changed sym
        HashBucket<Sym *, Value *> * symIdBucket = nullptr;
        if (symId != Js::Constants::InvalidSymID)
        {
            symIdBucket = block->globOptData.symToValueMap->GetBucket(symId);
            if (symIdBucket != nullptr)
            {
                Sym * symIdSym = symIdBucket->value;
                Assert(symIdSym->IsStackSym() && (symIdSym->AsStackSym()->HasByteCodeRegSlot() || symIdSym->AsStackSym()->HasArgSlotNum()));

                val = symIdBucket->element;
                Assert(val);
                ValueInfo* valueInfo = val->GetValueInfo();

                // Only values with a sym store are recaptured here (mirrors
                // the filter used by CaptureValuesFromScratch).
                if (valueInfo->GetSymStore() != nullptr)
                {
                    int32 intConstValue;
                    BailoutConstantValue constValue;

                    if (valueInfo->TryGetIntConstantValue(&intConstValue))
                    {
                        constValue.InitIntConstValue(intConstValue);
                        bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    }
                    else if (valueInfo->IsVarConstant())
                    {
                        constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
                        bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    }
                    else
                    {
                        CaptureCopyPropValue(block, symIdSym, val, bailOutCopySymsIter);
                    }
                }
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV

    // If, after going over the set of changed syms since the last time we captured values,
    // there are remaining unprocessed entries in the current captured values set,
    // they can simply be copied over to the new bailout info.
    while (hasConstValue)
    {
        Sym * constSym = iterConst.Data().Key();
        Assert(constSym->IsStackSym());
        Assert(!block->globOptData.changedSyms->Test(constSym->m_id));
        if (!constSym->AsStackSym()->HasArgSlotNum())
        {
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, constSym->AsStackSym(), iterConst.Data().Value());
        }
        hasConstValue = iterConst.Next();
    }
    while (hasCopyPropSym)
    {
        Sym * capturedSym = iterCopyPropSym.Data().Key();
        StackSym * capturedCopyPropSym = iterCopyPropSym.Data().Value();
        Assert(capturedSym->IsStackSym());
        Assert(!block->globOptData.changedSyms->Test(capturedSym->m_id) &&
            !block->globOptData.changedSyms->Test(capturedCopyPropSym->m_id));
        if (!capturedSym->AsStackSym()->HasArgSlotNum())
        {
            bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, capturedSym->AsStackSym(), capturedCopyPropSym);
        }
        hasCopyPropSym = iterCopyPropSym.Next();
    }
}
// Capture the constant and copy-prop values live at this point into
// bailOutInfo, choosing the from-scratch path when the block has no prior
// captured values and the cheaper incremental path otherwise. Afterwards the
// new capture may be cached on the block as the baseline for later bailouts.
void
GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator>* argsToCapture)
{
    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // TODO[generators][ianhall]: Enable constprop and copyprop for generator functions; see GlobOpt::CopyProp()
        // Even though CopyProp is disabled for generator functions we must also not put the copy-prop sym into the
        // bailOutInfo so that the bailOutInfo keeps track of the key sym in its byteCodeUpwardExposed list.
        return;
    }

    // Stack-local staging lists; the editing iterators append to them and are
    // later re-pointed at the bailOutInfo-owned lists.
    CapturedValues capturedValues;
    SListBase<ConstantStackSymValue>::EditingIterator bailOutConstValuesIter(&capturedValues.constantValues);
    SListBase<CopyPropSyms>::EditingIterator bailOutCopySymsIter(&capturedValues.copyPropSyms);

    // Advance past the head so InsertNodeBefore appends at the tail.
    bailOutConstValuesIter.Next();
    bailOutCopySymsIter.Next();

    if (!block->globOptData.capturedValues)
    {
        CaptureValuesFromScratch(block, bailOutConstValuesIter, bailOutCopySymsIter, argsToCapture);
    }
    else
    {
        CaptureValuesIncremental(block, bailOutConstValuesIter, bailOutCopySymsIter, argsToCapture);
    }

    // attach capturedValues to bailOutInfo
    // Order matters here: clear the old list, re-anchor the iterator on the
    // destination list, then transfer ownership of the staged nodes.
    bailOutInfo->capturedValues->constantValues.Clear(this->func->m_alloc);
    bailOutConstValuesIter.SetNext(&bailOutInfo->capturedValues->constantValues);
    bailOutInfo->capturedValues->constantValues = capturedValues.constantValues;

    bailOutInfo->capturedValues->copyPropSyms.Clear(this->func->m_alloc);
    bailOutCopySymsIter.SetNext(&bailOutInfo->capturedValues->copyPropSyms);
    bailOutInfo->capturedValues->copyPropSyms = capturedValues.copyPropSyms;

    // In pre-pass only bailout info created should be for the loop header, and that doesn't take into account the back edge.
    // Don't use the captured values on that bailout for incremental capturing of values.
    if (!PHASE_OFF(Js::IncrementalBailoutPhase, func) && !this->IsLoopPrePass())
    {
        // cache the pointer of current bailout as potential baseline for later bailout in this block
        if (block->globOptData.capturedValuesCandidate)
        {
            block->globOptData.capturedValuesCandidate->DecrementRefCount();
        }
        block->globOptData.capturedValuesCandidate = bailOutInfo->capturedValues;
        block->globOptData.capturedValuesCandidate->IncrementRefCount();

        // reset changed syms to track symbols change after the above captured values candidate
        this->changedSymsAfterIncBailoutCandidate->ClearAll();
    }
}
  250. void
  251. GlobOpt::CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator)
  252. {
  253. FOREACH_BITSET_IN_SPARSEBV(id, this->currentBlock->globOptData.argObjSyms)
  254. {
  255. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  256. Assert(stackSym != nullptr);
  257. if (!stackSym->HasByteCodeRegSlot())
  258. {
  259. continue;
  260. }
  261. if (!bailOutInfo->capturedValues->argObjSyms)
  262. {
  263. bailOutInfo->capturedValues->argObjSyms = JitAnew(allocator, BVSparse<JitArenaAllocator>, allocator);
  264. }
  265. bailOutInfo->capturedValues->argObjSyms->Set(id);
  266. // Add to BailOutInfo
  267. }
  268. NEXT_BITSET_IN_SPARSEBV
  269. }
// Collect, into instrByteCodeStackSymUsed, the byte-code-register stack syms
// that this instruction *uses* (not defines), and report at most one property
// sym use through *pPropertySym. Sources are always uses; the dst is only a
// use when it has no stack sym (indirs and property-sym SymOpnds).
// No-op in asm.js mode.
void
GlobOpt::TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
{
    if(instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        return;
    }

    IR::Opnd * src = instr->GetSrc1();
    if (src)
    {
        TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
        src = instr->GetSrc2();
        if (src)
        {
            TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
        }
    }

#if DBG
    // There should be no more than one property sym used.
    // Snapshot the property sym seen in the sources so the post-dst assert
    // below can detect it being overwritten.
    PropertySym *propertySymFromSrc = *pPropertySym;
#endif

    IR::Opnd * dst = instr->GetDst();
    if (dst)
    {
        StackSym *stackSym = dst->GetStackSym();

        // We want stackSym uses: IndirOpnd and SymOpnds of propertySyms.
        // RegOpnd and SymOPnd of StackSyms are stack sym defs.
        if (stackSym == NULL)
        {
            TrackByteCodeSymUsed(dst, instrByteCodeStackSymUsed, pPropertySym);
        }
    }
#if DBG
    AssertMsg(propertySymFromSrc == NULL || propertySymFromSrc == *pPropertySym,
        "Lost a property sym use?");
#endif
}
  307. void
  308. GlobOpt::TrackByteCodeSymUsed(IR::RegOpnd * regOpnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  309. {
  310. // Check JITOptimizedReg to catch case where baseOpnd of indir was optimized.
  311. if (!regOpnd->GetIsJITOptimizedReg())
  312. {
  313. TrackByteCodeSymUsed(regOpnd->m_sym, instrByteCodeStackSymUsed);
  314. }
  315. }
  316. void
  317. GlobOpt::TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  318. {
  319. if (opnd->GetIsJITOptimizedReg())
  320. {
  321. AssertMsg(!opnd->IsIndirOpnd(), "TrackByteCodeSymUsed doesn't expect IndirOpnd with IsJITOptimizedReg turned on");
  322. return;
  323. }
  324. switch(opnd->GetKind())
  325. {
  326. case IR::OpndKindReg:
  327. TrackByteCodeSymUsed(opnd->AsRegOpnd(), instrByteCodeStackSymUsed);
  328. break;
  329. case IR::OpndKindSym:
  330. {
  331. Sym * sym = opnd->AsSymOpnd()->m_sym;
  332. if (sym->IsStackSym())
  333. {
  334. TrackByteCodeSymUsed(sym->AsStackSym(), instrByteCodeStackSymUsed);
  335. }
  336. else
  337. {
  338. TrackByteCodeSymUsed(sym->AsPropertySym()->m_stackSym, instrByteCodeStackSymUsed);
  339. *pPropertySym = sym->AsPropertySym();
  340. }
  341. }
  342. break;
  343. case IR::OpndKindIndir:
  344. TrackByteCodeSymUsed(opnd->AsIndirOpnd()->GetBaseOpnd(), instrByteCodeStackSymUsed);
  345. {
  346. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  347. if (indexOpnd)
  348. {
  349. TrackByteCodeSymUsed(indexOpnd, instrByteCodeStackSymUsed);
  350. }
  351. }
  352. break;
  353. }
  354. }
  355. void
  356. GlobOpt::TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  357. {
  358. // We only care about stack sym that has a corresponding byte code register
  359. if (sym->HasByteCodeRegSlot())
  360. {
  361. if (sym->IsTypeSpec())
  362. {
  363. // It has to have a var version for byte code regs
  364. sym = sym->GetVarEquivSym(nullptr);
  365. }
  366. instrByteCodeStackSymUsed->Set(sym->m_id);
  367. }
  368. }
  369. void
  370. GlobOpt::MarkNonByteCodeUsed(IR::Instr * instr)
  371. {
  372. IR::Opnd * dst = instr->GetDst();
  373. if (dst)
  374. {
  375. MarkNonByteCodeUsed(dst);
  376. }
  377. IR::Opnd * src1 = instr->GetSrc1();
  378. if (src1)
  379. {
  380. MarkNonByteCodeUsed(src1);
  381. IR::Opnd * src2 = instr->GetSrc2();
  382. if (src2)
  383. {
  384. MarkNonByteCodeUsed(src2);
  385. }
  386. }
  387. }
  388. void
  389. GlobOpt::MarkNonByteCodeUsed(IR::Opnd * opnd)
  390. {
  391. switch(opnd->GetKind())
  392. {
  393. case IR::OpndKindReg:
  394. opnd->AsRegOpnd()->SetIsJITOptimizedReg(true);
  395. break;
  396. case IR::OpndKindIndir:
  397. opnd->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  398. {
  399. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  400. if (indexOpnd)
  401. {
  402. indexOpnd->SetIsJITOptimizedReg(true);
  403. }
  404. }
  405. break;
  406. }
  407. }
  408. void
  409. GlobOpt::CaptureByteCodeSymUses(IR::Instr * instr)
  410. {
  411. if (this->byteCodeUses || this->func->GetJITFunctionBody()->IsAsmJsMode())
  412. {
  413. // We already captured it before.
  414. return;
  415. }
  416. Assert(this->propertySymUse == NULL);
  417. this->byteCodeUses = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  418. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUses, &this->propertySymUse);
  419. AssertMsg(this->byteCodeUses->Equal(this->byteCodeUsesBeforeOpt),
  420. "Instruction edited before capturing the byte code use");
  421. }
  422. void
  423. GlobOpt::ProcessInlineeEnd(IR::Instr* instr)
  424. {
  425. if (instr->m_func->hasArgLenAndConstOpt && instr->m_func->unoptableInlineArgCount == 0)
  426. {
  427. instr->m_func->hasUnoptimizedArgumentsAccess = false;
  428. if (DoInlineArgsOpt(instr->m_func))
  429. {
  430. instr->m_func->m_hasInlineArgsOpt = true;
  431. instr->m_func->frameInfo = instr->m_func->cachedInlineeFrameInfo;
  432. }
  433. }
  434. if (instr->m_func->m_hasInlineArgsOpt)
  435. {
  436. RecordInlineeFrameInfo(instr);
  437. }
  438. EndTrackingOfArgObjSymsForInlinee();
  439. Assert(this->currentBlock->globOptData.inlinedArgOutSize >= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false));
  440. this->currentBlock->globOptData.inlinedArgOutSize -= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false);
  441. }
// Track the call-related state needed to restore outgoing arguments on
// bailout: the per-block call sequence (StartCall dsts and ArgOut dsts),
// out-param counts, inlined ArgOut stack sizes, and inlinee/built-in
// bracketing (InlineeStart/End, InlineBuiltInStart/End).
void
GlobOpt::TrackCalls(IR::Instr * instr)
{
    // Keep track of out params for bailout
    switch (instr->m_opcode)
    {
    case Js::OpCode::StartCall:
        Assert(!this->isCallHelper);
        Assert(instr->GetDst()->IsRegOpnd());
        Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);

        // Lazily create the call sequence list; each StartCall/ArgOut dst is
        // prepended, so the list is in reverse program order.
        if (this->currentBlock->globOptData.callSequence == nullptr)
        {
            this->currentBlock->globOptData.callSequence = JitAnew(this->alloc, SListBase<IR::Opnd *>);
        }
        this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());

        this->currentBlock->globOptData.totalOutParamCount += instr->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
        this->currentBlock->globOptData.startCallCount++;
        break;

    case Js::OpCode::BytecodeArgOutCapture:
    {
        this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
        this->currentBlock->globOptData.argOutCount++;
        break;
    }

    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    {
        IR::Opnd * opnd = instr->GetDst();

        if (opnd->IsSymOpnd())
        {
            Assert(!this->isCallHelper);
            Assert(!this->currentBlock->globOptData.callSequence->Empty());
            StackSym* stackSym = opnd->AsSymOpnd()->m_sym->AsStackSym();

            // These scenarios are already tracked using BytecodeArgOutCapture,
            // and we don't want to be tracking ArgOut_A_FixupForStackArgs as these are only visible to the JIT and we should not be restoring them upon bailout.
            if (!stackSym->m_isArgCaptured && instr->m_opcode != Js::OpCode::ArgOut_A_FixupForStackArgs)
            {
                this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
                this->currentBlock->globOptData.argOutCount++;
            }
            Assert(stackSym->IsArgSlotSym());
            if (stackSym->m_isInlinedArgSlot)
            {
                // Reserve at least pointer-sized stack space per inlined arg.
                uint size = TySize[instr->GetDst()->GetType()];
                this->currentBlock->globOptData.inlinedArgOutSize += size < MachPtr ? MachPtr : size;
                // We want to update the offsets only once: don't do in prepass.
                if (!this->IsLoopPrePass() && stackSym->m_offset >= 0)
                {
                    Func * currentFunc = instr->m_func;
                    stackSym->FixupStackOffset(currentFunc);
                }
            }
        }
        else
        {
            // It is a reg opnd if it is a helper call
            // It should be all ArgOut until the CallHelper instruction
            Assert(opnd->IsRegOpnd());
            this->isCallHelper = true;
        }

        // Fixup ArgOuts are JIT-only; once past the prepass, demote them to
        // plain inline ArgOuts.
        if (instr->m_opcode == Js::OpCode::ArgOut_A_FixupForStackArgs && !this->IsLoopPrePass())
        {
            instr->m_opcode = Js::OpCode::ArgOut_A_Inline;
        }
        break;
    }

    case Js::OpCode::InlineeStart:
    {
        Assert(instr->m_func->GetParentFunc() == this->currentBlock->globOptData.curFunc);
        Assert(instr->m_func->GetParentFunc());
        // Enter the inlinee: it becomes the current func for tracking.
        this->currentBlock->globOptData.curFunc = instr->m_func;

        this->func->UpdateMaxInlineeArgOutSize(this->currentBlock->globOptData.inlinedArgOutSize);
        this->EndTrackCall(instr);

        // Snapshot the liveness of float/int/var syms at the inlinee start;
        // used later to record the inlinee frame for bailout.
        auto createFrameInfo = [&](Func* inlineeFunc)
        {
            InlineeFrameInfo* frameInfo = InlineeFrameInfo::New(inlineeFunc->m_alloc);
            frameInfo->floatSyms = CurrentBlockData()->liveFloat64Syms->CopyNew(this->alloc);
            frameInfo->intSyms = CurrentBlockData()->liveInt32Syms->MinusNew(CurrentBlockData()->liveLossyInt32Syms, this->alloc);
            frameInfo->varSyms = CurrentBlockData()->liveVarSyms->CopyNew(this->alloc);
            return frameInfo;
        };

        if (DoInlineArgsOpt(instr->m_func))
        {
            instr->m_func->m_hasInlineArgsOpt = true;
            // NOTE(review): this branch passes `func` (the top-level func) to
            // createFrameInfo while the else branch passes `instr->m_func`
            // (the inlinee) — only the allocator used differs; confirm the
            // asymmetry is intentional.
            instr->m_func->frameInfo = createFrameInfo(func);
        }
        else
        {
            instr->m_func->cachedInlineeFrameInfo = createFrameInfo(instr->m_func);
        }
        break;
    }

    case Js::OpCode::EndCallForPolymorphicInlinee:
        // Have this opcode mimic the functions of both InlineeStart and InlineeEnd in the bailout block of a polymorphic call inlined using fixed methods.
        this->EndTrackCall(instr);
        break;

    case Js::OpCode::CallHelper:
    case Js::OpCode::IsInst:
        Assert(this->isCallHelper);
        this->isCallHelper = false;
        break;

    case Js::OpCode::InlineeEnd:
        ProcessInlineeEnd(instr);
        break;

    case Js::OpCode::InlineeMetaArg:
    {
        Assert(instr->GetDst()->IsSymOpnd());
        StackSym * stackSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(stackSym->IsArgSlotSym());

        // InlineeMetaArg has the m_func set as the "inlinee" and not the "inliner"
        // TODO: Review this and fix the m_func of InlineeMetaArg to be "inliner" (as for the rest of the ArgOut's)
        // We want to update the offsets only once: don't do in prepass.
        if (!this->IsLoopPrePass())
        {
            Func * currentFunc = instr->m_func->GetParentFunc();
            stackSym->FixupStackOffset(currentFunc);
        }
        this->currentBlock->globOptData.inlinedArgOutSize += MachPtr;
        break;
    }

    case Js::OpCode::InlineBuiltInStart:
        this->inInlinedBuiltIn = true;
        break;

    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::InlineBuiltInEnd:
    {
        // If extra bailouts were added for the InlineMathXXX call itself,
        // move InlineeBuiltInStart just above the InlineMathXXX.
        // This is needed so that the function argument has lifetime after all bailouts for InlineMathXXX,
        // otherwise when we bailout we would get wrong function.
        IR::Instr* inlineBuiltInStartInstr = instr->m_prev;
        while (inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
        {
            inlineBuiltInStartInstr = inlineBuiltInStartInstr->m_prev;
        }

        IR::Instr *byteCodeUsesInstr = inlineBuiltInStartInstr->m_prev;
        IR::Instr * insertBeforeInstr = instr->m_prev;
        IR::Instr * tmpInstr = insertBeforeInstr;
        // Find the earliest ByteCodeUses between InlineBuiltInStart and this
        // instruction; the start instruction is re-inserted above it.
        while(tmpInstr->m_opcode != Js::OpCode::InlineBuiltInStart )
        {
            if(tmpInstr->m_opcode == Js::OpCode::ByteCodeUses)
            {
                insertBeforeInstr = tmpInstr;
            }
            tmpInstr = tmpInstr->m_prev;
        }
        inlineBuiltInStartInstr->Unlink();
        if(insertBeforeInstr == instr->m_prev)
        {
            insertBeforeInstr->InsertBefore(inlineBuiltInStartInstr);
        }
        else
        {
            insertBeforeInstr->m_prev->InsertBefore(inlineBuiltInStartInstr);
        }

        // Need to move the byte code uses instructions associated with inline built-in start instruction as well. For instance,
        // copy-prop may have replaced the function sym and inserted a byte code uses for the original sym holding the function.
        // That byte code uses instruction needs to appear after bailouts inserted for the InlinMathXXX instruction since the
        // byte code register holding the function object needs to be restored on bailout.
        IR::Instr *const insertByteCodeUsesAfterInstr = inlineBuiltInStartInstr->m_prev;
        if(byteCodeUsesInstr != insertByteCodeUsesAfterInstr)
        {
            // The InlineBuiltInStart instruction was moved, look for its ByteCodeUses instructions that also need to be moved
            while(
                byteCodeUsesInstr->IsByteCodeUsesInstr() &&
                byteCodeUsesInstr->AsByteCodeUsesInstr()->GetByteCodeOffset() == inlineBuiltInStartInstr->GetByteCodeOffset())
            {
                IR::Instr *const instrToMove = byteCodeUsesInstr;
                byteCodeUsesInstr = byteCodeUsesInstr->m_prev;
                instrToMove->Unlink();
                insertByteCodeUsesAfterInstr->InsertAfter(instrToMove);
            }
        }

        // The following code makes more sense to be processed when we hit InlineeBuiltInStart,
        // but when extra bailouts are added for the InlineMathXXX and InlineArrayPop instructions itself, those bailouts
        // need to know about current bailout record, but since they are added after TrackCalls is called
        // for InlineeBuiltInStart, we can't clear current record when got InlineeBuiltInStart
        // Do not track calls for InlineNonTrackingBuiltInEnd, as it is already tracked for InlineArrayPop
        if(instr->m_opcode == Js::OpCode::InlineBuiltInEnd)
        {
            this->EndTrackCall(instr);
        }

        Assert(this->currentBlock->globOptData.inlinedArgOutSize >= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false));
        this->currentBlock->globOptData.inlinedArgOutSize -= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false);

        this->inInlinedBuiltIn = false;
        break;
    }

    case Js::OpCode::InlineArrayPop:
    {
        // EndTrackCall should be called here as the Post-op BailOutOnImplicitCalls will bail out to the instruction after the Pop function call instr.
        // This bailout shouldn't be tracking the call sequence as it will then erroneously reserve stack space for arguments when the call would have already happened
        // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for InlineArrayPop.
        this->EndTrackCall(instr);
        break;
    }

    default:
        if (OpCodeAttr::CallInstr(instr->m_opcode))
        {
            this->EndTrackCall(instr);
            if (this->inInlinedBuiltIn && instr->m_opcode == Js::OpCode::CallDirect)
            {
                // We can end up in this situation when a built-in apply target is inlined to a CallDirect. We have the following IR:
                //
                // StartCall
                // ArgOut_InlineBuiltIn
                // ArgOut_InlineBuiltIn
                // ArgOut_InlineBuiltIn
                // InlineBuiltInStart
                //      ArgOut_A_InlineSpecialized
                //      ArgOut_A
                //      ArgOut_A
                //      CallDirect
                // InlineNonTrackingBuiltInEnd
                //
                // We need to call EndTrackCall twice for CallDirect in this case. The CallDirect may get a BailOutOnImplicitCalls later,
                // but it should not be tracking the call sequence for the apply call as it is a post op bailout and the call would have
                // happened when we bail out.
                // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for CallDirect.
                this->EndTrackCall(instr);
            }
        }
        break;
    }
}
// Record, on the inlinee's InlineeFrameInfo, how to reconstruct the inlined
// frame at bailout: the inlined function object plus each argument value.
// Constants are captured by value; other arguments by the sym (possibly the
// copy-prop sym, possibly a type-specialized int32/float64 equivalent) that is
// still live at this point, so the bailout code can restore them.
void GlobOpt::RecordInlineeFrameInfo(IR::Instr* inlineeEnd)
{
    // Nothing is recorded during the loop pre-pass.
    if (this->IsLoopPrePass())
    {
        return;
    }
    InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
    if (frameInfo->isRecorded)
    {
        Assert(frameInfo->function.type != InlineeFrameInfoValueType_None);
        // Due to Cmp peeps in flow graph - InlineeEnd can be cloned.
        return;
    }
    inlineeEnd->IterateArgInstrs([=] (IR::Instr* argInstr)
    {
        if (argInstr->m_opcode == Js::OpCode::InlineeStart)
        {
            // InlineeStart carries the inlined function object in src1; record
            // it by constant value when known, otherwise by sym.
            Assert(frameInfo->function.type == InlineeFrameInfoValueType_None);
            IR::RegOpnd* functionObject = argInstr->GetSrc1()->AsRegOpnd();
            if (functionObject->m_sym->IsConst())
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym->GetConstValueForBailout());
            }
            else
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym);
            }
        }
        else if(!GetIsAsmJSFunc()) // don't care about saving arg syms for wasm/asm.js
        {
            Js::ArgSlot argSlot = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
            IR::Opnd* argOpnd = argInstr->GetSrc1();
            InlineFrameInfoValue frameInfoValue;
            StackSym* argSym = argOpnd->GetStackSym();
            if (!argSym)
            {
                // Operand has no sym: it is itself a constant operand.
                frameInfoValue = InlineFrameInfoValue(argOpnd->GetConstValue());
            }
            else if (argSym->IsConst() && !argSym->IsInt64Const())
            {
                // InlineFrameInfo doesn't currently support Int64Const
                frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
            }
            else
            {
                // Prefer the copy-prop sym when its var version is the one
                // tracked as live by the frame info.
                if (!PHASE_OFF(Js::CopyPropPhase, func))
                {
                    Value* value = this->currentBlock->globOptData.FindValue(argSym);
                    if (value)
                    {
                        StackSym * copyPropSym = this->currentBlock->globOptData.GetCopyPropSym(argSym, value);
                        if (copyPropSym &&
                            frameInfo->varSyms->TestEmpty() && frameInfo->varSyms->Test(copyPropSym->m_id))
                        {
                            argSym = copyPropSym;
                        }
                    }
                }
                if (frameInfo->intSyms->TestEmpty() && frameInfo->intSyms->Test(argSym->m_id))
                {
                    // Var version of the sym is not live, use the int32 version
                    argSym = argSym->GetInt32EquivSym(nullptr);
                    Assert(argSym);
                }
                else if (frameInfo->floatSyms->TestEmpty() && frameInfo->floatSyms->Test(argSym->m_id))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    argSym = argSym->GetFloat64EquivSym(nullptr);
                    Assert(argSym);
                }
                else
                {
                    Assert(frameInfo->varSyms->Test(argSym->m_id));
                }
                // The substituted sym may itself turn out to be a constant.
                if (argSym->IsConst() && !argSym->IsInt64Const())
                {
                    frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
                }
                else
                {
                    frameInfoValue = InlineFrameInfoValue(argSym);
                }
            }
            Assert(argSlot >= 1);
            // Arg slots are 1-based; the arguments array is 0-based.
            frameInfo->arguments->SetItem(argSlot - 1, frameInfoValue);
        }
        return false; // continue iterating over all arg instrs
    });
    // The liveness bit-vectors were only needed to pick the sym flavor above;
    // free them now that the frame info is recorded.
    JitAdelete(this->alloc, frameInfo->intSyms);
    frameInfo->intSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->floatSyms);
    frameInfo->floatSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->varSyms);
    frameInfo->varSyms = nullptr;
    frameInfo->isRecorded = true;
}
  767. void GlobOpt::EndTrackingOfArgObjSymsForInlinee()
  768. {
  769. Assert(this->currentBlock->globOptData.curFunc->GetParentFunc());
  770. if (this->currentBlock->globOptData.curFunc->argObjSyms && TrackArgumentsObject())
  771. {
  772. BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  773. tempBv->Minus(this->currentBlock->globOptData.curFunc->argObjSyms, this->currentBlock->globOptData.argObjSyms);
  774. if(!tempBv->IsEmpty())
  775. {
  776. // This means there are arguments object symbols in the current function which are not in the current block.
  777. // This could happen when one of the blocks has a throw and arguments object aliased in it and other blocks don't see it.
  778. // Rare case, abort stack arguments optimization in this case.
  779. CannotAllocateArgumentsObjectOnStack(this->currentBlock->globOptData.curFunc);
  780. }
  781. else
  782. {
  783. Assert(this->currentBlock->globOptData.argObjSyms->OrNew(this->currentBlock->globOptData.curFunc->argObjSyms)->Equal(this->currentBlock->globOptData.argObjSyms));
  784. this->currentBlock->globOptData.argObjSyms->Minus(this->currentBlock->globOptData.curFunc->argObjSyms);
  785. }
  786. JitAdelete(this->tempAlloc, tempBv);
  787. }
  788. this->currentBlock->globOptData.curFunc = this->currentBlock->globOptData.curFunc->GetParentFunc();
  789. }
// Pop the innermost completed call from the tracked call sequence of the
// current block. The sequence holds, from the head: the ArgOut syms of the
// innermost in-flight call, followed by the StartCall sym that opened it.
// Both are removed and the per-block argOut/startCall/outParam counters are
// updated so later bailouts record only calls still in flight.
void GlobOpt::EndTrackCall(IR::Instr* instr)
{
    Assert(instr);
    // Only instructions that actually complete a call may end tracking.
    Assert(OpCodeAttr::CallInstr(instr->m_opcode) || instr->m_opcode == Js::OpCode::InlineeStart || instr->m_opcode == Js::OpCode::InlineBuiltInEnd
        || instr->m_opcode == Js::OpCode::InlineArrayPop || instr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee);
    Assert(!this->isCallHelper);
    Assert(!this->currentBlock->globOptData.callSequence->Empty());
#if DBG
    uint origArgOutCount = this->currentBlock->globOptData.argOutCount;
#endif
    // Drop every ArgOut entry (syms carrying an arg-slot number) belonging to
    // the innermost call; they sit at the head of the sequence.
    while (this->currentBlock->globOptData.callSequence->Head()->GetStackSym()->HasArgSlotNum())
    {
        this->currentBlock->globOptData.argOutCount--;
        this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);
    }
    // The next entry is the StartCall sym for this call; remove it as well.
    StackSym * sym = this->currentBlock->globOptData.callSequence->Head()->AsRegOpnd()->m_sym->AsStackSym();
    this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);
#if DBG
    Assert(sym->m_isSingleDef);
    Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);
    // Number of argument set should be the same as indicated at StartCall
    // except NewScObject has an implicit arg1
    Assert((uint)sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true) ==
        origArgOutCount - this->currentBlock->globOptData.argOutCount +
        (instr->m_opcode == Js::OpCode::NewScObject || instr->m_opcode == Js::OpCode::NewScObjArray
        || instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread));
#endif
    this->currentBlock->globOptData.totalOutParamCount -= sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
    this->currentBlock->globOptData.startCallCount--;
}
// Populate a BailOutInfo from the optimizer state of the given block: live
// var/float64/lossless-int32 syms, pending stack-literal init-field counts,
// captured arguments objects, and the in-flight call sequence (StartCalls and
// their ArgOuts) so the bailout can rebuild interpreter call frames. Finally
// captures known constant/copy-prop values so their assigns can be dead-stored.
void
GlobOpt::FillBailOutInfo(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");
    BVSparse<JitArenaAllocator>* argsToCapture = nullptr;
    bailOutInfo->liveVarSyms = block->globOptData.liveVarSyms->CopyNew(this->func->m_alloc);
    bailOutInfo->liveFloat64Syms = block->globOptData.liveFloat64Syms->CopyNew(this->func->m_alloc);
    // The live int32 syms in the bailout info are only the syms resulting from lossless conversion to int. If the int32 value
    // was created from a lossy conversion to int, the original var value cannot be re-materialized from the int32 value. So, the
    // int32 version is considered to be not live for the purposes of bailout, which forces the var or float versions to be used
    // directly for restoring the value during bailout. Otherwise, bailout may try to re-materialize the var value by converting
    // the lossily-converted int value back into a var, restoring the wrong value.
    bailOutInfo->liveLosslessInt32Syms =
        block->globOptData.liveInt32Syms->MinusNew(block->globOptData.liveLossyInt32Syms, this->func->m_alloc);
    // Save the stack literal init field count so we can null out the uninitialized fields
    StackLiteralInitFldDataMap * stackLiteralInitFldDataMap = block->globOptData.stackLiteralInitFldDataMap;
    if (stackLiteralInitFldDataMap != nullptr)
    {
        uint stackLiteralInitFldDataCount = stackLiteralInitFldDataMap->Count();
        if (stackLiteralInitFldDataCount != 0)
        {
            // Flatten the map into an array owned by the bailout info.
            auto stackLiteralBailOutInfo = AnewArray(this->func->m_alloc,
                BailOutInfo::StackLiteralBailOutInfo, stackLiteralInitFldDataCount);
            uint i = 0;
            stackLiteralInitFldDataMap->Map(
                [stackLiteralBailOutInfo, stackLiteralInitFldDataCount, &i](StackSym * stackSym, StackLiteralInitFldData const& data)
            {
                Assert(i < stackLiteralInitFldDataCount);
                stackLiteralBailOutInfo[i].stackSym = stackSym;
                stackLiteralBailOutInfo[i].initFldCount = data.currentInitFldCount;
                i++;
            });
            Assert(i == stackLiteralInitFldDataCount);
            bailOutInfo->stackLiteralBailOutInfoCount = stackLiteralInitFldDataCount;
            bailOutInfo->stackLiteralBailOutInfo = stackLiteralBailOutInfo;
        }
    }
    if (TrackArgumentsObject())
    {
        this->CaptureArguments(block, bailOutInfo, this->func->m_alloc);
    }
    // Record the in-flight call sequence (if any) so bailout can rebuild the
    // interpreter's argument stack.
    if (block->globOptData.callSequence && !block->globOptData.callSequence->Empty())
    {
        uint currentArgOutCount = 0;
        uint startCallNumber = block->globOptData.startCallCount;
        bailOutInfo->startCallInfo = JitAnewArray(this->func->m_alloc, BailOutInfo::StartCallInfo, startCallNumber);
        bailOutInfo->startCallCount = startCallNumber;
        // Save the start call's func to identify the function (inlined) that the call sequence is for
        // We might not have any arg out yet to get the function from
        bailOutInfo->startCallFunc = JitAnewArray(this->func->m_alloc, Func *, startCallNumber);
#ifdef _M_IX86
        bailOutInfo->inlinedStartCall = BVFixed::New(startCallNumber, this->func->m_alloc, false);
#endif
        uint totalOutParamCount = block->globOptData.totalOutParamCount;
        bailOutInfo->totalOutParamCount = totalOutParamCount;
        bailOutInfo->argOutSyms = JitAnewArrayZ(this->func->m_alloc, StackSym *, totalOutParamCount);
        // Walk the sequence head-to-tail: ArgOut entries (syms with an arg slot
        // number) are filled into argOutSyms from the tail of each call's
        // section; a StartCall entry closes out the current call's section.
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, block->globOptData.callSequence)
        {
            if(opnd->GetStackSym()->HasArgSlotNum())
            {
                StackSym * sym;
                if(opnd->IsSymOpnd())
                {
                    sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
                    Assert(sym->IsArgSlotSym());
                    Assert(sym->m_isSingleDef);
                    Assert(sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Inline
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_InlineBuiltIn
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_SpreadArg
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
                }
                else
                {
                    sym = opnd->GetStackSym();
                    Assert(this->currentBlock->globOptData.FindValue(sym));
                    // StackSym args need to be re-captured
                    if (!argsToCapture)
                    {
                        argsToCapture = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                    }
                    argsToCapture->Set(sym->m_id);
                }
                Assert(totalOutParamCount != 0);
                Assert(totalOutParamCount > currentArgOutCount);
                currentArgOutCount++;
#pragma prefast(suppress:26000, "currentArgOutCount is never 0");
                bailOutInfo->argOutSyms[totalOutParamCount - currentArgOutCount] = sym;
                // Note that there could be ArgOuts below current bailout instr that belong to current call (currentArgOutCount < argOutCount),
                // in which case we will have nulls in argOutSyms[] in start of section for current call, because we fill from tail.
                // Example: StartCall 3, ArgOut1,.. ArgOut2, Bailout,.. Argout3 -> [NULL, ArgOut1, ArgOut2].
            }
            else
            {
                // A StartCall entry: record which (possibly inlined) function
                // this call section belongs to and rewind the section bounds.
                Assert(opnd->IsRegOpnd());
                StackSym * sym = opnd->AsRegOpnd()->m_sym;
                Assert(!sym->IsArgSlotSym());
                Assert(sym->m_isSingleDef);
                Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);
                Assert(startCallNumber != 0);
                startCallNumber--;
                bailOutInfo->startCallFunc[startCallNumber] = sym->m_instrDef->m_func;
#ifdef _M_IX86
                if (sym->m_isInlinedArgSlot)
                {
                    bailOutInfo->inlinedStartCall->Set(startCallNumber);
                }
#endif
                uint argOutCount = sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
                Assert(totalOutParamCount >= argOutCount);
                Assert(argOutCount >= currentArgOutCount);
                bailOutInfo->RecordStartCallInfo(startCallNumber, sym->m_instrDef);
                totalOutParamCount -= argOutCount;
                currentArgOutCount = 0;
            }
        }
        NEXT_SLISTBASE_ENTRY;
        // All sections must be fully accounted for.
        Assert(totalOutParamCount == 0);
        Assert(startCallNumber == 0);
        Assert(currentArgOutCount == 0);
    }
    // Save the constant values that we know so we can restore them directly.
    // This allows us to dead store the constant value assign.
    this->CaptureValues(block, bailOutInfo, argsToCapture);
}
  945. void
  946. GlobOpt::FillBailOutInfo(BasicBlock *block, _In_ IR::Instr * instr)
  947. {
  948. AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");
  949. Assert(instr->HasBailOutInfo());
  950. if (this->isRecursiveCallOnLandingPad)
  951. {
  952. Assert(block->IsLandingPad());
  953. Loop * loop = block->next->loop;
  954. EnsureBailTarget(loop);
  955. if (instr->GetBailOutInfo() != loop->bailOutInfo)
  956. {
  957. instr->ReplaceBailOutInfo(loop->bailOutInfo);
  958. }
  959. return;
  960. }
  961. FillBailOutInfo(block, instr->GetBailOutInfo());
  962. }
  963. IR::ByteCodeUsesInstr *
  964. GlobOpt::InsertByteCodeUses(IR::Instr * instr, bool includeDef)
  965. {
  966. IR::ByteCodeUsesInstr * byteCodeUsesInstr = nullptr;
  967. if (!this->byteCodeUses)
  968. {
  969. Assert(this->isAsmJSFunc);
  970. return nullptr;
  971. }
  972. IR::RegOpnd * dstOpnd = nullptr;
  973. if (includeDef)
  974. {
  975. IR::Opnd * opnd = instr->GetDst();
  976. if (opnd && opnd->IsRegOpnd())
  977. {
  978. dstOpnd = opnd->AsRegOpnd();
  979. if (dstOpnd->GetIsJITOptimizedReg() || !dstOpnd->m_sym->HasByteCodeRegSlot())
  980. {
  981. dstOpnd = nullptr;
  982. }
  983. }
  984. }
  985. if (!this->byteCodeUses->IsEmpty() || this->propertySymUse || dstOpnd != nullptr)
  986. {
  987. if (instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset || !instr->HasBailOutInfo())
  988. {
  989. byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
  990. }
  991. else
  992. {
  993. byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr->m_func, instr->GetBailOutInfo()->bailOutOffset);
  994. }
  995. if (!this->byteCodeUses->IsEmpty())
  996. {
  997. byteCodeUsesInstr->SetBV(byteCodeUses->CopyNew(instr->m_func->m_alloc));
  998. }
  999. if (dstOpnd != nullptr)
  1000. {
  1001. byteCodeUsesInstr->SetFakeDst(dstOpnd);
  1002. }
  1003. if (this->propertySymUse)
  1004. {
  1005. byteCodeUsesInstr->propertySymUse = this->propertySymUse;
  1006. }
  1007. instr->InsertBefore(byteCodeUsesInstr);
  1008. }
  1009. JitAdelete(this->alloc, this->byteCodeUses);
  1010. this->byteCodeUses = nullptr;
  1011. this->propertySymUse = nullptr;
  1012. return byteCodeUsesInstr;
  1013. }
  1014. IR::ByteCodeUsesInstr *
  1015. GlobOpt::ConvertToByteCodeUses(IR::Instr * instr)
  1016. {
  1017. #if DBG
  1018. PropertySym *propertySymUseBefore = NULL;
  1019. Assert(this->byteCodeUses == nullptr);
  1020. this->byteCodeUsesBeforeOpt->ClearAll();
  1021. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
  1022. #endif
  1023. this->CaptureByteCodeSymUses(instr);
  1024. IR::ByteCodeUsesInstr * byteCodeUsesInstr = this->InsertByteCodeUses(instr, true);
  1025. instr->Remove();
  1026. if (byteCodeUsesInstr)
  1027. {
  1028. byteCodeUsesInstr->AggregateFollowingByteCodeUses();
  1029. }
  1030. return byteCodeUsesInstr;
  1031. }
  1032. bool
  1033. GlobOpt::MayNeedBailOut(Loop * loop) const
  1034. {
  1035. Assert(this->IsLoopPrePass());
  1036. return loop->CanHoistInvariants() || this->DoFieldCopyProp(loop) ;
  1037. }
  1038. bool
  1039. GlobOpt::MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val)
  1040. {
  1041. switch (opnd->GetKind())
  1042. {
  1043. case IR::OpndKindAddr:
  1044. case IR::OpndKindFloatConst:
  1045. case IR::OpndKindIntConst:
  1046. return false;
  1047. case IR::OpndKindReg:
  1048. // Only need implicit call if the operation will call ToPrimitive and we haven't prove
  1049. // that it is already a primitive
  1050. return
  1051. !(val && val->GetValueInfo()->IsPrimitive()) &&
  1052. !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
  1053. !opnd->AsRegOpnd()->m_sym->IsInt32() &&
  1054. !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
  1055. !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
  1056. !opnd->AsRegOpnd()->m_sym->IsIntConst();
  1057. case IR::OpndKindSym:
  1058. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  1059. {
  1060. IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
  1061. if (!propertySymOpnd->MayHaveImplicitCall())
  1062. {
  1063. return false;
  1064. }
  1065. }
  1066. return true;
  1067. default:
  1068. return true;
  1069. };
  1070. }
  1071. bool
  1072. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const
  1073. {
  1074. Assert(!this->IsLoopPrePass());
  1075. return this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val, this->currentBlock,
  1076. (!this->currentBlock->globOptData.liveFields->IsEmpty()), !this->currentBlock->IsLandingPad(), true);
  1077. }
  1078. bool
  1079. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass) const
  1080. {
  1081. if (mayNeedImplicitCallBailOut &&
  1082. !instr->CallsAccessor() &&
  1083. (
  1084. NeedBailOnImplicitCallForLiveValues(block, isForwardPass) ||
  1085. NeedBailOnImplicitCallForCSE(block, isForwardPass) ||
  1086. NeedBailOnImplicitCallWithFieldOpts(block->loop, hasLiveFields) ||
  1087. NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass)
  1088. ) &&
  1089. (!instr->HasTypeCheckBailOut() && MayNeedBailOnImplicitCall(instr, src1Val, src2Val)))
  1090. {
  1091. return true;
  1092. }
  1093. #if DBG
  1094. if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryImplicitCallFlag) &&
  1095. !instr->HasBailOutInfo() && MayNeedBailOnImplicitCall(instr, nullptr, nullptr))
  1096. {
  1097. // always add implicit call bailout even if we don't need it, but only on opcode that supports it
  1098. return true;
  1099. }
  1100. #endif
  1101. return false;
  1102. }
  1103. bool
  1104. GlobOpt::IsTypeCheckProtected(const IR::Instr * instr)
  1105. {
  1106. #if DBG
  1107. IR::Opnd* dst = instr->GetDst();
  1108. IR::Opnd* src1 = instr->GetSrc1();
  1109. IR::Opnd* src2 = instr->GetSrc2();
  1110. AssertMsg(!dst || !dst->IsSymOpnd() || !dst->AsSymOpnd()->IsPropertySymOpnd() ||
  1111. !src1 || !src1->IsSymOpnd() || !src1->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src1 and dst be a PropertySymOpnd.");
  1112. AssertMsg(!src2 || !src2->IsSymOpnd() || !src2->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src2 be a PropertySymOpnd.");
  1113. #endif
  1114. IR::Opnd * opnd = instr->GetDst();
  1115. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1116. {
  1117. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1118. }
  1119. opnd = instr->GetSrc1();
  1120. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1121. {
  1122. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1123. }
  1124. return false;
  1125. }
  1126. bool
  1127. GlobOpt::NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind)
  1128. {
  1129. if (instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType || instr->m_opcode == Js::OpCode::LdMethodFldPolyInlineMiss)
  1130. {
  1131. return false;
  1132. }
  1133. // CheckFixedFld always requires a type check and bailout either at the instruction or upstream.
  1134. Assert(instr->m_opcode != Js::OpCode::CheckFixedFld || (propertySymOpnd->UsesFixedValue() && propertySymOpnd->MayNeedTypeCheckProtection()));
  1135. if (propertySymOpnd->MayNeedTypeCheckProtection())
  1136. {
  1137. bool isCheckFixedFld = instr->m_opcode == Js::OpCode::CheckFixedFld;
  1138. AssertMsg(!isCheckFixedFld || !PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) ||
  1139. !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed method/data phase disabled?");
  1140. Assert(!isStore || !isCheckFixedFld);
  1141. // We don't share caches between field loads and stores. We should never have a field store involving a proto cache.
  1142. Assert(!isStore || !propertySymOpnd->IsLoadedFromProto());
  1143. if (propertySymOpnd->NeedsTypeCheckAndBailOut())
  1144. {
  1145. *pBailOutKind = propertySymOpnd->HasEquivalentTypeSet() && !propertySymOpnd->MustDoMonoCheck() ?
  1146. (isCheckFixedFld ? IR::BailOutFailedEquivalentFixedFieldTypeCheck : IR::BailOutFailedEquivalentTypeCheck) :
  1147. (isCheckFixedFld ? IR::BailOutFailedFixedFieldTypeCheck : IR::BailOutFailedTypeCheck);
  1148. return true;
  1149. }
  1150. else
  1151. {
  1152. *pIsTypeCheckProtected = propertySymOpnd->IsTypeCheckProtected();
  1153. *pBailOutKind = IR::BailOutInvalid;
  1154. return false;
  1155. }
  1156. }
  1157. else
  1158. {
  1159. Assert(instr->m_opcode != Js::OpCode::CheckFixedFld);
  1160. *pBailOutKind = IR::BailOutInvalid;
  1161. return false;
  1162. }
  1163. }
// Determine whether this instruction, given what is known about its operands'
// values, might perform an implicit call (valueOf/toString/getter/setter or a
// helper call). Opcode-specific cases are handled first (LdLen and element
// access, where certain bailout kinds already preclude implicit calls); then
// the operands are examined generically.
bool
GlobOpt::MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val)
{
    if (!instr->HasAnyImplicitCalls())
    {
        return false;
    }
    bool isLdElem = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::LdLen_A:
    {
        // Length loads on strings and (non-object-with-array) arrays don't
        // invoke user code; BailOutOnIrregularLength also guarantees that.
        const ValueType baseValueType(instr->GetSrc1()->GetValueType());
        return
            !(
                baseValueType.IsString() ||
                (baseValueType.IsAnyArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray) ||
                (instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutOnIrregularLength) // guarantees no implicit calls
            );
    }
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::LdMethodElem:
    case Js::OpCode::InlineArrayPop:
        isLdElem = true;
        // fall-through
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::InlineArrayPush:
    {
        if(!instr->HasBailOutInfo())
        {
            return true;
        }
        // The following bailout kinds already prevent implicit calls from happening. Any conditions that could trigger an
        // implicit call result in a pre-op bailout.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        return
            !(
                (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly ||
                bailOutKind & IR::BailOutOnArrayAccessHelperCall ||
                (isLdElem && bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
            );
    }
    default:
        break;
    }
    if (OpCodeAttr::HasImplicitCall(instr->m_opcode))
    {
        // Operation has an implicit call regardless of operand attributes.
        return true;
    }
    // Check the dst: field/indexed stores may trigger setters or helpers.
    IR::Opnd const * opnd = instr->GetDst();
    if (opnd)
    {
        switch (opnd->GetKind())
        {
        case IR::OpndKindReg:
            break;
        case IR::OpndKindSym:
            // No implicit call if we are just storing to a stack sym. Note that stores to non-configurable root
            // object fields may still need implicit call bailout. That's because a non-configurable field may still
            // become read-only and thus the store field will not take place (or throw in strict mode). Hence, we
            // can't optimize (e.g. copy prop) across such field stores.
            if (opnd->AsSymOpnd()->m_sym->IsStackSym())
            {
                return false;
            }
            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
                if (!propertySymOpnd->MayHaveImplicitCall())
                {
                    return false;
                }
            }
            return true;
        case IR::OpndKindIndir:
            return true;
        default:
            Assume(UNREACHED);
        }
    }
    // Finally, check whether either source operand could call ToPrimitive.
    opnd = instr->GetSrc1();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src1Val))
    {
        return true;
    }
    opnd = instr->GetSrc2();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src2Val))
    {
        return true;
    }
    return false;
}
  1258. void
  1259. GlobOpt::GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind)
  1260. {
  1261. Assert(pInstr && *pInstr);
  1262. IR::Instr* instr = *pInstr;
  1263. IR::Instr * nextInstr = instr->GetNextByteCodeInstr();
  1264. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(nextInstr, kind);
  1265. if (this->currentBlock->GetLastInstr() == instr)
  1266. {
  1267. this->currentBlock->SetLastInstr(bailOutInstr);
  1268. }
  1269. FillBailOutInfo(this->currentBlock, bailOutInstr);
  1270. *pInstr = bailOutInstr;
  1271. }
  1272. void
  1273. GlobOpt::GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind)
  1274. {
  1275. Assert(pInstr);
  1276. IR::Instr * instr = *pInstr;
  1277. Assert(instr);
  1278. Assert(instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);
  1279. Assert(bailOutKind != IR::BailOutInvalid);
  1280. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(instr, bailOutKind);
  1281. if (this->currentBlock->GetLastInstr() == instr)
  1282. {
  1283. this->currentBlock->SetLastInstr(bailOutInstr);
  1284. }
  1285. FillBailOutInfo(currentBlock, bailOutInstr);
  1286. *pInstr = bailOutInstr;
  1287. }
  1288. IR::Instr *
  1289. GlobOpt::EnsureBailTarget(Loop * loop)
  1290. {
  1291. BailOutInfo * bailOutInfo = loop->bailOutInfo;
  1292. IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
  1293. if (bailOutInstr == nullptr)
  1294. {
  1295. bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailTarget, IR::BailOutShared, bailOutInfo, bailOutInfo->bailOutFunc);
  1296. loop->landingPad->InsertAfter(bailOutInstr);
  1297. }
  1298. return bailOutInstr;
  1299. }