GlobOptBailOut.cpp 56 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. void
  7. GlobOpt::CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
  8. {
  9. if (!sym->IsStackSym())
  10. {
  11. return;
  12. }
  13. StackSym * copyPropSym = block->globOptData.GetCopyPropSym(sym, val);
  14. if (copyPropSym != nullptr)
  15. {
  16. bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), copyPropSym);
  17. }
  18. }
// Builds a fresh set of captured constant/copy-prop values for a bailout point,
// without relying on any previously captured baseline.
// Pass 1 collects into changedSyms the byte-code-backed stack syms whose values
// are interesting (have a sym store or an int constant); pass 2 records, for each
// such sym, either its constant value or its copy-prop sym into the bailout lists.
void
GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter,
    BVSparse<JitArenaAllocator>* argsToCapture)
{
    Sym * sym = nullptr;
    Value * value = nullptr;
    ValueInfo * valueInfo = nullptr;

    // Pass 1: repurpose changedSyms as the set of syms to capture.
    block->globOptData.changedSyms->ClearAll();
    FOREACH_GLOBHASHTABLE_ENTRY(bucket, block->globOptData.symToValueMap)
    {
        value = bucket.element;
        valueInfo = value->GetValueInfo();
        // Skip values that carry neither a sym store nor an int constant —
        // there is nothing worth restoring for them on bailout.
        if (valueInfo->GetSymStore() == nullptr && !valueInfo->HasIntConstantValue())
        {
            continue;
        }
        sym = bucket.value;
        // Only stack syms with a byte code register slot are visible to the interpreter.
        if (sym == nullptr || !sym->IsStackSym() || !(sym->AsStackSym()->HasByteCodeRegSlot()))
        {
            continue;
        }
        block->globOptData.changedSyms->Set(sym->m_id);
    }
    NEXT_GLOBHASHTABLE_ENTRY;
    if (argsToCapture)
    {
        block->globOptData.changedSyms->Or(argsToCapture);
    }
    // Pass 2: record each selected sym as an int constant, a var constant,
    // or (failing both) via its copy-prop sym.
    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        HashBucket<Sym*, Value*> * bucket = block->globOptData.symToValueMap->GetBucket(symId);
        StackSym * stackSym = bucket->value->AsStackSym();
        value = bucket->element;
        valueInfo = value->GetValueInfo();
        int intConstantValue;
        if (valueInfo->TryGetIntConstantValue(&intConstantValue))
        {
            BailoutConstantValue constValue;
            constValue.InitIntConstValue(intConstantValue);
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else if (valueInfo->IsVarConstant())
        {
            BailoutConstantValue constValue;
            constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else
        {
            CaptureCopyPropValue(block, stackSym, value, bailOutCopySymsIter);
        }
    }
    NEXT_BITSET_IN_SPARSEBV
}
// Builds the captured values for a bailout point incrementally: starting from the
// previously captured baseline (block->globOptData.capturedValues), it copies over
// entries for syms that have not changed since, and recaptures only the syms present
// in changedSyms. Both baseline lists are iterated in sym-id order, merged against
// the sorted changedSyms bit vector.
void
GlobOpt::CaptureValuesIncremental(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter,
    BVSparse<JitArenaAllocator>* argsToCapture)
{
    CapturedValues * currCapturedValues = block->globOptData.capturedValues;
    SListBase<ConstantStackSymValue>::Iterator iterConst(currCapturedValues ? &currCapturedValues->constantValues : nullptr);
    SListBase<CopyPropSyms>::Iterator iterCopyPropSym(currCapturedValues ? &currCapturedValues->copyPropSyms : nullptr);
    bool hasConstValue = currCapturedValues ? iterConst.Next() : false;
    bool hasCopyPropSym = currCapturedValues ? iterCopyPropSym.Next() : false;

    // Sentinel bit: guarantees the loop below runs at least once so that baseline
    // entries with ids below the first changed sym still get copied.
    block->globOptData.changedSyms->Set(Js::Constants::InvalidSymID);
    if (argsToCapture)
    {
        block->globOptData.changedSyms->Or(argsToCapture);
    }
    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        Value * val = nullptr;
        // First process all unchanged syms with m_id < symId. Then, recapture the current changed sym.
        // copy unchanged const sym to new capturedValues
        Sym * constSym = hasConstValue ? iterConst.Data().Key() : nullptr;
        while (constSym && constSym->m_id < symId)
        {
            Assert(constSym->IsStackSym());
            // Arg-slot syms are not carried over; they are handled via the call sequence.
            if (!constSym->AsStackSym()->HasArgSlotNum())
            {
                bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, constSym->AsStackSym(), iterConst.Data().Value());
            }
            hasConstValue = iterConst.Next();
            constSym = hasConstValue ? iterConst.Data().Key() : nullptr;
        }
        // Skip the baseline entry for the changed sym itself; it is recaptured below.
        if (constSym && constSym->m_id == symId)
        {
            hasConstValue = iterConst.Next();
        }
        // process unchanged sym; copy-prop sym might have changed
        Sym * capturedSym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        while (capturedSym && capturedSym->m_id < symId)
        {
            StackSym * capturedCopyPropSym = iterCopyPropSym.Data().Value();
            Assert(capturedSym->IsStackSym());
            if (!block->globOptData.changedSyms->Test(capturedCopyPropSym->m_id))
            {
                // Neither side of the pair changed: the baseline mapping is still valid.
                if (!capturedSym->AsStackSym()->HasArgSlotNum())
                {
                    bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, capturedSym->AsStackSym(), capturedCopyPropSym);
                }
            }
            else
            {
                // The copy-prop sym changed: look up the current value and recapture.
                if (!capturedSym->AsStackSym()->HasArgSlotNum())
                {
                    val = this->currentBlock->globOptData.FindValue(capturedSym);
                    if (val != nullptr)
                    {
                        CaptureCopyPropValue(block, capturedSym, val, bailOutCopySymsIter);
                    }
                }
            }
            hasCopyPropSym = iterCopyPropSym.Next();
            capturedSym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        }
        if (capturedSym && capturedSym->m_id == symId)
        {
            hasCopyPropSym = iterCopyPropSym.Next();
        }
        // recapture changed sym
        HashBucket<Sym *, Value *> * symIdBucket = nullptr;
        if (symId != Js::Constants::InvalidSymID)
        {
            symIdBucket = block->globOptData.symToValueMap->GetBucket(symId);
            if (symIdBucket != nullptr)
            {
                Sym * symIdSym = symIdBucket->value;
                Assert(symIdSym->IsStackSym() && (symIdSym->AsStackSym()->HasByteCodeRegSlot() || symIdSym->AsStackSym()->HasArgSlotNum()));
                val = symIdBucket->element;
                Assert(val);
                ValueInfo* valueInfo = val->GetValueInfo();
                if (valueInfo->GetSymStore() != nullptr)
                {
                    int32 intConstValue;
                    BailoutConstantValue constValue;
                    if (valueInfo->TryGetIntConstantValue(&intConstValue))
                    {
                        constValue.InitIntConstValue(intConstValue);
                        bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    }
                    else if (valueInfo->IsVarConstant())
                    {
                        constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
                        bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    }
                    else
                    {
                        CaptureCopyPropValue(block, symIdSym, val, bailOutCopySymsIter);
                    }
                }
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV

    // If, after going over the set of changed syms since the last time we captured values,
    // there are remaining unprocessed entries in the current captured values set,
    // they can simply be copied over to the new bailout info.
    while (hasConstValue)
    {
        Sym * constSym = iterConst.Data().Key();
        Assert(constSym->IsStackSym());
        Assert(!block->globOptData.changedSyms->Test(constSym->m_id));
        if (!constSym->AsStackSym()->HasArgSlotNum())
        {
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, constSym->AsStackSym(), iterConst.Data().Value());
        }
        hasConstValue = iterConst.Next();
    }
    while (hasCopyPropSym)
    {
        Sym * capturedSym = iterCopyPropSym.Data().Key();
        StackSym * capturedCopyPropSym = iterCopyPropSym.Data().Value();
        Assert(capturedSym->IsStackSym());
        Assert(!block->globOptData.changedSyms->Test(capturedSym->m_id) &&
            !block->globOptData.changedSyms->Test(capturedCopyPropSym->m_id));
        if (!capturedSym->AsStackSym()->HasArgSlotNum())
        {
            bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, capturedSym->AsStackSym(), capturedCopyPropSym);
        }
        hasCopyPropSym = iterCopyPropSym.Next();
    }
}
// Captures constant and copy-prop values for the given bailout point, either from
// scratch or incrementally from the block's cached baseline, and attaches the
// result to bailOutInfo. Outside the loop pre-pass, the new capture becomes the
// baseline candidate for subsequent bailouts in this block.
void
GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator>* argsToCapture)
{
    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // TODO[generators][ianhall]: Enable constprop and copyprop for generator functions; see GlobOpt::CopyProp()
        // Even though CopyProp is disabled for generator functions we must also not put the copy-prop sym into the
        // bailOutInfo so that the bailOutInfo keeps track of the key sym in its byteCodeUpwardExposed list.
        return;
    }
    // Build the lists locally first; the editing iterators stay positioned at the
    // tail so the lists can be spliced onto bailOutInfo below.
    CapturedValues capturedValues;
    SListBase<ConstantStackSymValue>::EditingIterator bailOutConstValuesIter(&capturedValues.constantValues);
    SListBase<CopyPropSyms>::EditingIterator bailOutCopySymsIter(&capturedValues.copyPropSyms);
    bailOutConstValuesIter.Next();
    bailOutCopySymsIter.Next();
    if (!block->globOptData.capturedValues)
    {
        // No baseline yet: walk the full sym-to-value map.
        CaptureValuesFromScratch(block, bailOutConstValuesIter, bailOutCopySymsIter, argsToCapture);
    }
    else
    {
        // Baseline exists: only recapture syms that changed since it was taken.
        CaptureValuesIncremental(block, bailOutConstValuesIter, bailOutCopySymsIter, argsToCapture);
    }
    // attach capturedValues to bailOutInfo
    bailOutInfo->capturedValues->constantValues.Clear(this->func->m_alloc);
    bailOutConstValuesIter.SetNext(&bailOutInfo->capturedValues->constantValues);
    bailOutInfo->capturedValues->constantValues = capturedValues.constantValues;
    bailOutInfo->capturedValues->copyPropSyms.Clear(this->func->m_alloc);
    bailOutCopySymsIter.SetNext(&bailOutInfo->capturedValues->copyPropSyms);
    bailOutInfo->capturedValues->copyPropSyms = capturedValues.copyPropSyms;
    // In pre-pass only bailout info created should be for the loop header, and that doesn't take into account the back edge.
    // Don't use the captured values on that bailout for incremental capturing of values.
    if (!PHASE_OFF(Js::IncrementalBailoutPhase, func) && !this->IsLoopPrePass())
    {
        // cache the pointer of current bailout as potential baseline for later bailout in this block
        if (block->globOptData.capturedValuesCandidate)
        {
            block->globOptData.capturedValuesCandidate->DecrementRefCount();
        }
        block->globOptData.capturedValuesCandidate = bailOutInfo->capturedValues;
        block->globOptData.capturedValuesCandidate->IncrementRefCount();
        // reset changed syms to track symbols change after the above captured values candidate
        this->changedSymsAfterIncBailoutCandidate->ClearAll();
    }
}
  250. void
  251. GlobOpt::CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator)
  252. {
  253. FOREACH_BITSET_IN_SPARSEBV(id, this->currentBlock->globOptData.argObjSyms)
  254. {
  255. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  256. Assert(stackSym != nullptr);
  257. if (!stackSym->HasByteCodeRegSlot())
  258. {
  259. continue;
  260. }
  261. if (!bailOutInfo->capturedValues->argObjSyms)
  262. {
  263. bailOutInfo->capturedValues->argObjSyms = JitAnew(allocator, BVSparse<JitArenaAllocator>, allocator);
  264. }
  265. bailOutInfo->capturedValues->argObjSyms->Set(id);
  266. // Add to BailOutInfo
  267. }
  268. NEXT_BITSET_IN_SPARSEBV
  269. }
// Records in instrByteCodeStackSymUsed every byte code stack sym used by this
// instruction's operands, and reports (via pPropertySym) the property sym used,
// if any. No-op in asm.js mode, where bailouts don't restore byte code regs.
void
GlobOpt::TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
{
    if(instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
    {
        return;
    }
    IR::Opnd * src = instr->GetSrc1();
    if (src)
    {
        TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
        // Src2 can only be present when src1 is.
        src = instr->GetSrc2();
        if (src)
        {
            TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
        }
    }
#if DBG
    // There should be no more than one property sym used.
    PropertySym *propertySymFromSrc = *pPropertySym;
#endif
    IR::Opnd * dst = instr->GetDst();
    if (dst)
    {
        StackSym *stackSym = dst->GetStackSym();
        // We want stackSym uses: IndirOpnd and SymOpnds of propertySyms.
        // RegOpnd and SymOPnd of StackSyms are stack sym defs.
        if (stackSym == NULL)
        {
            TrackByteCodeSymUsed(dst, instrByteCodeStackSymUsed, pPropertySym);
        }
    }
#if DBG
    AssertMsg(propertySymFromSrc == NULL || propertySymFromSrc == *pPropertySym,
        "Lost a property sym use?");
#endif
}
  307. void
  308. GlobOpt::TrackByteCodeSymUsed(IR::RegOpnd * regOpnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  309. {
  310. // Check JITOptimizedReg to catch case where baseOpnd of indir was optimized.
  311. if (!regOpnd->GetIsJITOptimizedReg())
  312. {
  313. TrackByteCodeSymUsed(regOpnd->m_sym, instrByteCodeStackSymUsed);
  314. }
  315. }
  316. void
  317. GlobOpt::TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  318. {
  319. if (opnd->GetIsJITOptimizedReg())
  320. {
  321. AssertMsg(!opnd->IsIndirOpnd(), "TrackByteCodeSymUsed doesn't expect IndirOpnd with IsJITOptimizedReg turned on");
  322. return;
  323. }
  324. switch(opnd->GetKind())
  325. {
  326. case IR::OpndKindReg:
  327. TrackByteCodeSymUsed(opnd->AsRegOpnd(), instrByteCodeStackSymUsed);
  328. break;
  329. case IR::OpndKindSym:
  330. {
  331. Sym * sym = opnd->AsSymOpnd()->m_sym;
  332. if (sym->IsStackSym())
  333. {
  334. TrackByteCodeSymUsed(sym->AsStackSym(), instrByteCodeStackSymUsed);
  335. }
  336. else
  337. {
  338. TrackByteCodeSymUsed(sym->AsPropertySym()->m_stackSym, instrByteCodeStackSymUsed);
  339. *pPropertySym = sym->AsPropertySym();
  340. }
  341. }
  342. break;
  343. case IR::OpndKindIndir:
  344. TrackByteCodeSymUsed(opnd->AsIndirOpnd()->GetBaseOpnd(), instrByteCodeStackSymUsed);
  345. {
  346. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  347. if (indexOpnd)
  348. {
  349. TrackByteCodeSymUsed(indexOpnd, instrByteCodeStackSymUsed);
  350. }
  351. }
  352. break;
  353. }
  354. }
  355. void
  356. GlobOpt::TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  357. {
  358. // We only care about stack sym that has a corresponding byte code register
  359. if (sym->HasByteCodeRegSlot())
  360. {
  361. if (sym->IsTypeSpec())
  362. {
  363. // It has to have a var version for byte code regs
  364. sym = sym->GetVarEquivSym(nullptr);
  365. }
  366. instrByteCodeStackSymUsed->Set(sym->m_id);
  367. }
  368. }
  369. void
  370. GlobOpt::MarkNonByteCodeUsed(IR::Instr * instr)
  371. {
  372. IR::Opnd * dst = instr->GetDst();
  373. if (dst)
  374. {
  375. MarkNonByteCodeUsed(dst);
  376. }
  377. IR::Opnd * src1 = instr->GetSrc1();
  378. if (src1)
  379. {
  380. MarkNonByteCodeUsed(src1);
  381. IR::Opnd * src2 = instr->GetSrc2();
  382. if (src2)
  383. {
  384. MarkNonByteCodeUsed(src2);
  385. }
  386. }
  387. }
  388. void
  389. GlobOpt::MarkNonByteCodeUsed(IR::Opnd * opnd)
  390. {
  391. switch(opnd->GetKind())
  392. {
  393. case IR::OpndKindReg:
  394. opnd->AsRegOpnd()->SetIsJITOptimizedReg(true);
  395. break;
  396. case IR::OpndKindIndir:
  397. opnd->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  398. {
  399. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  400. if (indexOpnd)
  401. {
  402. indexOpnd->SetIsJITOptimizedReg(true);
  403. }
  404. }
  405. break;
  406. }
  407. }
  408. void
  409. GlobOpt::CaptureByteCodeSymUses(IR::Instr * instr)
  410. {
  411. if (this->byteCodeUses || this->func->GetJITFunctionBody()->IsAsmJsMode())
  412. {
  413. // We already captured it before.
  414. return;
  415. }
  416. Assert(this->propertySymUse == NULL);
  417. this->byteCodeUses = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  418. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUses, &this->propertySymUse);
  419. AssertMsg(this->byteCodeUses->Equal(this->byteCodeUsesBeforeOpt),
  420. "Instruction edited before capturing the byte code use");
  421. }
  422. void
  423. GlobOpt::ProcessInlineeEnd(IR::Instr* instr)
  424. {
  425. if (instr->m_func->m_hasInlineArgsOpt)
  426. {
  427. RecordInlineeFrameInfo(instr);
  428. }
  429. EndTrackingOfArgObjSymsForInlinee();
  430. Assert(this->currentBlock->globOptData.inlinedArgOutSize >= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false));
  431. this->currentBlock->globOptData.inlinedArgOutSize -= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false);
  432. }
  433. void
  434. GlobOpt::TrackCalls(IR::Instr * instr)
  435. {
  436. // Keep track of out params for bailout
  437. switch (instr->m_opcode)
  438. {
  439. case Js::OpCode::StartCall:
  440. Assert(!this->isCallHelper);
  441. Assert(instr->GetDst()->IsRegOpnd());
  442. Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
  443. if (this->currentBlock->globOptData.callSequence == nullptr)
  444. {
  445. this->currentBlock->globOptData.callSequence = JitAnew(this->alloc, SListBase<IR::Opnd *>);
  446. this->currentBlock->globOptData.callSequence = this->currentBlock->globOptData.callSequence;
  447. }
  448. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  449. this->currentBlock->globOptData.totalOutParamCount += instr->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
  450. this->currentBlock->globOptData.startCallCount++;
  451. break;
  452. case Js::OpCode::BytecodeArgOutCapture:
  453. {
  454. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  455. this->currentBlock->globOptData.argOutCount++;
  456. break;
  457. }
  458. case Js::OpCode::ArgOut_A:
  459. case Js::OpCode::ArgOut_A_Inline:
  460. case Js::OpCode::ArgOut_A_FixupForStackArgs:
  461. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  462. case Js::OpCode::ArgOut_A_Dynamic:
  463. case Js::OpCode::ArgOut_A_FromStackArgs:
  464. case Js::OpCode::ArgOut_A_SpreadArg:
  465. {
  466. IR::Opnd * opnd = instr->GetDst();
  467. if (opnd->IsSymOpnd())
  468. {
  469. Assert(!this->isCallHelper);
  470. Assert(!this->currentBlock->globOptData.callSequence->Empty());
  471. StackSym* stackSym = opnd->AsSymOpnd()->m_sym->AsStackSym();
  472. // These scenarios are already tracked using BytecodeArgOutCapture,
  473. // and we don't want to be tracking ArgOut_A_FixupForStackArgs as these are only visible to the JIT and we should not be restoring them upon bailout.
  474. if (!stackSym->m_isArgCaptured && instr->m_opcode != Js::OpCode::ArgOut_A_FixupForStackArgs)
  475. {
  476. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  477. this->currentBlock->globOptData.argOutCount++;
  478. }
  479. Assert(stackSym->IsArgSlotSym());
  480. if (stackSym->m_isInlinedArgSlot)
  481. {
  482. uint size = TySize[instr->GetDst()->GetType()];
  483. this->currentBlock->globOptData.inlinedArgOutSize += size < MachPtr ? MachPtr : size;
  484. // We want to update the offsets only once: don't do in prepass.
  485. if (!this->IsLoopPrePass() && stackSym->m_offset >= 0)
  486. {
  487. Func * currentFunc = instr->m_func;
  488. stackSym->FixupStackOffset(currentFunc);
  489. }
  490. }
  491. }
  492. else
  493. {
  494. // It is a reg opnd if it is a helper call
  495. // It should be all ArgOut until the CallHelper instruction
  496. Assert(opnd->IsRegOpnd());
  497. this->isCallHelper = true;
  498. }
  499. if (instr->m_opcode == Js::OpCode::ArgOut_A_FixupForStackArgs && !this->IsLoopPrePass())
  500. {
  501. instr->m_opcode = Js::OpCode::ArgOut_A_Inline;
  502. }
  503. break;
  504. }
  505. case Js::OpCode::InlineeStart:
  506. Assert(instr->m_func->GetParentFunc() == this->currentBlock->globOptData.curFunc);
  507. Assert(instr->m_func->GetParentFunc());
  508. this->currentBlock->globOptData.curFunc = instr->m_func;
  509. this->func->UpdateMaxInlineeArgOutSize(this->currentBlock->globOptData.inlinedArgOutSize);
  510. this->EndTrackCall(instr);
  511. if (DoInlineArgsOpt(instr->m_func))
  512. {
  513. instr->m_func->m_hasInlineArgsOpt = true;
  514. InlineeFrameInfo* frameInfo = InlineeFrameInfo::New(func->m_alloc);
  515. instr->m_func->frameInfo = frameInfo;
  516. frameInfo->functionSymStartValue = instr->GetSrc1()->GetSym() ?
  517. CurrentBlockData()->FindValue(instr->GetSrc1()->GetSym()) : nullptr;
  518. frameInfo->floatSyms = CurrentBlockData()->liveFloat64Syms->CopyNew(this->alloc);
  519. frameInfo->intSyms = CurrentBlockData()->liveInt32Syms->MinusNew(CurrentBlockData()->liveLossyInt32Syms, this->alloc);
  520. frameInfo->varSyms = CurrentBlockData()->liveVarSyms->CopyNew(this->alloc);
  521. }
  522. break;
  523. case Js::OpCode::EndCallForPolymorphicInlinee:
  524. // Have this opcode mimic the functions of both InlineeStart and InlineeEnd in the bailout block of a polymorphic call inlined using fixed methods.
  525. this->EndTrackCall(instr);
  526. break;
  527. case Js::OpCode::CallHelper:
  528. case Js::OpCode::IsInst:
  529. Assert(this->isCallHelper);
  530. this->isCallHelper = false;
  531. break;
  532. case Js::OpCode::InlineeEnd:
  533. ProcessInlineeEnd(instr);
  534. break;
  535. case Js::OpCode::InlineeMetaArg:
  536. {
  537. Assert(instr->GetDst()->IsSymOpnd());
  538. StackSym * stackSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
  539. Assert(stackSym->IsArgSlotSym());
  540. // InlineeMetaArg has the m_func set as the "inlinee" and not the "inliner"
  541. // TODO: Review this and fix the m_func of InlineeMetaArg to be "inliner" (as for the rest of the ArgOut's)
  542. // We want to update the offsets only once: don't do in prepass.
  543. if (!this->IsLoopPrePass())
  544. {
  545. Func * currentFunc = instr->m_func->GetParentFunc();
  546. stackSym->FixupStackOffset(currentFunc);
  547. }
  548. this->currentBlock->globOptData.inlinedArgOutSize += MachPtr;
  549. break;
  550. }
  551. case Js::OpCode::InlineBuiltInStart:
  552. this->inInlinedBuiltIn = true;
  553. break;
  554. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  555. case Js::OpCode::InlineBuiltInEnd:
  556. {
  557. // If extra bailouts were added for the InlineMathXXX call itself,
  558. // move InlineeBuiltInStart just above the InlineMathXXX.
  559. // This is needed so that the function argument has lifetime after all bailouts for InlineMathXXX,
  560. // otherwise when we bailout we would get wrong function.
  561. IR::Instr* inlineBuiltInStartInstr = instr->m_prev;
  562. while (inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
  563. {
  564. inlineBuiltInStartInstr = inlineBuiltInStartInstr->m_prev;
  565. }
  566. IR::Instr *byteCodeUsesInstr = inlineBuiltInStartInstr->m_prev;
  567. IR::Instr * insertBeforeInstr = instr->m_prev;
  568. IR::Instr * tmpInstr = insertBeforeInstr;
  569. while(tmpInstr->m_opcode != Js::OpCode::InlineBuiltInStart )
  570. {
  571. if(tmpInstr->m_opcode == Js::OpCode::ByteCodeUses)
  572. {
  573. insertBeforeInstr = tmpInstr;
  574. }
  575. tmpInstr = tmpInstr->m_prev;
  576. }
  577. inlineBuiltInStartInstr->Unlink();
  578. if(insertBeforeInstr == instr->m_prev)
  579. {
  580. insertBeforeInstr->InsertBefore(inlineBuiltInStartInstr);
  581. }
  582. else
  583. {
  584. insertBeforeInstr->m_prev->InsertBefore(inlineBuiltInStartInstr);
  585. }
  586. // Need to move the byte code uses instructions associated with inline built-in start instruction as well. For instance,
  587. // copy-prop may have replaced the function sym and inserted a byte code uses for the original sym holding the function.
  588. // That byte code uses instruction needs to appear after bailouts inserted for the InlinMathXXX instruction since the
  589. // byte code register holding the function object needs to be restored on bailout.
  590. IR::Instr *const insertByteCodeUsesAfterInstr = inlineBuiltInStartInstr->m_prev;
  591. if(byteCodeUsesInstr != insertByteCodeUsesAfterInstr)
  592. {
  593. // The InlineBuiltInStart instruction was moved, look for its ByteCodeUses instructions that also need to be moved
  594. while(
  595. byteCodeUsesInstr->IsByteCodeUsesInstr() &&
  596. byteCodeUsesInstr->AsByteCodeUsesInstr()->GetByteCodeOffset() == inlineBuiltInStartInstr->GetByteCodeOffset())
  597. {
  598. IR::Instr *const instrToMove = byteCodeUsesInstr;
  599. byteCodeUsesInstr = byteCodeUsesInstr->m_prev;
  600. instrToMove->Unlink();
  601. insertByteCodeUsesAfterInstr->InsertAfter(instrToMove);
  602. }
  603. }
  604. // The following code makes more sense to be processed when we hit InlineeBuiltInStart,
  605. // but when extra bailouts are added for the InlineMathXXX and InlineArrayPop instructions itself, those bailouts
  606. // need to know about current bailout record, but since they are added after TrackCalls is called
  607. // for InlineeBuiltInStart, we can't clear current record when got InlineeBuiltInStart
  608. // Do not track calls for InlineNonTrackingBuiltInEnd, as it is already tracked for InlineArrayPop
  609. if(instr->m_opcode == Js::OpCode::InlineBuiltInEnd)
  610. {
  611. this->EndTrackCall(instr);
  612. }
  613. Assert(this->currentBlock->globOptData.inlinedArgOutSize >= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false));
  614. this->currentBlock->globOptData.inlinedArgOutSize -= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false);
  615. this->inInlinedBuiltIn = false;
  616. break;
  617. }
  618. case Js::OpCode::InlineArrayPop:
  619. {
  620. // EndTrackCall should be called here as the Post-op BailOutOnImplicitCalls will bail out to the instruction after the Pop function call instr.
  621. // This bailout shouldn't be tracking the call sequence as it will then erroneously reserve stack space for arguments when the call would have already happened
  622. // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for InlineArrayPop.
  623. this->EndTrackCall(instr);
  624. break;
  625. }
  626. default:
  627. if (OpCodeAttr::CallInstr(instr->m_opcode))
  628. {
  629. this->EndTrackCall(instr);
  630. if (this->inInlinedBuiltIn && instr->m_opcode == Js::OpCode::CallDirect)
  631. {
  632. // We can end up in this situation when a built-in apply target is inlined to a CallDirect. We have the following IR:
  633. //
  634. // StartCall
  635. // ArgOut_InlineBuiltIn
  636. // ArgOut_InlineBuiltIn
  637. // ArgOut_InlineBuiltIn
  638. // InlineBuiltInStart
  639. // ArgOut_A_InlineSpecialized
  640. // ArgOut_A
  641. // ArgOut_A
  642. // CallDirect
  643. // InlineNonTrackingBuiltInEnd
  644. //
  645. // We need to call EndTrackCall twice for CallDirect in this case. The CallDirect may get a BailOutOnImplicitCalls later,
  646. // but it should not be tracking the call sequence for the apply call as it is a post op bailout and the call would have
  647. // happened when we bail out.
  648. // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for CallDirect.
  649. this->EndTrackCall(instr);
  650. }
  651. }
  652. break;
  653. }
  654. }
// Records, for an InlineeEnd, the information needed to reconstruct the inlinee's frame
// (the inlined function object and its argument values) if a bailout happens while the
// inlinee is logically on the stack. Runs only on the real (non-prepass) optimization pass.
void GlobOpt::RecordInlineeFrameInfo(IR::Instr* inlineeEnd)
{
    if (this->IsLoopPrePass())
    {
        // Nothing is recorded during the loop prepass.
        return;
    }
    InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
    if (frameInfo->isRecorded)
    {
        Assert(frameInfo->function.type != InlineeFrameInfoValueType_None);
        // Due to Cmp peeps in flow graph - InlineeEnd can be cloned.
        return;
    }
    inlineeEnd->IterateArgInstrs([=] (IR::Instr* argInstr)
    {
        if (argInstr->m_opcode == Js::OpCode::InlineeStart)
        {
            // The InlineeStart's src1 is the inlined function object.
            Assert(frameInfo->function.type == InlineeFrameInfoValueType_None);
            IR::RegOpnd* functionObject = argInstr->GetSrc1()->AsRegOpnd();
            if (functionObject->m_sym->IsConst())
            {
                // Known-constant function object: record the constant value itself.
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym->GetConstValueForBailout());
            }
            else
            {
                // If the value of the functionObject symbol has changed between the inlineeStart and the inlineeEnd,
                // we don't record the inlinee frame info (see OS#18318884).
                Assert(frameInfo->functionSymStartValue != nullptr);
                if (!frameInfo->functionSymStartValue->IsEqualTo(CurrentBlockData()->FindValue(functionObject->m_sym)))
                {
                    // Value changed: give up on the inline arg optimization and stop iterating.
                    argInstr->m_func->DisableCanDoInlineArgOpt();
                    return true;
                }
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym);
            }
        }
        else if(!GetIsAsmJSFunc()) // don't care about saving arg syms for wasm/asm.js
        {
            Js::ArgSlot argSlot = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
            IR::Opnd* argOpnd = argInstr->GetSrc1();
            InlineFrameInfoValue frameInfoValue;
            StackSym* argSym = argOpnd->GetStackSym();
            if (!argSym)
            {
                // The ArgOut's src is a constant operand with no sym: record the constant.
                frameInfoValue = InlineFrameInfoValue(argOpnd->GetConstValue());
            }
            else if (argSym->IsConst() && !argSym->IsInt64Const())
            {
                // InlineFrameInfo doesn't currently support Int64Const
                frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
            }
            else
            {
                if (!PHASE_OFF(Js::CopyPropPhase, func))
                {
                    // Prefer the copy-prop sym if it is the one live in var form at this point.
                    Value* value = this->currentBlock->globOptData.FindValue(argSym);
                    if (value)
                    {
                        StackSym * copyPropSym = this->currentBlock->globOptData.GetCopyPropSym(argSym, value);
                        if (copyPropSym &&
                            frameInfo->varSyms->TestEmpty() && frameInfo->varSyms->Test(copyPropSym->m_id))
                        {
                            argSym = copyPropSym;
                        }
                    }
                }

                if (frameInfo->intSyms->TestEmpty() && frameInfo->intSyms->Test(argSym->m_id))
                {
                    // Var version of the sym is not live, use the int32 version
                    argSym = argSym->GetInt32EquivSym(nullptr);
                    Assert(argSym);
                }
                else if (frameInfo->floatSyms->TestEmpty() && frameInfo->floatSyms->Test(argSym->m_id))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    argSym = argSym->GetFloat64EquivSym(nullptr);
                    Assert(argSym);
                }
                else
                {
                    Assert(frameInfo->varSyms->Test(argSym->m_id));
                }

                if (argSym->IsConst() && !argSym->IsInt64Const())
                {
                    frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
                }
                else
                {
                    frameInfoValue = InlineFrameInfoValue(argSym);
                }
            }
            Assert(argSlot >= 1);
            // Arg slots are 1-based; the arguments array is 0-based.
            frameInfo->arguments->SetItem(argSlot - 1, frameInfoValue);
        }
        return false;
    });

    // The liveness bit vectors were only needed to pick the right sym version above.
    JitAdelete(this->alloc, frameInfo->intSyms);
    frameInfo->intSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->floatSyms);
    frameInfo->floatSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->varSyms);
    frameInfo->varSyms = nullptr;
    frameInfo->isRecorded = true;
}
  759. void GlobOpt::EndTrackingOfArgObjSymsForInlinee()
  760. {
  761. Assert(this->currentBlock->globOptData.curFunc->GetParentFunc());
  762. if (this->currentBlock->globOptData.curFunc->argObjSyms && TrackArgumentsObject())
  763. {
  764. BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  765. tempBv->Minus(this->currentBlock->globOptData.curFunc->argObjSyms, this->currentBlock->globOptData.argObjSyms);
  766. if(!tempBv->IsEmpty())
  767. {
  768. // This means there are arguments object symbols in the current function which are not in the current block.
  769. // This could happen when one of the blocks has a throw and arguments object aliased in it and other blocks don't see it.
  770. // Rare case, abort stack arguments optimization in this case.
  771. CannotAllocateArgumentsObjectOnStack();
  772. }
  773. else
  774. {
  775. Assert(this->currentBlock->globOptData.argObjSyms->OrNew(this->currentBlock->globOptData.curFunc->argObjSyms)->Equal(this->currentBlock->globOptData.argObjSyms));
  776. this->currentBlock->globOptData.argObjSyms->Minus(this->currentBlock->globOptData.curFunc->argObjSyms);
  777. }
  778. JitAdelete(this->tempAlloc, tempBv);
  779. }
  780. this->currentBlock->globOptData.curFunc = this->currentBlock->globOptData.curFunc->GetParentFunc();
  781. }
// Pops the completed call off of the current block's call sequence: removes the ArgOut
// entries and the matching StartCall entry, and updates the running argOut/outParam/
// startCall counters used to build bailout info.
void GlobOpt::EndTrackCall(IR::Instr* instr)
{
    Assert(instr);
    Assert(OpCodeAttr::CallInstr(instr->m_opcode) || instr->m_opcode == Js::OpCode::InlineeStart || instr->m_opcode == Js::OpCode::InlineBuiltInEnd
        || instr->m_opcode == Js::OpCode::InlineArrayPop || instr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee);

    Assert(!this->isCallHelper);
    Assert(!this->currentBlock->globOptData.callSequence->Empty());

#if DBG
    uint origArgOutCount = this->currentBlock->globOptData.argOutCount;
#endif
    // Entries with an arg slot number are the ArgOuts of the innermost (just completed) call;
    // pop them until we reach the StartCall entry.
    while (this->currentBlock->globOptData.callSequence->Head()->GetStackSym()->HasArgSlotNum())
    {
        this->currentBlock->globOptData.argOutCount--;
        this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);
    }
    // The next entry is the StartCall's dst sym; pop it too.
    StackSym * sym = this->currentBlock->globOptData.callSequence->Head()->AsRegOpnd()->m_sym->AsStackSym();
    this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);

#if DBG
    Assert(sym->m_isSingleDef);
    Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);

    // Number of argument set should be the same as indicated at StartCall
    // except NewScObject has an implicit arg1
    Assert((uint)sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true) ==
        origArgOutCount - this->currentBlock->globOptData.argOutCount +
        (instr->m_opcode == Js::OpCode::NewScObject || instr->m_opcode == Js::OpCode::NewScObjArray
        || instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread));

#endif

    this->currentBlock->globOptData.totalOutParamCount -= sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
    this->currentBlock->globOptData.startCallCount--;
}
// Fills a BailOutInfo with everything needed to restore interpreter state at this point in
// the given block: live sym bit vectors, stack-literal init-field counts, the arguments
// object, the in-flight call sequence (StartCalls and ArgOuts), and known constant values.
void
GlobOpt::FillBailOutInfo(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");

    // ArgOut syms that are plain stack syms (not sym opnds) must have their values re-captured below.
    BVSparse<JitArenaAllocator>* argsToCapture = nullptr;

    bailOutInfo->liveVarSyms = block->globOptData.liveVarSyms->CopyNew(this->func->m_alloc);
    bailOutInfo->liveFloat64Syms = block->globOptData.liveFloat64Syms->CopyNew(this->func->m_alloc);

    // The live int32 syms in the bailout info are only the syms resulting from lossless conversion to int. If the int32 value
    // was created from a lossy conversion to int, the original var value cannot be re-materialized from the int32 value. So, the
    // int32 version is considered to be not live for the purposes of bailout, which forces the var or float versions to be used
    // directly for restoring the value during bailout. Otherwise, bailout may try to re-materialize the var value by converting
    // the lossily-converted int value back into a var, restoring the wrong value.
    bailOutInfo->liveLosslessInt32Syms =
        block->globOptData.liveInt32Syms->MinusNew(block->globOptData.liveLossyInt32Syms, this->func->m_alloc);

    // Save the stack literal init field count so we can null out the uninitialized fields
    StackLiteralInitFldDataMap * stackLiteralInitFldDataMap = block->globOptData.stackLiteralInitFldDataMap;
    if (stackLiteralInitFldDataMap != nullptr)
    {
        uint stackLiteralInitFldDataCount = stackLiteralInitFldDataMap->Count();
        if (stackLiteralInitFldDataCount != 0)
        {
            // Flatten the map into a plain array on the func allocator for the bailout record.
            auto stackLiteralBailOutInfo = AnewArray(this->func->m_alloc,
                BailOutInfo::StackLiteralBailOutInfo, stackLiteralInitFldDataCount);
            uint i = 0;
            stackLiteralInitFldDataMap->Map(
                [stackLiteralBailOutInfo, stackLiteralInitFldDataCount, &i](StackSym * stackSym, StackLiteralInitFldData const& data)
            {
                Assert(i < stackLiteralInitFldDataCount);
                stackLiteralBailOutInfo[i].stackSym = stackSym;
                stackLiteralBailOutInfo[i].initFldCount = data.currentInitFldCount;
                i++;
            });
            Assert(i == stackLiteralInitFldDataCount);
            bailOutInfo->stackLiteralBailOutInfoCount = stackLiteralInitFldDataCount;
            bailOutInfo->stackLiteralBailOutInfo = stackLiteralBailOutInfo;
        }
    }

    if (TrackArgumentsObject())
    {
        this->CaptureArguments(block, bailOutInfo, this->func->m_alloc);
    }

    // Record the in-flight call sequence (calls whose ArgOuts have been emitted but which
    // haven't completed yet) so the interpreter can rebuild the argument stack on bailout.
    if (block->globOptData.callSequence && !block->globOptData.callSequence->Empty())
    {
        uint currentArgOutCount = 0;
        uint startCallNumber = block->globOptData.startCallCount;

        bailOutInfo->startCallInfo = JitAnewArray(this->func->m_alloc, BailOutInfo::StartCallInfo, startCallNumber);
        bailOutInfo->startCallCount = startCallNumber;

        // Save the start call's func to identify the function (inlined) that the call sequence is for
        // We might not have any arg out yet to get the function from
        bailOutInfo->startCallFunc = JitAnewArray(this->func->m_alloc, Func *, startCallNumber);
#ifdef _M_IX86
        bailOutInfo->inlinedStartCall = BVFixed::New(startCallNumber, this->func->m_alloc, false);
#endif
        uint totalOutParamCount = block->globOptData.totalOutParamCount;
        bailOutInfo->totalOutParamCount = totalOutParamCount;
        bailOutInfo->argOutSyms = JitAnewArrayZ(this->func->m_alloc, StackSym *, totalOutParamCount);

        // The call sequence list has the innermost call first; ArgOut entries have an arg slot
        // number, StartCall entries do not. argOutSyms[] is filled from the tail.
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, block->globOptData.callSequence)
        {
            if(opnd->GetStackSym()->HasArgSlotNum())
            {
                StackSym * sym;
                if(opnd->IsSymOpnd())
                {
                    sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
                    Assert(sym->IsArgSlotSym());
                    Assert(sym->m_isSingleDef);
                    Assert(sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Inline
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_InlineBuiltIn
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_SpreadArg
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
                }
                else
                {
                    sym = opnd->GetStackSym();
                    Assert(this->currentBlock->globOptData.FindValue(sym));
                    // StackSym args need to be re-captured
                    if (!argsToCapture)
                    {
                        argsToCapture = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                    }

                    argsToCapture->Set(sym->m_id);
                }

                Assert(totalOutParamCount != 0);
                Assert(totalOutParamCount > currentArgOutCount);
                currentArgOutCount++;
#pragma prefast(suppress:26000, "currentArgOutCount is never 0");
                bailOutInfo->argOutSyms[totalOutParamCount - currentArgOutCount] = sym;
                // Note that there could be ArgOuts below current bailout instr that belong to current call (currentArgOutCount < argOutCount),
                // in which case we will have nulls in argOutSyms[] in start of section for current call, because we fill from tail.
                // Example: StartCall 3, ArgOut1,.. ArgOut2, Bailout,.. Argout3 -> [NULL, ArgOut1, ArgOut2].
            }
            else
            {
                // StartCall entry: its dst sym is single-def by the StartCall instruction.
                Assert(opnd->IsRegOpnd());
                StackSym * sym = opnd->AsRegOpnd()->m_sym;
                Assert(!sym->IsArgSlotSym());
                Assert(sym->m_isSingleDef);
                Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);

                Assert(startCallNumber != 0);
                startCallNumber--;

                bailOutInfo->startCallFunc[startCallNumber] = sym->m_instrDef->m_func;
#ifdef _M_IX86
                if (sym->m_isInlinedArgSlot)
                {
                    bailOutInfo->inlinedStartCall->Set(startCallNumber);
                }
#endif
                uint argOutCount = sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
                Assert(totalOutParamCount >= argOutCount);
                Assert(argOutCount >= currentArgOutCount);

                bailOutInfo->RecordStartCallInfo(startCallNumber, sym->m_instrDef);
                // Move to the section for the next (outer) call.
                totalOutParamCount -= argOutCount;
                currentArgOutCount = 0;

            }
        }
        NEXT_SLISTBASE_ENTRY;

        // The walk must consume every StartCall and every out-param slot exactly once.
        Assert(totalOutParamCount == 0);
        Assert(startCallNumber == 0);
        Assert(currentArgOutCount == 0);
    }

    // Save the constant values that we know so we can restore them directly.
    // This allows us to dead store the constant value assign.
    this->CaptureValues(block, bailOutInfo, argsToCapture);
}
  937. void
  938. GlobOpt::FillBailOutInfo(BasicBlock *block, _In_ IR::Instr * instr)
  939. {
  940. AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");
  941. Assert(instr->HasBailOutInfo());
  942. if (this->isRecursiveCallOnLandingPad)
  943. {
  944. Assert(block->IsLandingPad());
  945. Loop * loop = block->next->loop;
  946. EnsureBailTarget(loop);
  947. if (instr->GetBailOutInfo() != loop->bailOutInfo)
  948. {
  949. instr->ReplaceBailOutInfo(loop->bailOutInfo);
  950. }
  951. return;
  952. }
  953. FillBailOutInfo(block, instr->GetBailOutInfo());
  954. }
  955. IR::ByteCodeUsesInstr *
  956. GlobOpt::InsertByteCodeUses(IR::Instr * instr, bool includeDef)
  957. {
  958. IR::ByteCodeUsesInstr * byteCodeUsesInstr = nullptr;
  959. if (!this->byteCodeUses)
  960. {
  961. Assert(this->isAsmJSFunc);
  962. return nullptr;
  963. }
  964. IR::RegOpnd * dstOpnd = nullptr;
  965. if (includeDef)
  966. {
  967. IR::Opnd * opnd = instr->GetDst();
  968. if (opnd && opnd->IsRegOpnd())
  969. {
  970. dstOpnd = opnd->AsRegOpnd();
  971. if (dstOpnd->GetIsJITOptimizedReg() || !dstOpnd->m_sym->HasByteCodeRegSlot())
  972. {
  973. dstOpnd = nullptr;
  974. }
  975. }
  976. }
  977. if (!this->byteCodeUses->IsEmpty() || this->propertySymUse || dstOpnd != nullptr)
  978. {
  979. if (instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset || !instr->HasBailOutInfo())
  980. {
  981. byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
  982. }
  983. else
  984. {
  985. byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr->m_func, instr->GetBailOutInfo()->bailOutOffset);
  986. }
  987. if (!this->byteCodeUses->IsEmpty())
  988. {
  989. byteCodeUsesInstr->SetBV(byteCodeUses->CopyNew(instr->m_func->m_alloc));
  990. }
  991. if (dstOpnd != nullptr)
  992. {
  993. byteCodeUsesInstr->SetFakeDst(dstOpnd);
  994. }
  995. if (this->propertySymUse)
  996. {
  997. byteCodeUsesInstr->propertySymUse = this->propertySymUse;
  998. }
  999. instr->InsertBefore(byteCodeUsesInstr);
  1000. }
  1001. JitAdelete(this->alloc, this->byteCodeUses);
  1002. this->byteCodeUses = nullptr;
  1003. this->propertySymUse = nullptr;
  1004. return byteCodeUsesInstr;
  1005. }
  1006. IR::ByteCodeUsesInstr *
  1007. GlobOpt::ConvertToByteCodeUses(IR::Instr * instr)
  1008. {
  1009. #if DBG
  1010. PropertySym *propertySymUseBefore = NULL;
  1011. Assert(this->byteCodeUses == nullptr);
  1012. this->byteCodeUsesBeforeOpt->ClearAll();
  1013. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
  1014. #endif
  1015. this->CaptureByteCodeSymUses(instr);
  1016. IR::ByteCodeUsesInstr * byteCodeUsesInstr = this->InsertByteCodeUses(instr, true);
  1017. instr->Remove();
  1018. if (byteCodeUsesInstr)
  1019. {
  1020. byteCodeUsesInstr->AggregateFollowingByteCodeUses();
  1021. }
  1022. return byteCodeUsesInstr;
  1023. }
  1024. bool
  1025. GlobOpt::MayNeedBailOut(Loop * loop) const
  1026. {
  1027. Assert(this->IsLoopPrePass());
  1028. return loop->CanHoistInvariants() || this->DoFieldCopyProp(loop) ;
  1029. }
  1030. bool
  1031. GlobOpt::MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val)
  1032. {
  1033. switch (opnd->GetKind())
  1034. {
  1035. case IR::OpndKindAddr:
  1036. case IR::OpndKindFloatConst:
  1037. case IR::OpndKindIntConst:
  1038. return false;
  1039. case IR::OpndKindReg:
  1040. // Only need implicit call if the operation will call ToPrimitive and we haven't prove
  1041. // that it is already a primitive
  1042. return
  1043. !(val && val->GetValueInfo()->IsPrimitive()) &&
  1044. !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
  1045. !opnd->AsRegOpnd()->m_sym->IsInt32() &&
  1046. !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
  1047. !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
  1048. !opnd->AsRegOpnd()->m_sym->IsIntConst();
  1049. case IR::OpndKindSym:
  1050. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  1051. {
  1052. IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
  1053. if (!propertySymOpnd->MayHaveImplicitCall())
  1054. {
  1055. return false;
  1056. }
  1057. }
  1058. return true;
  1059. default:
  1060. return true;
  1061. };
  1062. }
  1063. bool
  1064. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const
  1065. {
  1066. Assert(!this->IsLoopPrePass());
  1067. return this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val, this->currentBlock,
  1068. (!this->currentBlock->globOptData.liveFields->IsEmpty()), !this->currentBlock->IsLandingPad(), true);
  1069. }
  1070. bool
  1071. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass) const
  1072. {
  1073. if (mayNeedImplicitCallBailOut &&
  1074. !instr->CallsAccessor() &&
  1075. (
  1076. NeedBailOnImplicitCallForLiveValues(block, isForwardPass) ||
  1077. NeedBailOnImplicitCallForCSE(block, isForwardPass) ||
  1078. NeedBailOnImplicitCallWithFieldOpts(block->loop, hasLiveFields) ||
  1079. NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass) ||
  1080. (instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutMarkTempObject) != 0)
  1081. ) &&
  1082. (!instr->HasTypeCheckBailOut() && MayNeedBailOnImplicitCall(instr, src1Val, src2Val)))
  1083. {
  1084. return true;
  1085. }
  1086. #if DBG
  1087. if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryImplicitCallFlag) &&
  1088. !instr->HasBailOutInfo() && MayNeedBailOnImplicitCall(instr, nullptr, nullptr))
  1089. {
  1090. // always add implicit call bailout even if we don't need it, but only on opcode that supports it
  1091. return true;
  1092. }
  1093. #endif
  1094. return false;
  1095. }
  1096. bool
  1097. GlobOpt::IsTypeCheckProtected(const IR::Instr * instr)
  1098. {
  1099. #if DBG
  1100. IR::Opnd* dst = instr->GetDst();
  1101. IR::Opnd* src1 = instr->GetSrc1();
  1102. IR::Opnd* src2 = instr->GetSrc2();
  1103. AssertMsg(!dst || !dst->IsSymOpnd() || !dst->AsSymOpnd()->IsPropertySymOpnd() ||
  1104. !src1 || !src1->IsSymOpnd() || !src1->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src1 and dst be a PropertySymOpnd.");
  1105. AssertMsg(!src2 || !src2->IsSymOpnd() || !src2->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src2 be a PropertySymOpnd.");
  1106. #endif
  1107. IR::Opnd * opnd = instr->GetDst();
  1108. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1109. {
  1110. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1111. }
  1112. opnd = instr->GetSrc1();
  1113. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1114. {
  1115. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1116. }
  1117. return false;
  1118. }
  1119. bool
  1120. GlobOpt::NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind)
  1121. {
  1122. if (instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType || instr->m_opcode == Js::OpCode::LdMethodFldPolyInlineMiss)
  1123. {
  1124. return false;
  1125. }
  1126. // CheckFixedFld always requires a type check and bailout either at the instruction or upstream.
  1127. Assert(instr->m_opcode != Js::OpCode::CheckFixedFld || (propertySymOpnd->UsesFixedValue() && propertySymOpnd->MayNeedTypeCheckProtection()));
  1128. if (propertySymOpnd->MayNeedTypeCheckProtection())
  1129. {
  1130. bool isCheckFixedFld = instr->m_opcode == Js::OpCode::CheckFixedFld;
  1131. AssertMsg(!isCheckFixedFld || !PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) ||
  1132. !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed method/data phase disabled?");
  1133. Assert(!isStore || !isCheckFixedFld);
  1134. // We don't share caches between field loads and stores. We should never have a field store involving a proto cache.
  1135. Assert(!isStore || !propertySymOpnd->IsLoadedFromProto());
  1136. if (propertySymOpnd->NeedsTypeCheckAndBailOut())
  1137. {
  1138. *pBailOutKind = propertySymOpnd->HasEquivalentTypeSet() && !propertySymOpnd->MustDoMonoCheck() ?
  1139. (isCheckFixedFld ? IR::BailOutFailedEquivalentFixedFieldTypeCheck : IR::BailOutFailedEquivalentTypeCheck) :
  1140. (isCheckFixedFld ? IR::BailOutFailedFixedFieldTypeCheck : IR::BailOutFailedTypeCheck);
  1141. return true;
  1142. }
  1143. else
  1144. {
  1145. *pIsTypeCheckProtected = propertySymOpnd->IsTypeCheckProtected();
  1146. *pBailOutKind = IR::BailOutInvalid;
  1147. return false;
  1148. }
  1149. }
  1150. else
  1151. {
  1152. Assert(instr->m_opcode != Js::OpCode::CheckFixedFld);
  1153. *pBailOutKind = IR::BailOutInvalid;
  1154. return false;
  1155. }
  1156. }
// Determines whether executing this instruction may trigger an implicit call (getter/setter,
// valueOf/toString, helper that can re-enter script), considering the opcode, any bailout
// kinds that already guarantee no implicit call, and the operand kinds/value types.
bool
GlobOpt::MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val)
{
    if (!instr->HasAnyImplicitCalls())
    {
        return false;
    }

    bool isLdElem = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::LdLen_A:
    {
        // Length load is implicit-call-free for strings/arrays, or when the bailout
        // guarantees the irregular-length path bails out instead of calling a helper.
        const ValueType baseValueType(instr->GetSrc1()->GetValueType());
        return
            !(
                baseValueType.IsString() ||
                baseValueType.IsArray() ||
                (instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutOnIrregularLength) // guarantees no implicit calls
            );
    }

    case Js::OpCode::LdElemI_A:
    case Js::OpCode::LdMethodElem:
    case Js::OpCode::InlineArrayPop:
        isLdElem = true;
        // fall-through

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::InlineArrayPush:
    {
        if(!instr->HasBailOutInfo())
        {
            return true;
        }

        // The following bailout kinds already prevent implicit calls from happening. Any conditions that could trigger an
        // implicit call result in a pre-op bailout.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        return
            !(
                (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly ||
                bailOutKind & IR::BailOutOnArrayAccessHelperCall ||
                (isLdElem && bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
            );
    }

    case Js::OpCode::NewScObjectNoCtor:
        if (instr->HasBailOutInfo() && (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutFailedCtorGuardCheck)
        {
            // No helper call with this bailout.
            return false;
        }
        break;

    default:
        break;
    }

    if (OpCodeAttr::HasImplicitCall(instr->m_opcode))
    {
        // Operation has an implicit call regardless of operand attributes.
        return true;
    }

    // Check the destination: field/indexed stores may trigger setters or conversions.
    IR::Opnd const * opnd = instr->GetDst();
    if (opnd)
    {
        switch (opnd->GetKind())
        {
        case IR::OpndKindReg:
            break;

        case IR::OpndKindSym:
            // No implicit call if we are just storing to a stack sym. Note that stores to non-configurable root
            // object fields may still need implicit call bailout. That's because a non-configurable field may still
            // become read-only and thus the store field will not take place (or throw in strict mode). Hence, we
            // can't optimize (e.g. copy prop) across such field stores.
            if (opnd->AsSymOpnd()->m_sym->IsStackSym())
            {
                return false;
            }

            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
                if (!propertySymOpnd->MayHaveImplicitCall())
                {
                    return false;
                }
            }

            return true;

        case IR::OpndKindIndir:
            return true;

        default:
            Assume(UNREACHED);
        }
    }

    // Finally, check whether either source may require a ToPrimitive conversion.
    opnd = instr->GetSrc1();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src1Val))
    {
        return true;
    }
    opnd = instr->GetSrc2();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src2Val))
    {
        return true;
    }

    return false;
}
  1258. void
  1259. GlobOpt::GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind)
  1260. {
  1261. Assert(pInstr && *pInstr);
  1262. IR::Instr* instr = *pInstr;
  1263. IR::Instr * nextInstr = instr->GetNextByteCodeInstr();
  1264. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(nextInstr, kind);
  1265. if (this->currentBlock->GetLastInstr() == instr)
  1266. {
  1267. this->currentBlock->SetLastInstr(bailOutInstr);
  1268. }
  1269. FillBailOutInfo(this->currentBlock, bailOutInstr);
  1270. *pInstr = bailOutInstr;
  1271. }
  1272. void
  1273. GlobOpt::GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind)
  1274. {
  1275. Assert(pInstr);
  1276. IR::Instr * instr = *pInstr;
  1277. Assert(instr);
  1278. Assert(instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);
  1279. Assert(bailOutKind != IR::BailOutInvalid);
  1280. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(instr, bailOutKind);
  1281. if (this->currentBlock->GetLastInstr() == instr)
  1282. {
  1283. this->currentBlock->SetLastInstr(bailOutInstr);
  1284. }
  1285. FillBailOutInfo(currentBlock, bailOutInstr);
  1286. *pInstr = bailOutInstr;
  1287. }
  1288. IR::Instr *
  1289. GlobOpt::EnsureBailTarget(Loop * loop)
  1290. {
  1291. BailOutInfo * bailOutInfo = loop->bailOutInfo;
  1292. IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
  1293. if (bailOutInstr == nullptr)
  1294. {
  1295. bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailTarget, IR::BailOutShared, bailOutInfo, bailOutInfo->bailOutFunc);
  1296. loop->landingPad->InsertAfter(bailOutInstr);
  1297. }
  1298. return bailOutInstr;
  1299. }