GlobOptBailOut.cpp 63 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. void
  7. GlobOpt::CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
  8. {
  9. if (!sym->IsStackSym())
  10. {
  11. return;
  12. }
  13. StackSym * copyPropSym = block->globOptData.GetCopyPropSym(sym, val);
  14. if (copyPropSym != nullptr)
  15. {
  16. bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), copyPropSym);
  17. }
  18. }
// Capture, from scratch, the constant values and copy-prop syms that must be
// restored if we bail out from "block". Used when the block has no previously
// captured values to do an incremental capture against. Results are appended
// through the two editing iterators. As a side effect, changedSyms is rebuilt
// to hold every captured sym id, which serves as the baseline for later
// incremental captures in this block.
void
GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter,
    BVSparse<JitArenaAllocator>* argsToCapture)
{
    Sym * sym = nullptr;
    Value * value = nullptr;
    ValueInfo * valueInfo = nullptr;

    // Pass 1: rebuild changedSyms with every sym worth capturing.
    block->globOptData.changedSyms->ClearAll();

    FOREACH_VALUEHASHTABLE_ENTRY(GlobHashBucket, bucket, block->globOptData.symToValueMap)
    {
        value = bucket.element;
        valueInfo = value->GetValueInfo();

        // Skip values that have neither a sym store nor an int constant:
        // there is nothing restorable to capture for them.
        if (valueInfo->GetSymStore() == nullptr && !valueInfo->HasIntConstantValue())
        {
            continue;
        }

        sym = bucket.value;
        // Only stack syms with a byte code register slot are visible to the
        // interpreter after bailout; others need no capture.
        if (sym == nullptr || !sym->IsStackSym() || !(sym->AsStackSym()->HasByteCodeRegSlot()))
        {
            continue;
        }
        block->globOptData.changedSyms->Set(sym->m_id);
    }
    NEXT_VALUEHASHTABLE_ENTRY;

    if (argsToCapture)
    {
        block->globOptData.changedSyms->Or(argsToCapture);
    }

    // Pass 2: for each collected sym, record either its constant value or its
    // copy-prop sym.
    // NOTE(review): syms coming only from argsToCapture are assumed to have a
    // bucket in symToValueMap — GetBucket's result is not null-checked here,
    // unlike the incremental path. Confirm callers guarantee this.
    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        HashBucket<Sym*, Value*> * bucket = block->globOptData.symToValueMap->GetBucket(symId);
        StackSym * stackSym = bucket->value->AsStackSym();
        value = bucket->element;
        valueInfo = value->GetValueInfo();

        int intConstantValue;
        if (valueInfo->TryGetIntConstantValue(&intConstantValue))
        {
            BailoutConstantValue constValue;
            constValue.InitIntConstValue(intConstantValue);
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else if (valueInfo->IsVarConstant())
        {
            BailoutConstantValue constValue;
            constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else
        {
            // Not a constant: capture its copy-prop sym, if one exists.
            CaptureCopyPropValue(block, stackSym, value, bailOutCopySymsIter);
        }
    }
    NEXT_BITSET_IN_SPARSEBV
}
// Incrementally capture bailout values for "block", starting from the values
// captured at the previous bailout point (block->globOptData.capturedValues)
// and re-capturing only the syms recorded as changed since then
// (block->globOptData.changedSyms). The previously captured lists and the
// changed-sym bitvector are both ordered by sym id, so this is a merge:
// unchanged entries are copied over, changed syms are re-captured from the
// current value table.
void
GlobOpt::CaptureValuesIncremental(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter,
    BVSparse<JitArenaAllocator>* argsToCapture)
{
    CapturedValues * currCapturedValues = block->globOptData.capturedValues;
    SListBase<ConstantStackSymValue>::Iterator iterConst(currCapturedValues ? &currCapturedValues->constantValues : nullptr);
    SListBase<CopyPropSyms>::Iterator iterCopyPropSym(currCapturedValues ? &currCapturedValues->copyPropSyms : nullptr);
    bool hasConstValue = currCapturedValues ? iterConst.Next() : false;
    bool hasCopyPropSym = currCapturedValues ? iterCopyPropSym.Next() : false;

    // Seed InvalidSymID (the largest id visited last) so the merge loop below
    // runs one final iteration that drains all remaining unchanged entries
    // whose ids are smaller than any changed sym.
    block->globOptData.changedSyms->Set(Js::Constants::InvalidSymID);

    if (argsToCapture)
    {
        block->globOptData.changedSyms->Or(argsToCapture);
    }

    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        Value * val = nullptr;

        // First process all unchanged syms with m_id < symId. Then, recapture the current changed sym.
        // copy unchanged const sym to new capturedValues
        Sym * constSym = hasConstValue ? iterConst.Data().Key() : nullptr;
        while (constSym && constSym->m_id < symId)
        {
            Assert(constSym->IsStackSym());
            // Arg-slot syms are restored separately; don't carry them forward.
            if (!constSym->AsStackSym()->HasArgSlotNum())
            {
                bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, constSym->AsStackSym(), iterConst.Data().Value());
            }

            hasConstValue = iterConst.Next();
            constSym = hasConstValue ? iterConst.Data().Key() : nullptr;
        }
        // Skip the stale entry for the changed sym itself; it is re-captured below.
        if (constSym && constSym->m_id == symId)
        {
            hasConstValue = iterConst.Next();
        }

        // process unchanged sym; copy-prop sym might have changed
        Sym * capturedSym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        while (capturedSym && capturedSym->m_id < symId)
        {
            StackSym * capturedCopyPropSym = iterCopyPropSym.Data().Value();

            Assert(capturedSym->IsStackSym());

            if (!block->globOptData.changedSyms->Test(capturedCopyPropSym->m_id))
            {
                // Neither side of the pair changed: reuse the old entry as-is.
                if (!capturedSym->AsStackSym()->HasArgSlotNum())
                {
                    bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, capturedSym->AsStackSym(), capturedCopyPropSym);
                }
            }
            else
            {
                // The copy-prop sym changed: look up the sym's current value
                // and re-derive its copy-prop sym from the current block data.
                if (!capturedSym->AsStackSym()->HasArgSlotNum())
                {
                    val = this->currentBlock->globOptData.FindValue(capturedSym);
                    if (val != nullptr)
                    {
                        CaptureCopyPropValue(block, capturedSym, val, bailOutCopySymsIter);
                    }
                }
            }

            hasCopyPropSym = iterCopyPropSym.Next();
            capturedSym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        }
        // Skip the stale copy-prop entry for the changed sym; re-captured below.
        if (capturedSym && capturedSym->m_id == symId)
        {
            hasCopyPropSym = iterCopyPropSym.Next();
        }

        // recapture changed sym
        HashBucket<Sym *, Value *> * symIdBucket = nullptr;
        if (symId != Js::Constants::InvalidSymID)
        {
            symIdBucket = block->globOptData.symToValueMap->GetBucket(symId);
            if (symIdBucket != nullptr)
            {
                Sym * symIdSym = symIdBucket->value;
                Assert(symIdSym->IsStackSym() && (symIdSym->AsStackSym()->HasByteCodeRegSlot() || symIdSym->AsStackSym()->HasArgSlotNum()));

                val = symIdBucket->element;
                Assert(val);
                ValueInfo* valueInfo = val->GetValueInfo();

                if (valueInfo->GetSymStore() != nullptr)
                {
                    int32 intConstValue;
                    BailoutConstantValue constValue;

                    // Capture as an int constant, a var constant, or (failing
                    // both) a copy-prop sym — same priority as the from-scratch path.
                    if (valueInfo->TryGetIntConstantValue(&intConstValue))
                    {
                        constValue.InitIntConstValue(intConstValue);
                        bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    }
                    else if (valueInfo->IsVarConstant())
                    {
                        constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
                        bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    }
                    else
                    {
                        CaptureCopyPropValue(block, symIdSym, val, bailOutCopySymsIter);
                    }
                }
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV

    // If, after going over the set of changed syms since the last time we captured values,
    // there are remaining unprocessed entries in the current captured values set,
    // they can simply be copied over to the new bailout info.
    while (hasConstValue)
    {
        Sym * constSym = iterConst.Data().Key();
        Assert(constSym->IsStackSym());
        Assert(!block->globOptData.changedSyms->Test(constSym->m_id));

        if (!constSym->AsStackSym()->HasArgSlotNum())
        {
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, constSym->AsStackSym(), iterConst.Data().Value());
        }

        hasConstValue = iterConst.Next();
    }
    while (hasCopyPropSym)
    {
        Sym * capturedSym = iterCopyPropSym.Data().Key();
        StackSym * capturedCopyPropSym = iterCopyPropSym.Data().Value();

        Assert(capturedSym->IsStackSym());
        Assert(!block->globOptData.changedSyms->Test(capturedSym->m_id) &&
            !block->globOptData.changedSyms->Test(capturedCopyPropSym->m_id));

        if (!capturedSym->AsStackSym()->HasArgSlotNum())
        {
            bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, capturedSym->AsStackSym(), capturedCopyPropSym);
        }

        hasCopyPropSym = iterCopyPropSym.Next();
    }
}
// Capture the values needed to restore state on bailout at this point in
// "block", and attach them to "bailOutInfo". Picks the from-scratch path when
// the block has no previously captured values, otherwise the cheaper
// incremental path. Also maintains the per-block captured-values candidate
// used as the baseline for subsequent incremental captures.
void
GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator>* argsToCapture)
{
    // Build the new lists locally; they are spliced into bailOutInfo below.
    CapturedValues capturedValues;
    SListBase<ConstantStackSymValue>::EditingIterator bailOutConstValuesIter(&capturedValues.constantValues);
    SListBase<CopyPropSyms>::EditingIterator bailOutCopySymsIter(&capturedValues.copyPropSyms);

    // Position both editing iterators at the head so InsertNodeBefore appends.
    bailOutConstValuesIter.Next();
    bailOutCopySymsIter.Next();

    if (!block->globOptData.capturedValues)
    {
        CaptureValuesFromScratch(block, bailOutConstValuesIter, bailOutCopySymsIter, argsToCapture);
    }
    else
    {
        CaptureValuesIncremental(block, bailOutConstValuesIter, bailOutCopySymsIter, argsToCapture);
    }

    // attach capturedValues to bailOutInfo
    // Splice each local list into bailOutInfo's list: clear the old contents,
    // point the iterator's tail at the destination list, then copy the head.
    bailOutInfo->capturedValues->constantValues.Clear(this->func->m_alloc);
    bailOutConstValuesIter.SetNext(&bailOutInfo->capturedValues->constantValues);
    bailOutInfo->capturedValues->constantValues = capturedValues.constantValues;

    bailOutInfo->capturedValues->copyPropSyms.Clear(this->func->m_alloc);
    bailOutCopySymsIter.SetNext(&bailOutInfo->capturedValues->copyPropSyms);
    bailOutInfo->capturedValues->copyPropSyms = capturedValues.copyPropSyms;

    // In pre-pass only bailout info created should be for the loop header, and that doesn't take into account the back edge.
    // Don't use the captured values on that bailout for incremental capturing of values.
    if (!PHASE_OFF(Js::IncrementalBailoutPhase, func) && !this->IsLoopPrePass())
    {
        // cache the pointer of current bailout as potential baseline for later bailout in this block
        if (block->globOptData.capturedValuesCandidate)
        {
            // Release the previous candidate before replacing it.
            block->globOptData.capturedValuesCandidate->DecrementRefCount();
        }
        block->globOptData.capturedValuesCandidate = bailOutInfo->capturedValues;
        block->globOptData.capturedValuesCandidate->IncrementRefCount();

        // reset changed syms to track symbols change after the above captured values candidate
        this->changedSymsAfterIncBailoutCandidate->ClearAll();
    }
}
  243. void
  244. GlobOpt::CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator)
  245. {
  246. FOREACH_BITSET_IN_SPARSEBV(id, this->currentBlock->globOptData.argObjSyms)
  247. {
  248. StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
  249. Assert(stackSym != nullptr);
  250. if (!stackSym->HasByteCodeRegSlot())
  251. {
  252. continue;
  253. }
  254. if (!bailOutInfo->capturedValues->argObjSyms)
  255. {
  256. bailOutInfo->capturedValues->argObjSyms = JitAnew(allocator, BVSparse<JitArenaAllocator>, allocator);
  257. }
  258. bailOutInfo->capturedValues->argObjSyms->Set(id);
  259. // Add to BailOutInfo
  260. }
  261. NEXT_BITSET_IN_SPARSEBV
  262. }
  263. void
  264. GlobOpt::TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  265. {
  266. if(instr->m_func->GetJITFunctionBody()->IsAsmJsMode())
  267. {
  268. return;
  269. }
  270. IR::Opnd * src = instr->GetSrc1();
  271. if (src)
  272. {
  273. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  274. src = instr->GetSrc2();
  275. if (src)
  276. {
  277. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  278. }
  279. }
  280. #if DBG
  281. // There should be no more than one property sym used.
  282. PropertySym *propertySymFromSrc = *pPropertySym;
  283. #endif
  284. IR::Opnd * dst = instr->GetDst();
  285. if (dst)
  286. {
  287. StackSym *stackSym = dst->GetStackSym();
  288. // We want stackSym uses: IndirOpnd and SymOpnds of propertySyms.
  289. // RegOpnd and SymOPnd of StackSyms are stack sym defs.
  290. if (stackSym == NULL)
  291. {
  292. TrackByteCodeSymUsed(dst, instrByteCodeStackSymUsed, pPropertySym);
  293. }
  294. }
  295. #if DBG
  296. AssertMsg(propertySymFromSrc == NULL || propertySymFromSrc == *pPropertySym,
  297. "Lost a property sym use?");
  298. #endif
  299. }
  300. void
  301. GlobOpt::TrackByteCodeSymUsed(IR::RegOpnd * regOpnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  302. {
  303. // Check JITOptimizedReg to catch case where baseOpnd of indir was optimized.
  304. if (!regOpnd->GetIsJITOptimizedReg())
  305. {
  306. TrackByteCodeSymUsed(regOpnd->m_sym, instrByteCodeStackSymUsed);
  307. }
  308. }
  309. void
  310. GlobOpt::TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  311. {
  312. if (opnd->GetIsJITOptimizedReg())
  313. {
  314. AssertMsg(!opnd->IsIndirOpnd(), "TrackByteCodeSymUsed doesn't expect IndirOpnd with IsJITOptimizedReg turned on");
  315. return;
  316. }
  317. switch(opnd->GetKind())
  318. {
  319. case IR::OpndKindReg:
  320. TrackByteCodeSymUsed(opnd->AsRegOpnd(), instrByteCodeStackSymUsed);
  321. break;
  322. case IR::OpndKindSym:
  323. {
  324. Sym * sym = opnd->AsSymOpnd()->m_sym;
  325. if (sym->IsStackSym())
  326. {
  327. TrackByteCodeSymUsed(sym->AsStackSym(), instrByteCodeStackSymUsed);
  328. }
  329. else
  330. {
  331. TrackByteCodeSymUsed(sym->AsPropertySym()->m_stackSym, instrByteCodeStackSymUsed);
  332. *pPropertySym = sym->AsPropertySym();
  333. }
  334. }
  335. break;
  336. case IR::OpndKindIndir:
  337. TrackByteCodeSymUsed(opnd->AsIndirOpnd()->GetBaseOpnd(), instrByteCodeStackSymUsed);
  338. {
  339. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  340. if (indexOpnd)
  341. {
  342. TrackByteCodeSymUsed(indexOpnd, instrByteCodeStackSymUsed);
  343. }
  344. }
  345. break;
  346. }
  347. }
  348. void
  349. GlobOpt::TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  350. {
  351. // We only care about stack sym that has a corresponding byte code register
  352. if (sym->HasByteCodeRegSlot())
  353. {
  354. if (sym->IsTypeSpec())
  355. {
  356. // It has to have a var version for byte code regs
  357. sym = sym->GetVarEquivSym(nullptr);
  358. }
  359. instrByteCodeStackSymUsed->Set(sym->m_id);
  360. }
  361. }
  362. void
  363. GlobOpt::MarkNonByteCodeUsed(IR::Instr * instr)
  364. {
  365. IR::Opnd * dst = instr->GetDst();
  366. if (dst)
  367. {
  368. MarkNonByteCodeUsed(dst);
  369. }
  370. IR::Opnd * src1 = instr->GetSrc1();
  371. if (src1)
  372. {
  373. MarkNonByteCodeUsed(src1);
  374. IR::Opnd * src2 = instr->GetSrc2();
  375. if (src2)
  376. {
  377. MarkNonByteCodeUsed(src2);
  378. }
  379. }
  380. }
  381. void
  382. GlobOpt::MarkNonByteCodeUsed(IR::Opnd * opnd)
  383. {
  384. switch(opnd->GetKind())
  385. {
  386. case IR::OpndKindReg:
  387. opnd->AsRegOpnd()->SetIsJITOptimizedReg(true);
  388. break;
  389. case IR::OpndKindIndir:
  390. opnd->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  391. {
  392. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  393. if (indexOpnd)
  394. {
  395. indexOpnd->SetIsJITOptimizedReg(true);
  396. }
  397. }
  398. break;
  399. }
  400. }
  401. void
  402. GlobOpt::CaptureByteCodeSymUses(IR::Instr * instr)
  403. {
  404. if (this->byteCodeUses || this->func->GetJITFunctionBody()->IsAsmJsMode())
  405. {
  406. // We already captured it before.
  407. return;
  408. }
  409. Assert(this->propertySymUse == NULL);
  410. this->byteCodeUses = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  411. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUses, &this->propertySymUse);
  412. AssertMsg(this->byteCodeUses->Equal(this->byteCodeUsesBeforeOpt),
  413. "Instruction edited before capturing the byte code use");
  414. }
// Handle an InlineeEnd instruction: decide (outside the loop pre-pass) whether
// the inlinee's arguments accesses are all optimizable — enabling or disabling
// the inline-args optimization accordingly — then record the inlinee frame
// info, stop tracking the inlinee's arg-object syms, and roll the inlinee's
// arg-out size and unoptimizable-reference counts back into the parent.
void
GlobOpt::ProcessInlineeEnd(IR::Instr* instr)
{
    if (!PHASE_OFF(Js::StackArgLenConstOptPhase, instr->m_func) &&
        !IsLoopPrePass() &&
        (!instr->m_func->GetJITFunctionBody()->UsesArgumentsObject() || instr->m_func->IsStackArgsEnabled()))
    {
        if (instr->m_func->unoptimizableArgumentsObjReference == 0 && instr->m_func->unoptimizableArgumentsObjReferenceInInlinees == 0)
        {
            // No unoptimizable arguments-object references anywhere in this
            // inlinee (or its inlinees): the args opt can be (re-)enabled,
            // restoring the frame info cached at InlineeStart.
            instr->m_func->hasUnoptimizedArgumentsAccess = false;
            if (!instr->m_func->m_hasInlineArgsOpt && DoInlineArgsOpt(instr->m_func))
            {
                instr->m_func->m_hasInlineArgsOpt = true;
                Assert(instr->m_func->cachedInlineeFrameInfo);
                instr->m_func->frameInfo = instr->m_func->cachedInlineeFrameInfo;
            }
        }
        else
        {
            // Unoptimizable references were found: turn the args opt off again
            // and drop any frame info recorded for it.
            instr->m_func->hasUnoptimizedArgumentsAccess = true;
            if (instr->m_func->m_hasInlineArgsOpt && instr->m_func->cachedInlineeFrameInfo)
            {
                instr->m_func->m_hasInlineArgsOpt = false;
                ClearInlineeFrameInfo(instr);
            }
        }
    }

    if (instr->m_func->m_hasInlineArgsOpt)
    {
        RecordInlineeFrameInfo(instr);
    }
    EndTrackingOfArgObjSymsForInlinee();

    // Release the stack space the inlinee's arg-outs were occupying.
    Assert(this->currentBlock->globOptData.inlinedArgOutSize >= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false));
    this->currentBlock->globOptData.inlinedArgOutSize -= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false);

    // Propagate this inlinee's unoptimizable-reference count to the parent.
    instr->m_func->GetParentFunc()->unoptimizableArgumentsObjReferenceInInlinees += instr->m_func->unoptimizableArgumentsObjReference;
}
  451. void
  452. GlobOpt::TrackCalls(IR::Instr * instr)
  453. {
  454. // Keep track of out params for bailout
  455. switch (instr->m_opcode)
  456. {
  457. case Js::OpCode::StartCall:
  458. Assert(!this->isCallHelper);
  459. Assert(instr->GetDst()->IsRegOpnd());
  460. Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
  461. if (this->currentBlock->globOptData.callSequence == nullptr)
  462. {
  463. this->currentBlock->globOptData.callSequence = JitAnew(this->alloc, SListBase<IR::Opnd *>);
  464. }
  465. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  466. this->currentBlock->globOptData.totalOutParamCount += instr->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
  467. this->currentBlock->globOptData.startCallCount++;
  468. break;
  469. case Js::OpCode::BytecodeArgOutCapture:
  470. {
  471. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  472. this->currentBlock->globOptData.argOutCount++;
  473. break;
  474. }
  475. case Js::OpCode::ArgOut_A:
  476. case Js::OpCode::ArgOut_A_Inline:
  477. case Js::OpCode::ArgOut_A_FixupForStackArgs:
  478. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  479. case Js::OpCode::ArgOut_A_Dynamic:
  480. case Js::OpCode::ArgOut_A_FromStackArgs:
  481. case Js::OpCode::ArgOut_A_SpreadArg:
  482. {
  483. IR::Opnd * opnd = instr->GetDst();
  484. if (opnd->IsSymOpnd())
  485. {
  486. Assert(!this->isCallHelper);
  487. Assert(!this->currentBlock->globOptData.callSequence->Empty());
  488. StackSym* stackSym = opnd->AsSymOpnd()->m_sym->AsStackSym();
  489. // These scenarios are already tracked using BytecodeArgOutCapture,
  490. // and we don't want to be tracking ArgOut_A_FixupForStackArgs as these are only visible to the JIT and we should not be restoring them upon bailout.
  491. if (!stackSym->m_isArgCaptured && instr->m_opcode != Js::OpCode::ArgOut_A_FixupForStackArgs)
  492. {
  493. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  494. this->currentBlock->globOptData.argOutCount++;
  495. }
  496. Assert(stackSym->IsArgSlotSym());
  497. if (stackSym->m_isInlinedArgSlot)
  498. {
  499. uint size = TySize[instr->GetDst()->GetType()];
  500. this->currentBlock->globOptData.inlinedArgOutSize += size < MachPtr ? MachPtr : size;
  501. // We want to update the offsets only once: don't do in prepass.
  502. if (!this->IsLoopPrePass() && stackSym->m_offset >= 0)
  503. {
  504. Func * currentFunc = instr->m_func;
  505. stackSym->FixupStackOffset(currentFunc);
  506. }
  507. }
  508. }
  509. else
  510. {
  511. // It is a reg opnd if it is a helper call
  512. // It should be all ArgOut until the CallHelper instruction
  513. Assert(opnd->IsRegOpnd());
  514. this->isCallHelper = true;
  515. }
  516. if (instr->m_opcode == Js::OpCode::ArgOut_A_FixupForStackArgs && !this->IsLoopPrePass())
  517. {
  518. instr->m_opcode = Js::OpCode::ArgOut_A_Inline;
  519. }
  520. break;
  521. }
  522. case Js::OpCode::InlineeStart:
  523. {
  524. Assert(instr->m_func->GetParentFunc() == this->currentBlock->globOptData.curFunc);
  525. Assert(instr->m_func->GetParentFunc());
  526. this->currentBlock->globOptData.curFunc = instr->m_func;
  527. this->func->UpdateMaxInlineeArgOutSize(this->currentBlock->globOptData.inlinedArgOutSize);
  528. this->EndTrackCall(instr);
  529. InlineeFrameInfo* inlineeFrameInfo = InlineeFrameInfo::New(instr->m_func->m_alloc);
  530. inlineeFrameInfo->functionSymStartValue = instr->GetSrc1()->GetSym() ?
  531. CurrentBlockData()->FindValue(instr->GetSrc1()->GetSym()) : nullptr;
  532. inlineeFrameInfo->floatSyms = CurrentBlockData()->liveFloat64Syms->CopyNew(this->alloc);
  533. inlineeFrameInfo->intSyms = CurrentBlockData()->liveInt32Syms->MinusNew(CurrentBlockData()->liveLossyInt32Syms, this->alloc);
  534. inlineeFrameInfo->varSyms = CurrentBlockData()->liveVarSyms->CopyNew(this->alloc);
  535. if (DoInlineArgsOpt(instr->m_func))
  536. {
  537. instr->m_func->m_hasInlineArgsOpt = true;
  538. instr->m_func->frameInfo = inlineeFrameInfo;
  539. }
  540. else
  541. {
  542. instr->m_func->cachedInlineeFrameInfo = inlineeFrameInfo;
  543. }
  544. break;
  545. }
  546. case Js::OpCode::EndCallForPolymorphicInlinee:
  547. // Have this opcode mimic the functions of both InlineeStart and InlineeEnd in the bailout block of a polymorphic call inlined using fixed methods.
  548. this->EndTrackCall(instr);
  549. break;
  550. case Js::OpCode::CallHelper:
  551. case Js::OpCode::IsInst:
  552. Assert(this->isCallHelper);
  553. this->isCallHelper = false;
  554. break;
  555. case Js::OpCode::InlineeEnd:
  556. ProcessInlineeEnd(instr);
  557. break;
  558. case Js::OpCode::InlineeMetaArg:
  559. {
  560. Assert(instr->GetDst()->IsSymOpnd());
  561. StackSym * stackSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
  562. Assert(stackSym->IsArgSlotSym());
  563. // InlineeMetaArg has the m_func set as the "inlinee" and not the "inliner"
  564. // TODO: Review this and fix the m_func of InlineeMetaArg to be "inliner" (as for the rest of the ArgOut's)
  565. // We want to update the offsets only once: don't do in prepass.
  566. if (!this->IsLoopPrePass())
  567. {
  568. Func * currentFunc = instr->m_func->GetParentFunc();
  569. stackSym->FixupStackOffset(currentFunc);
  570. }
  571. this->currentBlock->globOptData.inlinedArgOutSize += MachPtr;
  572. break;
  573. }
  574. case Js::OpCode::InlineBuiltInStart:
  575. this->inInlinedBuiltIn = true;
  576. break;
  577. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  578. case Js::OpCode::InlineBuiltInEnd:
  579. {
  580. // If extra bailouts were added for the InlineMathXXX call itself,
  581. // move InlineeBuiltInStart just above the InlineMathXXX.
  582. // This is needed so that the function argument has lifetime after all bailouts for InlineMathXXX,
  583. // otherwise when we bailout we would get wrong function.
  584. IR::Instr* inlineBuiltInStartInstr = instr->m_prev;
  585. while (inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
  586. {
  587. inlineBuiltInStartInstr = inlineBuiltInStartInstr->m_prev;
  588. }
  589. IR::Instr *byteCodeUsesInstr = inlineBuiltInStartInstr->m_prev;
  590. IR::Instr * insertBeforeInstr = instr->m_prev;
  591. IR::Instr * tmpInstr = insertBeforeInstr;
  592. while(tmpInstr->m_opcode != Js::OpCode::InlineBuiltInStart )
  593. {
  594. if(tmpInstr->m_opcode == Js::OpCode::ByteCodeUses)
  595. {
  596. insertBeforeInstr = tmpInstr;
  597. }
  598. tmpInstr = tmpInstr->m_prev;
  599. }
  600. inlineBuiltInStartInstr->Unlink();
  601. if(insertBeforeInstr == instr->m_prev)
  602. {
  603. insertBeforeInstr->InsertBefore(inlineBuiltInStartInstr);
  604. }
  605. else
  606. {
  607. insertBeforeInstr->m_prev->InsertBefore(inlineBuiltInStartInstr);
  608. }
  609. // Need to move the byte code uses instructions associated with inline built-in start instruction as well. For instance,
  610. // copy-prop may have replaced the function sym and inserted a byte code uses for the original sym holding the function.
  611. // That byte code uses instruction needs to appear after bailouts inserted for the InlinMathXXX instruction since the
  612. // byte code register holding the function object needs to be restored on bailout.
  613. IR::Instr *const insertByteCodeUsesAfterInstr = inlineBuiltInStartInstr->m_prev;
  614. if(byteCodeUsesInstr != insertByteCodeUsesAfterInstr)
  615. {
  616. // The InlineBuiltInStart instruction was moved, look for its ByteCodeUses instructions that also need to be moved
  617. while(
  618. byteCodeUsesInstr->IsByteCodeUsesInstr() &&
  619. byteCodeUsesInstr->AsByteCodeUsesInstr()->GetByteCodeOffset() == inlineBuiltInStartInstr->GetByteCodeOffset())
  620. {
  621. IR::Instr *const instrToMove = byteCodeUsesInstr;
  622. byteCodeUsesInstr = byteCodeUsesInstr->m_prev;
  623. instrToMove->Unlink();
  624. insertByteCodeUsesAfterInstr->InsertAfter(instrToMove);
  625. }
  626. }
  627. // The following code makes more sense to be processed when we hit InlineeBuiltInStart,
  628. // but when extra bailouts are added for the InlineMathXXX and InlineArrayPop instructions itself, those bailouts
  629. // need to know about current bailout record, but since they are added after TrackCalls is called
  630. // for InlineeBuiltInStart, we can't clear current record when got InlineeBuiltInStart
  631. // Do not track calls for InlineNonTrackingBuiltInEnd, as it is already tracked for InlineArrayPop
  632. if(instr->m_opcode == Js::OpCode::InlineBuiltInEnd)
  633. {
  634. this->EndTrackCall(instr);
  635. }
  636. Assert(this->currentBlock->globOptData.inlinedArgOutSize >= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false));
  637. this->currentBlock->globOptData.inlinedArgOutSize -= instr->GetArgOutSize(/*getInterpreterArgOutCount*/ false);
  638. this->inInlinedBuiltIn = false;
  639. break;
  640. }
  641. case Js::OpCode::InlineArrayPop:
  642. {
  643. // EndTrackCall should be called here as the Post-op BailOutOnImplicitCalls will bail out to the instruction after the Pop function call instr.
  644. // This bailout shouldn't be tracking the call sequence as it will then erroneously reserve stack space for arguments when the call would have already happened
  645. // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for InlineArrayPop.
  646. this->EndTrackCall(instr);
  647. break;
  648. }
  649. default:
  650. if (OpCodeAttr::CallInstr(instr->m_opcode))
  651. {
  652. this->EndTrackCall(instr);
  653. // With `InlineeBuiltInStart` and `InlineeBuiltInEnd` surrounding CallI/CallIDirect/CallIDynamic/CallIFixed,
  654. // we are not popping the call sequence correctly. That makes the bailout code thinks that we need to restore
  655. // argouts of the remaining call even though we shouldn't.
  656. // Also see Inline::InlineApplyWithArgumentsObject, Inline::InlineApplyWithoutArrayArgument, Inline::InlineCall
  657. // in which we set the end tag instruction's opcode to InlineNonTrackingBuiltInEnd
  658. if (this->inInlinedBuiltIn &&
  659. (instr->m_opcode == Js::OpCode::CallDirect || instr->m_opcode == Js::OpCode::CallI ||
  660. instr->m_opcode == Js::OpCode::CallIDynamic || instr->m_opcode == Js::OpCode::CallIFixed))
  661. {
  662. // We can end up in this situation when a built-in apply target is inlined to a CallDirect. We have the following IR:
  663. //
  664. // StartCall
  665. // ArgOut_InlineBuiltIn
  666. // ArgOut_InlineBuiltIn
  667. // ArgOut_InlineBuiltIn
  668. // InlineBuiltInStart
  669. // ArgOut_A_InlineSpecialized
  670. // ArgOut_A
  671. // ArgOut_A
  672. // CallDirect
  673. // InlineNonTrackingBuiltInEnd
  674. //
  675. // We need to call EndTrackCall twice for CallDirect in this case. The CallDirect may get a BailOutOnImplicitCalls later,
  676. // but it should not be tracking the call sequence for the apply call as it is a post op bailout and the call would have
  677. // happened when we bail out.
  678. // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for CallDirect.
  679. this->EndTrackCall(instr);
  680. }
  681. }
  682. break;
  683. }
  684. }
  685. void GlobOpt::ClearInlineeFrameInfo(IR::Instr* inlineeEnd)
  686. {
  687. if (this->IsLoopPrePass())
  688. {
  689. return;
  690. }
  691. InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
  692. inlineeEnd->m_func->frameInfo = nullptr;
  693. if (!frameInfo || !frameInfo->isRecorded)
  694. {
  695. return;
  696. }
  697. frameInfo->function = InlineFrameInfoValue();
  698. frameInfo->arguments->Clear();
  699. }
void GlobOpt::RecordInlineeFrameInfo(IR::Instr* inlineeEnd)
{
    // Records, on the inlinee func's InlineeFrameInfo, how to materialize the
    // inlinee's function object and arguments at bailout time: each is captured
    // either as a constant value or as a stack sym (in the live var/int32/float64
    // representation). Skipped entirely during the loop prepass.
    if (this->IsLoopPrePass())
    {
        return;
    }
    InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
    if (frameInfo->isRecorded)
    {
        Assert(frameInfo->function.type != InlineeFrameInfoValueType_None);
        // Due to Cmp peeps in flow graph - InlineeEnd can be cloned.
        return;
    }
    inlineeEnd->IterateArgInstrs([=] (IR::Instr* argInstr)
    {
        if (argInstr->m_opcode == Js::OpCode::InlineeStart)
        {
            // InlineeStart carries the inlined function object in src1.
            Assert(frameInfo->function.type == InlineeFrameInfoValueType_None);
            IR::RegOpnd* functionObject = argInstr->GetSrc1()->AsRegOpnd();
            if (functionObject->m_sym->IsConst())
            {
                // Constant function object: capture the value directly.
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym->GetConstValueForBailout());
            }
            else
            {
                // If the value of the functionObject symbol has changed between the inlineeStart and the inlineeEnd,
                // we don't record the inlinee frame info (see OS#18318884).
                Assert(frameInfo->functionSymStartValue != nullptr);
                if (!frameInfo->functionSymStartValue->IsEqualTo(CurrentBlockData()->FindValue(functionObject->m_sym)))
                {
                    argInstr->m_func->DisableCanDoInlineArgOpt();
                    // Returning true stops the IterateArgInstrs walk.
                    return true;
                }
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym);
            }
        }
        else if(!GetIsAsmJSFunc()) // don't care about saving arg syms for wasm/asm.js
        {
            // This is an ArgOut of the inlinee; record how to restore it.
            Js::ArgSlot argSlot = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
            IR::Opnd* argOpnd = argInstr->GetSrc1();
            InlineFrameInfoValue frameInfoValue;
            StackSym* argSym = argOpnd->GetStackSym();
            if (!argSym)
            {
                // No stack sym: the operand itself is a constant.
                frameInfoValue = InlineFrameInfoValue(argOpnd->GetConstValue());
            }
            else if (argSym->IsConst() && !argSym->IsInt64Const())
            {
                // InlineFrameInfo doesn't currently support Int64Const
                frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
            }
            else
            {
                if (!PHASE_OFF(Js::CopyPropPhase, func))
                {
                    // Prefer the copy-prop sym if it is the one live in var form
                    // (the original sym's var version may no longer be live).
                    Value* value = this->currentBlock->globOptData.FindValue(argSym);
                    if (value)
                    {
                        StackSym * copyPropSym = this->currentBlock->globOptData.GetCopyPropSym(argSym, value);
                        if (copyPropSym &&
                            frameInfo->varSyms->TestEmpty() && frameInfo->varSyms->Test(copyPropSym->m_id))
                        {
                            argSym = copyPropSym;
                        }
                    }
                }

                if (frameInfo->intSyms->TestEmpty() && frameInfo->intSyms->Test(argSym->m_id))
                {
                    // Var version of the sym is not live, use the int32 version
                    argSym = argSym->GetInt32EquivSym(nullptr);
                    Assert(argSym);
                }
                else if (frameInfo->floatSyms->TestEmpty() && frameInfo->floatSyms->Test(argSym->m_id))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    argSym = argSym->GetFloat64EquivSym(nullptr);
                    Assert(argSym);
                }
                else
                {
                    Assert(frameInfo->varSyms->Test(argSym->m_id));
                }

                if (argSym->IsConst() && !argSym->IsInt64Const())
                {
                    frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
                }
                else
                {
                    frameInfoValue = InlineFrameInfoValue(argSym);
                }
            }
            // Arg slots are 1-based; arguments[] is 0-based.
            Assert(argSlot >= 1);
            frameInfo->arguments->SetItem(argSlot - 1, frameInfoValue);
        }
        // Returning false continues the IterateArgInstrs walk.
        return false;
    });

    // The liveness bit-vectors were only needed while recording; free them now.
    JitAdelete(this->alloc, frameInfo->intSyms);
    frameInfo->intSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->floatSyms);
    frameInfo->floatSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->varSyms);
    frameInfo->varSyms = nullptr;
    frameInfo->isRecorded = true;
}
void GlobOpt::EndTrackingOfArgObjSymsForInlinee()
{
    // Called when leaving an inlinee: removes the inlinee func's arguments-object
    // syms from the current block's tracked set and pops curFunc back to the parent.
    Assert(this->currentBlock->globOptData.curFunc->GetParentFunc());
    if (this->currentBlock->globOptData.curFunc->argObjSyms && TrackArgumentsObject())
    {
        // tempBv = syms tracked by the inlinee func that are NOT present in the current block.
        BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        tempBv->Minus(this->currentBlock->globOptData.curFunc->argObjSyms, this->currentBlock->globOptData.argObjSyms);
        if(!tempBv->IsEmpty())
        {
            // This means there are arguments object symbols in the current function which are not in the current block.
            // This could happen when one of the blocks has a throw and arguments object aliased in it and other blocks don't see it.
            // Rare case, abort stack arguments optimization in this case.
            CannotAllocateArgumentsObjectOnStack(this->currentBlock->globOptData.curFunc);
        }
        else
        {
            // All of the inlinee's arg-obj syms are present in the block's set; remove them.
            Assert(this->currentBlock->globOptData.argObjSyms->OrNew(this->currentBlock->globOptData.curFunc->argObjSyms)->Equal(this->currentBlock->globOptData.argObjSyms));
            this->currentBlock->globOptData.argObjSyms->Minus(this->currentBlock->globOptData.curFunc->argObjSyms);
        }
        JitAdelete(this->tempAlloc, tempBv);
    }
    // Resume tracking under the parent function.
    this->currentBlock->globOptData.curFunc = this->currentBlock->globOptData.curFunc->GetParentFunc();
}
void GlobOpt::EndTrackCall(IR::Instr* instr)
{
    // Pops one completed call off the tracked call sequence: removes all of its
    // ArgOut syms plus the StartCall sym from the head of callSequence, and updates
    // the argOutCount/totalOutParamCount/startCallCount accounting accordingly.
    Assert(instr);
    Assert(OpCodeAttr::CallInstr(instr->m_opcode) || instr->m_opcode == Js::OpCode::InlineeStart || instr->m_opcode == Js::OpCode::InlineBuiltInEnd
        || instr->m_opcode == Js::OpCode::InlineArrayPop || instr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee);

    Assert(!this->isCallHelper);
    Assert(!this->currentBlock->globOptData.callSequence->Empty());

#if DBG
    // Snapshot for the arg-count consistency check below.
    uint origArgOutCount = this->currentBlock->globOptData.argOutCount;
#endif
    // Pop every arg-slot sym belonging to the innermost (head) call.
    while (this->currentBlock->globOptData.callSequence->Head()->GetStackSym()->HasArgSlotNum())
    {
        this->currentBlock->globOptData.argOutCount--;
        this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);
    }
    // The next entry must be the StartCall's dst sym; pop it too.
    StackSym * sym = this->currentBlock->globOptData.callSequence->Head()->AsRegOpnd()->m_sym->AsStackSym();
    this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);

#if DBG
    Assert(sym->m_isSingleDef);
    Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);

    // Number of argument set should be the same as indicated at StartCall
    // except NewScObject has an implicit arg1
    Assert((uint)sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true) ==
        origArgOutCount - this->currentBlock->globOptData.argOutCount +
        (instr->m_opcode == Js::OpCode::NewScObject || instr->m_opcode == Js::OpCode::NewScObjArray
        || instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread));

#endif

    this->currentBlock->globOptData.totalOutParamCount -= sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
    this->currentBlock->globOptData.startCallCount--;
}
void
GlobOpt::FillBailOutInfo(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    // Populates bailOutInfo with everything needed to restore interpreter state
    // at this point: live sym bit-vectors, stack-literal init-field counts,
    // captured arguments objects, the in-flight call sequence (StartCalls and
    // ArgOuts), and known constant values.
    AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");

    BVSparse<JitArenaAllocator>* argsToCapture = nullptr;

    // Copy liveness so bailout knows which representation of each sym to restore from.
    bailOutInfo->liveVarSyms = block->globOptData.liveVarSyms->CopyNew(this->func->m_alloc);
    bailOutInfo->liveFloat64Syms = block->globOptData.liveFloat64Syms->CopyNew(this->func->m_alloc);

    // The live int32 syms in the bailout info are only the syms resulting from lossless conversion to int. If the int32 value
    // was created from a lossy conversion to int, the original var value cannot be re-materialized from the int32 value. So, the
    // int32 version is considered to be not live for the purposes of bailout, which forces the var or float versions to be used
    // directly for restoring the value during bailout. Otherwise, bailout may try to re-materialize the var value by converting
    // the lossily-converted int value back into a var, restoring the wrong value.
    bailOutInfo->liveLosslessInt32Syms =
        block->globOptData.liveInt32Syms->MinusNew(block->globOptData.liveLossyInt32Syms, this->func->m_alloc);

    // Save the stack literal init field count so we can null out the uninitialized fields
    StackLiteralInitFldDataMap * stackLiteralInitFldDataMap = block->globOptData.stackLiteralInitFldDataMap;
    if (stackLiteralInitFldDataMap != nullptr)
    {
        uint stackLiteralInitFldDataCount = stackLiteralInitFldDataMap->Count();
        if (stackLiteralInitFldDataCount != 0)
        {
            auto stackLiteralBailOutInfo = AnewArray(this->func->m_alloc,
                BailOutInfo::StackLiteralBailOutInfo, stackLiteralInitFldDataCount);
            uint i = 0;
            stackLiteralInitFldDataMap->Map(
                [stackLiteralBailOutInfo, stackLiteralInitFldDataCount, &i](StackSym * stackSym, StackLiteralInitFldData const& data)
                {
                    Assert(i < stackLiteralInitFldDataCount);
                    stackLiteralBailOutInfo[i].stackSym = stackSym;
                    stackLiteralBailOutInfo[i].initFldCount = data.currentInitFldCount;
                    i++;
                });

            Assert(i == stackLiteralInitFldDataCount);
            bailOutInfo->stackLiteralBailOutInfoCount = stackLiteralInitFldDataCount;
            bailOutInfo->stackLiteralBailOutInfo = stackLiteralBailOutInfo;
        }
    }

    if (TrackArgumentsObject())
    {
        this->CaptureArguments(block, bailOutInfo, this->func->m_alloc);
    }

    // Record the in-flight call sequence (walked head-to-tail, i.e. innermost call first).
    if (block->globOptData.callSequence && !block->globOptData.callSequence->Empty())
    {
        uint currentArgOutCount = 0;
        uint startCallNumber = block->globOptData.startCallCount;

        bailOutInfo->startCallInfo = JitAnewArray(this->func->m_alloc, BailOutInfo::StartCallInfo, startCallNumber);
        bailOutInfo->startCallCount = startCallNumber;

        // Save the start call's func to identify the function (inlined) that the call sequence is for
        // We might not have any arg out yet to get the function from
        bailOutInfo->startCallFunc = JitAnewArray(this->func->m_alloc, Func *, startCallNumber);
#ifdef _M_IX86
        bailOutInfo->inlinedStartCall = BVFixed::New(startCallNumber, this->func->m_alloc, false);
#endif
        uint totalOutParamCount = block->globOptData.totalOutParamCount;
        bailOutInfo->totalOutParamCount = totalOutParamCount;
        bailOutInfo->argOutSyms = JitAnewArrayZ(this->func->m_alloc, StackSym *, totalOutParamCount);

        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, block->globOptData.callSequence)
        {
            if(opnd->GetStackSym()->HasArgSlotNum())
            {
                // Entry is an ArgOut of the current (innermost unfinished) call.
                StackSym * sym;
                if(opnd->IsSymOpnd())
                {
                    sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
                    Assert(sym->IsArgSlotSym());
                    Assert(sym->m_isSingleDef);
                    Assert(sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Inline
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_InlineBuiltIn
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_SpreadArg
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
                }
                else
                {
                    sym = opnd->GetStackSym();
                    Assert(this->currentBlock->globOptData.FindValue(sym));
                    // StackSym args need to be re-captured
                    if (!argsToCapture)
                    {
                        argsToCapture = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                    }

                    argsToCapture->Set(sym->m_id);
                }
                Assert(totalOutParamCount != 0);
                Assert(totalOutParamCount > currentArgOutCount);
                currentArgOutCount++;
#pragma prefast(suppress:26000, "currentArgOutCount is never 0");
                // Fill argOutSyms from the tail of each call's section backwards.
                bailOutInfo->argOutSyms[totalOutParamCount - currentArgOutCount] = sym;
                // Note that there could be ArgOuts below current bailout instr that belong to current call (currentArgOutCount < argOutCount),
                // in which case we will have nulls in argOutSyms[] in start of section for current call, because we fill from tail.
                // Example: StartCall 3, ArgOut1,.. ArgOut2, Bailout,.. Argout3 -> [NULL, ArgOut1, ArgOut2].
            }
            else
            {
                // Entry is a StartCall: close out the current call's section.
                Assert(opnd->IsRegOpnd());
                StackSym * sym = opnd->AsRegOpnd()->m_sym;
                Assert(!sym->IsArgSlotSym());
                Assert(sym->m_isSingleDef);
                Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);

                Assert(startCallNumber != 0);
                startCallNumber--;

                bailOutInfo->startCallFunc[startCallNumber] = sym->m_instrDef->m_func;
#ifdef _M_IX86
                if (sym->m_isInlinedArgSlot)
                {
                    bailOutInfo->inlinedStartCall->Set(startCallNumber);
                }
#endif
                uint argOutCount = sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
                Assert(totalOutParamCount >= argOutCount);
                Assert(argOutCount >= currentArgOutCount);

                bailOutInfo->RecordStartCallInfo(startCallNumber, sym->m_instrDef);
                totalOutParamCount -= argOutCount;
                currentArgOutCount = 0;

            }
        }
        NEXT_SLISTBASE_ENTRY;

        // All calls and all out-params must have been accounted for.
        Assert(totalOutParamCount == 0);
        Assert(startCallNumber == 0);
        Assert(currentArgOutCount == 0);
    }

    // Save the constant values that we know so we can restore them directly.
    // This allows us to dead store the constant value assign.
    this->CaptureValues(block, bailOutInfo, argsToCapture);
}
  982. void
  983. GlobOpt::FillBailOutInfo(BasicBlock *block, _In_ IR::Instr * instr)
  984. {
  985. AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");
  986. Assert(instr->HasBailOutInfo());
  987. if (this->isRecursiveCallOnLandingPad)
  988. {
  989. Assert(block->IsLandingPad());
  990. Loop * loop = block->next->loop;
  991. EnsureBailTarget(loop);
  992. if (instr->GetBailOutInfo() != loop->bailOutInfo)
  993. {
  994. instr->ReplaceBailOutInfo(loop->bailOutInfo);
  995. }
  996. return;
  997. }
  998. FillBailOutInfo(block, instr->GetBailOutInfo());
  999. }
IR::ByteCodeUsesInstr *
GlobOpt::InsertByteCodeUses(IR::Instr * instr, bool includeDef)
{
    // Inserts, before "instr", a ByteCodeUses instruction carrying the currently
    // captured byte-code sym uses (this->byteCodeUses / propertySymUse), and
    // optionally a fake dst for instr's own byte-code destination register.
    // Consumes and clears the captured state. Returns the new instruction, or
    // nullptr if nothing needed to be recorded (always nullptr for asm.js).
    IR::ByteCodeUsesInstr * byteCodeUsesInstr = nullptr;
    if (!this->byteCodeUses)
    {
        // Byte-code uses are not tracked for asm.js functions.
        Assert(this->isAsmJSFunc);
        return nullptr;
    }
    IR::RegOpnd * dstOpnd = nullptr;
    if (includeDef)
    {
        IR::Opnd * opnd = instr->GetDst();
        if (opnd && opnd->IsRegOpnd())
        {
            dstOpnd = opnd->AsRegOpnd();
            if (dstOpnd->GetIsJITOptimizedReg() || !dstOpnd->m_sym->HasByteCodeRegSlot())
            {
                // Only dsts that map to a real byte-code register are recorded.
                dstOpnd = nullptr;
            }
        }
    }

    if (!this->byteCodeUses->IsEmpty() || this->propertySymUse || dstOpnd != nullptr)
    {
        // Prefer instr's own byte-code offset; if it has none, fall back to the
        // bailout offset from its bailout info.
        if (instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset || !instr->HasBailOutInfo())
        {
            byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
        }
        else
        {
            byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr->m_func, instr->GetBailOutInfo()->bailOutOffset);
        }
        if (!this->byteCodeUses->IsEmpty())
        {
            byteCodeUsesInstr->SetBV(byteCodeUses->CopyNew(instr->m_func->m_alloc));
        }
        if (dstOpnd != nullptr)
        {
            byteCodeUsesInstr->SetFakeDst(dstOpnd);
        }
        if (this->propertySymUse)
        {
            byteCodeUsesInstr->propertySymUse = this->propertySymUse;
        }
        instr->InsertBefore(byteCodeUsesInstr);
    }

    // The captured state has been consumed; reset it for the next instruction.
    JitAdelete(this->alloc, this->byteCodeUses);
    this->byteCodeUses = nullptr;
    this->propertySymUse = nullptr;
    return byteCodeUsesInstr;
}
  1051. IR::ByteCodeUsesInstr *
  1052. GlobOpt::ConvertToByteCodeUses(IR::Instr * instr)
  1053. {
  1054. #if DBG
  1055. PropertySym *propertySymUseBefore = NULL;
  1056. Assert(this->byteCodeUses == nullptr);
  1057. this->byteCodeUsesBeforeOpt->ClearAll();
  1058. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
  1059. #endif
  1060. this->CaptureByteCodeSymUses(instr);
  1061. IR::ByteCodeUsesInstr * byteCodeUsesInstr = this->InsertByteCodeUses(instr, true);
  1062. instr->Remove();
  1063. if (byteCodeUsesInstr)
  1064. {
  1065. byteCodeUsesInstr->AggregateFollowingByteCodeUses();
  1066. }
  1067. return byteCodeUsesInstr;
  1068. }
  1069. bool
  1070. GlobOpt::MayNeedBailOut(Loop * loop) const
  1071. {
  1072. Assert(this->IsLoopPrePass());
  1073. return loop->CanHoistInvariants() || this->DoFieldCopyProp(loop) ;
  1074. }
  1075. bool
  1076. GlobOpt::MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val)
  1077. {
  1078. switch (opnd->GetKind())
  1079. {
  1080. case IR::OpndKindAddr:
  1081. case IR::OpndKindFloatConst:
  1082. case IR::OpndKindIntConst:
  1083. return false;
  1084. case IR::OpndKindReg:
  1085. // Only need implicit call if the operation will call ToPrimitive and we haven't prove
  1086. // that it is already a primitive
  1087. return
  1088. !(val && val->GetValueInfo()->IsPrimitive()) &&
  1089. !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
  1090. !opnd->AsRegOpnd()->m_sym->IsInt32() &&
  1091. !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
  1092. !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
  1093. !opnd->AsRegOpnd()->m_sym->IsIntConst();
  1094. case IR::OpndKindSym:
  1095. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  1096. {
  1097. IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
  1098. if (!propertySymOpnd->MayHaveImplicitCall())
  1099. {
  1100. return false;
  1101. }
  1102. }
  1103. return true;
  1104. default:
  1105. return true;
  1106. };
  1107. }
bool
GlobOpt::IsLazyBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, bool isHoisted) const
{
    // Decides whether this instruction needs a lazy bailout (x64 only) to guard
    // fixed-field invariants: true for call/StElem/StFld variants, and otherwise
    // only when the instruction might make an implicit call that no existing
    // bailout already guards against.
#ifdef _M_X64
    if (!this->func->ShouldDoLazyBailOut() ||
        this->IsLoopPrePass() ||
        isHoisted
    )
    {
        return false;
    }

    if (this->currentBlock->IsLandingPad())
    {
        // Implicit calls are disabled in landing pads, so no lazy bailout is needed there.
        Assert(!instr->HasAnyImplicitCalls() || this->currentBlock->GetNext()->loop->endDisableImplicitCall != nullptr);
        return false;
    }

    // These opcodes can change the value of a field regardless whether the
    // instruction has any implicit call
    if (OpCodeAttr::CallInstr(instr->m_opcode) || instr->IsStElemVariant() || instr->IsStFldVariant())
    {
        return true;
    }

    // Now onto those that might change values of fixed fields through implicit calls.
    // There are certain bailouts that are already attached to this instruction that
    // prevent implicit calls from happening, so we won't need lazy bailout for those.

    // If a type check fails, we will bail out and therefore no need for lazy bailout
    if (instr->HasTypeCheckBailOut())
    {
        return false;
    }

    // We decided to do StackArgs optimization, which means that this instruction
    // could only either be LdElemI_A or TypeofElem, and that it does not have
    // an implicit call. So no need for lazy bailout.
    if (instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOnStackArgsOutOfActualsRange)
    {
        Assert(instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem);
        return false;
    }

    // If all operands are type specialized, we won't generate helper path;
    // therefore no need for lazy bailout
    if (instr->AreAllOpndsTypeSpecialized())
    {
        return false;
    }

    // The instruction might have other bailouts that prevent
    // implicit calls from happening. That is captured in
    // GlobOpt::MayNeedBailOnImplicitCall. So we only
    // need lazy bailout if we think there might be implicit calls
    // or if there aren't any bailouts that prevent them from happening.
    return this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val);
#else // _M_X64
    // Lazy bailout is only supported on x64.
    return false;
#endif
}
void
GlobOpt::GenerateLazyBailOut(IR::Instr *&instr)
{
    // Attaches a lazy bailout (or a preop bail-on-implicit-calls with the lazy
    // bit) to the given instruction; may replace "instr" when a new bailout
    // instruction is generated by GenerateBailAtOperation/GenerateBailAfterOperation.
    //
    // LazyBailOut:
    // + For all StFld variants (o.x), in the forward pass, we set LazyBailOutBit in the instruction.
    //   In DeadStore, we will remove the bit if the field that the instruction is setting to is not fixed
    //   downstream.
    // + For StElem variants (o[x]), we do not need LazyBailOut if the `x` operand is a number because
    //   we currently only "fix" a field if the property name is non-numeric.
    // + For all other cases (instructions that may have implicit calls), we will just add on the bit anyway and figure
    //   out later whether we need LazyBailOut during DeadStore.
    //
    // Note that for StFld and StElem instructions which can change fixed fields whether or not implicit calls will happen,
    // if such instructions already have a preop bailout, they should both have BailOnImplicitCallPreOp and LazyBailOut attached.
    // This is to cover two cases:
    // + if the operation turns out to be an implicit call, we do a preop bailout
    // + if the operation isn't an implicit call, but if it invalidates our fixed field's PropertyGuard, then LazyBailOut preop
    //   is triggered. LazyBailOut preop means that we will perform the StFld/StElem again in the interpreter, but that is fine
    //   since we are simply overwriting the value again.
    if (instr->forcePreOpBailOutIfNeeded)
    {
        // `forcePreOpBailOutIfNeeded` indicates that when we need to bail on implicit calls,
        // the bailout should be preop because these instructions are lowered to multiple helper calls.
        // In such cases, simply adding a postop lazy bailout to the instruction wouldn't be correct,
        // so we must generate a bailout on implicit calls preop in place of lazy bailout.
        if (instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutKind() == IR::BailOutOnImplicitCallsPreOp);
            instr->SetBailOutKind(BailOutInfo::WithLazyBailOut(instr->GetBailOutKind()));
        }
        else
        {
            this->GenerateBailAtOperation(&instr, BailOutInfo::WithLazyBailOut(IR::BailOutOnImplicitCallsPreOp));
        }
    }
    else if (!instr->IsStElemVariant() || this->IsNonNumericRegOpnd(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd(), true /* inGlobOpt */))
    {
        // Either not a StElem, or a StElem whose index may be non-numeric
        // (only non-numeric property stores can invalidate fixed fields).
        if (instr->HasBailOutInfo())
        {
            instr->SetBailOutKind(BailOutInfo::WithLazyBailOut(instr->GetBailOutKind()));
        }
        else
        {
            this->GenerateBailAfterOperation(&instr, IR::LazyBailOut);
        }
    }
}
  1209. bool
  1210. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const
  1211. {
  1212. Assert(!this->IsLoopPrePass());
  1213. return this->IsImplicitCallBailOutCurrentlyNeeded(
  1214. instr, src1Val, src2Val, this->currentBlock,
  1215. (!this->currentBlock->globOptData.liveFields->IsEmpty()) /* hasLiveFields */,
  1216. !this->currentBlock->IsLandingPad() /* mayNeedImplicitCallBailOut */,
  1217. true /* isForwardPass */
  1218. );
  1219. }
bool
GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block,
    bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass, bool mayNeedLazyBailOut) const
{
    // Determines whether this instruction needs a bail-on-implicit-call bailout,
    // based on live values/fields/CSE/array-check-hoist state in the block and
    // (in the backward pass only) the need for lazy bailout.
    //
    // We use BailOnImplicitCallPreOp for fixed field optimization in place of LazyBailOut when
    // an instruction already has a preop bailout. This function is called both from the forward
    // and backward passes to check if implicit bailout is needed and use the result to insert/remove
    // bailout. In the backward pass, we would want to override the decision to not
    // use implicit call to true when we need lazy bailout so that the bailout isn't removed.
    // In the forward pass, however, we don't want to influence the result. So make sure that
    // mayNeedLazyBailOut is false when we are in the forward pass.
    Assert(!isForwardPass || !mayNeedLazyBailOut);

    if (mayNeedImplicitCallBailOut &&
        // If we know that we are calling an accessor, don't insert bailout on implicit calls
        // because we will bail out anyway. However, with fixed field optimization we still
        // want the bailout to prevent any side effects from happening.
        (!instr->CallsAccessor() || mayNeedLazyBailOut) &&
        (
            // Any one of these conditions makes an implicit call dangerous here...
            NeedBailOnImplicitCallForLiveValues(block, isForwardPass) ||
            NeedBailOnImplicitCallForCSE(block, isForwardPass) ||
            NeedBailOnImplicitCallWithFieldOpts(block->loop, hasLiveFields) ||
            NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass) ||
            (instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutMarkTempObject) != 0) ||
            mayNeedLazyBailOut
        ) &&
        // ...and no existing type-check bailout already covers it, and the
        // instruction can actually make an implicit call.
        (!instr->HasTypeCheckBailOut() && MayNeedBailOnImplicitCall(instr, src1Val, src2Val)))
    {
        return true;
    }

#if DBG
    if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryImplicitCallFlag) &&
        !instr->HasBailOutInfo() && MayNeedBailOnImplicitCall(instr, nullptr, nullptr))
    {
        // always add implicit call bailout even if we don't need it, but only on opcode that supports it
        return true;
    }
#endif

    return false;
}
  1259. bool
  1260. GlobOpt::IsTypeCheckProtected(const IR::Instr * instr)
  1261. {
  1262. #if DBG
  1263. IR::Opnd* dst = instr->GetDst();
  1264. IR::Opnd* src1 = instr->GetSrc1();
  1265. IR::Opnd* src2 = instr->GetSrc2();
  1266. AssertMsg(!dst || !dst->IsSymOpnd() || !dst->AsSymOpnd()->IsPropertySymOpnd() ||
  1267. !src1 || !src1->IsSymOpnd() || !src1->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src1 and dst be a PropertySymOpnd.");
  1268. AssertMsg(!src2 || !src2->IsSymOpnd() || !src2->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src2 be a PropertySymOpnd.");
  1269. #endif
  1270. IR::Opnd * opnd = instr->GetDst();
  1271. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1272. {
  1273. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1274. }
  1275. opnd = instr->GetSrc1();
  1276. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1277. {
  1278. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1279. }
  1280. return false;
  1281. }
bool
GlobOpt::NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind)
{
    // Determines whether the instruction's property sym operand requires a type
    // check with bailout, and if so which BailOutKind. Outputs:
    //   *pBailOutKind         - the bailout kind to use (BailOutInvalid if none needed)
    //   *pIsTypeCheckProtected - set only on the no-bailout path when the operand
    //                            is already protected by an upstream type check.
    if (instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType || instr->m_opcode == Js::OpCode::LdMethodFldPolyInlineMiss)
    {
        return false;
    }
    // CheckFixedFld always requires a type check and bailout either at the instruction or upstream.
    Assert(instr->m_opcode != Js::OpCode::CheckFixedFld || (propertySymOpnd->UsesFixedValue() && propertySymOpnd->MayNeedTypeCheckProtection()));

    if (propertySymOpnd->MayNeedTypeCheckProtection())
    {
        bool isCheckFixedFld = instr->m_opcode == Js::OpCode::CheckFixedFld;
        AssertMsg(!isCheckFixedFld || !PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) ||
            !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed method/data phase disabled?");
        Assert(!isStore || !isCheckFixedFld);
        // We don't share caches between field loads and stores. We should never have a field store involving a proto cache.
        Assert(!isStore || !propertySymOpnd->IsLoadedFromProto());

        if (propertySymOpnd->NeedsTypeCheckAndBailOut())
        {
            // Pick the bailout kind along two axes: equivalent vs. exact type
            // check, and fixed-field vs. plain field.
            *pBailOutKind = propertySymOpnd->HasEquivalentTypeSet() && !propertySymOpnd->MustDoMonoCheck() ?
                (isCheckFixedFld ? IR::BailOutFailedEquivalentFixedFieldTypeCheck : IR::BailOutFailedEquivalentTypeCheck) :
                (isCheckFixedFld ? IR::BailOutFailedFixedFieldTypeCheck : IR::BailOutFailedTypeCheck);
            return true;
        }
        else
        {
            // No bailout needed here; report whether an upstream check protects us.
            *pIsTypeCheckProtected = propertySymOpnd->IsTypeCheckProtected();
            *pBailOutKind = IR::BailOutInvalid;
            return false;
        }
    }
    else
    {
        Assert(instr->m_opcode != Js::OpCode::CheckFixedFld);
        *pBailOutKind = IR::BailOutInvalid;
        return false;
    }
}
// Determines whether 'instr' may perform an implicit call (e.g. a user-defined
// getter/setter or conversion) that is not already prevented by an existing bailout,
// so the caller knows whether an implicit-call bailout must be attached.
// src1Val/src2Val are the tracked values for the source operands (may be nullptr);
// they are forwarded to MaySrcNeedBailOnImplicitCall for the per-source decision.
bool
GlobOpt::MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val)
{
    if (!instr->HasAnyImplicitCalls())
    {
        // This opcode/operand combination can never trigger an implicit call.
        return false;
    }

    bool isLdElem = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::LdLen_A:
    {
        const ValueType baseValueType(instr->GetSrc1()->GetValueType());
        // A known string or array base loads its length directly, and the
        // irregular-length bailout likewise rules out implicit calls.
        return
            !(
                baseValueType.IsString() ||
                baseValueType.IsArray() ||
                (instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutOnIrregularLength) // guarantees no implicit calls
            );
    }

    case Js::OpCode::LdElemI_A:
    case Js::OpCode::LdMethodElem:
    case Js::OpCode::InlineArrayPop:
        isLdElem = true;
        // fall-through

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::InlineArrayPush:
    {
        if(!instr->HasBailOutInfo())
        {
            // Unguarded element access: must assume an implicit call is possible.
            return true;
        }

        // The following bailout kinds already prevent implicit calls from happening. Any conditions that could trigger an
        // implicit call result in a pre-op bailout.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        return
            !(
                (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly ||
                bailOutKind & IR::BailOutOnArrayAccessHelperCall ||
                (isLdElem && bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly) // native-array kind only covers loads
            );
    }

    case Js::OpCode::NewScObjectNoCtor:
        if (instr->HasBailOutInfo() && (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutFailedCtorGuardCheck)
        {
            // No helper call with this bailout.
            return false;
        }
        break;

    default:
        break;
    }

    if (OpCodeAttr::HasImplicitCall(instr->m_opcode))
    {
        // Operation has an implicit call regardless of operand attributes.
        return true;
    }

    // Opcode itself is safe; now inspect the destination operand.
    IR::Opnd const * opnd = instr->GetDst();
    if (opnd)
    {
        switch (opnd->GetKind())
        {
        case IR::OpndKindReg:
            // Register stores cannot call out.
            break;

        case IR::OpndKindSym:
            // No implicit call if we are just storing to a stack sym. Note that stores to non-configurable root
            // object fields may still need implicit call bailout. That's because a non-configurable field may still
            // become read-only and thus the store field will not take place (or throw in strict mode). Hence, we
            // can't optimize (e.g. copy prop) across such field stores.
            if (opnd->AsSymOpnd()->m_sym->IsStackSym())
            {
                return false;
            }

            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
                if (!propertySymOpnd->MayHaveImplicitCall())
                {
                    return false;
                }
            }

            // Any other field store may invoke a setter or type handler transition.
            return true;

        case IR::OpndKindIndir:
            // Indirect (element) stores may call helpers/implicit calls.
            return true;

        default:
            Assume(UNREACHED);
        }
    }

    // Destination is safe; finally check each source operand/value.
    opnd = instr->GetSrc1();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src1Val))
    {
        return true;
    }

    opnd = instr->GetSrc2();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src2Val))
    {
        return true;
    }

    return false;
}
  1421. void
  1422. GlobOpt::GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind)
  1423. {
  1424. Assert(pInstr && *pInstr);
  1425. IR::Instr* instr = *pInstr;
  1426. IR::Instr * nextInstr = instr->GetNextByteCodeInstr();
  1427. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(nextInstr, kind);
  1428. if (this->currentBlock->GetLastInstr() == instr)
  1429. {
  1430. this->currentBlock->SetLastInstr(bailOutInstr);
  1431. }
  1432. FillBailOutInfo(this->currentBlock, bailOutInstr);
  1433. *pInstr = bailOutInstr;
  1434. }
  1435. void
  1436. GlobOpt::GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind)
  1437. {
  1438. Assert(pInstr);
  1439. IR::Instr * instr = *pInstr;
  1440. Assert(instr);
  1441. Assert(instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);
  1442. Assert(bailOutKind != IR::BailOutInvalid);
  1443. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(instr, bailOutKind);
  1444. if (this->currentBlock->GetLastInstr() == instr)
  1445. {
  1446. this->currentBlock->SetLastInstr(bailOutInstr);
  1447. }
  1448. FillBailOutInfo(currentBlock, bailOutInstr);
  1449. *pInstr = bailOutInstr;
  1450. }
  1451. IR::Instr *
  1452. GlobOpt::EnsureBailTarget(Loop * loop)
  1453. {
  1454. BailOutInfo * bailOutInfo = loop->bailOutInfo;
  1455. IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
  1456. if (bailOutInstr == nullptr)
  1457. {
  1458. bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailTarget, IR::BailOutShared, bailOutInfo, bailOutInfo->bailOutFunc);
  1459. loop->landingPad->InsertAfter(bailOutInstr);
  1460. }
  1461. return bailOutInstr;
  1462. }