// GlobOptBailOut.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. void
  7. GlobOpt::CaptureCopyPropValue(BasicBlock * block, Sym * sym, Value * val, SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
  8. {
  9. if (!sym->IsStackSym())
  10. {
  11. return;
  12. }
  13. StackSym * copyPropSym = block->globOptData.GetCopyPropSym(sym, val);
  14. if (copyPropSym != nullptr)
  15. {
  16. bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), copyPropSym);
  17. }
  18. }
void
GlobOpt::CaptureValuesFromScratch(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
{
    Sym * sym = nullptr;
    Value * value = nullptr;
    ValueInfo * valueInfo = nullptr;

    // First pass: reuse changedSyms as a scratch set of the stack syms whose
    // values are worth capturing (byte-code-backed syms only).
    block->globOptData.changedSyms->ClearAll();

    FOREACH_GLOBHASHTABLE_ENTRY(bucket, block->globOptData.symToValueMap)
    {
        value = bucket.element;
        valueInfo = value->GetValueInfo();

        // Nothing restorable can be captured from a value with neither a sym
        // store nor an int constant.
        if (valueInfo->GetSymStore() == nullptr && !valueInfo->HasIntConstantValue())
        {
            continue;
        }

        sym = bucket.value;
        // Only stack syms that map back to a byte code register matter for bailout.
        if (sym == nullptr || !sym->IsStackSym() || !(sym->AsStackSym()->HasByteCodeRegSlot()))
        {
            continue;
        }

        block->globOptData.changedSyms->Set(sym->m_id);
    }
    NEXT_GLOBHASHTABLE_ENTRY;

    // Second pass: for each selected sym, capture either its constant value
    // or, failing that, its copy-prop sym.
    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        HashBucket<Sym*, Value*> * bucket = block->globOptData.symToValueMap->GetBucket(symId);
        StackSym * stackSym = bucket->value->AsStackSym();
        value = bucket->element;
        valueInfo = value->GetValueInfo();

        int intConstantValue;
        if (valueInfo->TryGetIntConstantValue(&intConstantValue))
        {
            BailoutConstantValue constValue;
            constValue.InitIntConstValue(intConstantValue);
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else if (valueInfo->IsVarConstant())
        {
            BailoutConstantValue constValue;
            constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
            bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, stackSym, constValue);
        }
        else
        {
            // Not a constant: capture the copy-prop sym instead, if any.
            CaptureCopyPropValue(block, stackSym, value, bailOutCopySymsIter);
        }
    }
    NEXT_BITSET_IN_SPARSEBV
}
void
GlobOpt::CaptureValuesIncremental(BasicBlock * block,
    SListBase<ConstantStackSymValue>::EditingIterator & bailOutConstValuesIter,
    SListBase<CopyPropSyms>::EditingIterator & bailOutCopySymsIter)
{
    // Merge the block's previously captured values (the baseline) with the
    // syms recorded as changed since that capture: unchanged baseline entries
    // are copied over; changed syms are re-captured from the value table.
    // Both baseline lists are walked in step with the sorted changedSyms bits.
    CapturedValues * currCapturedValues = block->globOptData.capturedValues;
    SListBase<ConstantStackSymValue>::Iterator iterConst(currCapturedValues ? &currCapturedValues->constantValues : nullptr);
    SListBase<CopyPropSyms>::Iterator iterCopyPropSym(currCapturedValues ? &currCapturedValues->copyPropSyms : nullptr);
    bool hasConstValue = currCapturedValues ? iterConst.Next() : false;
    bool hasCopyPropSym = currCapturedValues ? iterCopyPropSym.Next() : false;

    // Sentinel bit: forces one final loop iteration that drains the tail of
    // unchanged baseline entries whose ids exceed every changed sym id.
    block->globOptData.changedSyms->Set(Js::Constants::InvalidSymID);

    FOREACH_BITSET_IN_SPARSEBV(symId, block->globOptData.changedSyms)
    {
        Sym * sym = hasConstValue ? iterConst.Data().Key() : nullptr;
        Value * val = nullptr;
        HashBucket<Sym *, Value *> * symIdBucket = nullptr;

        // copy unchanged sym to new capturedValues
        while (sym && sym->m_id < symId)
        {
            Assert(sym->IsStackSym());

            // Arg-slot syms are not carried over.
            if (!sym->AsStackSym()->HasArgSlotNum())
            {
                bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), iterConst.Data().Value());
            }

            hasConstValue = iterConst.Next();
            sym = hasConstValue ? iterConst.Data().Key() : nullptr;
        }
        // The baseline entry for a changed sym is stale; skip past it.
        if (sym && sym->m_id == symId)
        {
            hasConstValue = iterConst.Next();
        }
        if (symId != Js::Constants::InvalidSymID)
        {
            // recapture changed constant sym
            symIdBucket = block->globOptData.symToValueMap->GetBucket(symId);
            if (symIdBucket == nullptr)
            {
                // No current value for this sym: nothing to re-capture.
                continue;
            }

            Sym * symIdSym = symIdBucket->value;
            Assert(symIdSym->IsStackSym() && (symIdSym->AsStackSym()->HasByteCodeRegSlot() || symIdSym->AsStackSym()->HasArgSlotNum()));

            val = symIdBucket->element;
            ValueInfo* valueInfo = val->GetValueInfo();

            if (valueInfo->GetSymStore() != nullptr)
            {
                int32 intConstValue;
                BailoutConstantValue constValue;

                if (valueInfo->TryGetIntConstantValue(&intConstValue))
                {
                    constValue.InitIntConstValue(intConstValue);
                    bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    // Captured as a constant; skip the copy-prop handling below
                    // for this symId.
                    continue;
                }
                else if(valueInfo->IsVarConstant())
                {
                    constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
                    bailOutConstValuesIter.InsertNodeBefore(this->func->m_alloc, symIdSym->AsStackSym(), constValue);
                    continue;
                }
            }
            else if (!valueInfo->HasIntConstantValue())
            {
                // No sym store and no int constant: nothing restorable.
                continue;
            }
        }

        sym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;

        // process unchanged sym, but copy sym might have changed
        while (sym && sym->m_id < symId)
        {
            StackSym * copyPropSym = iterCopyPropSym.Data().Value();

            Assert(sym->IsStackSym());

            if (!block->globOptData.changedSyms->Test(copyPropSym->m_id))
            {
                // Copy-prop sym unchanged: the baseline pair is still valid.
                if (!sym->AsStackSym()->HasArgSlotNum())
                {
                    bailOutCopySymsIter.InsertNodeBefore(this->func->m_alloc, sym->AsStackSym(), copyPropSym);
                }
            }
            else
            {
                // Copy-prop sym changed: look up a fresh copy-prop sym from the
                // current value of 'sym'.
                if (!sym->AsStackSym()->HasArgSlotNum())
                {
                    val = this->currentBlock->globOptData.FindValue(sym);
                    if (val != nullptr)
                    {
                        CaptureCopyPropValue(block, sym, val, bailOutCopySymsIter);
                    }
                }
            }

            hasCopyPropSym = iterCopyPropSym.Next();
            sym = hasCopyPropSym ? iterCopyPropSym.Data().Key() : nullptr;
        }
        // Baseline copy-prop entry for a changed sym is stale; skip it.
        if (sym && sym->m_id == symId)
        {
            hasCopyPropSym = iterCopyPropSym.Next();
        }
        if (symId != Js::Constants::InvalidSymID)
        {
            // recapture changed copy prop sym
            symIdBucket = block->globOptData.symToValueMap->GetBucket(symId);
            if (symIdBucket != nullptr)
            {
                Sym * symIdSym = symIdBucket->value;
                val = this->currentBlock->globOptData.FindValue(symIdSym);
                if (val != nullptr)
                {
                    CaptureCopyPropValue(block, symIdSym, val, bailOutCopySymsIter);
                }
            }
        }
    }
    NEXT_BITSET_IN_SPARSEBV
}
void
GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    // Snapshot the constant values and copy-prop syms known in 'block' and
    // attach the snapshot to 'bailOutInfo' so they can be restored on bailout.
    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // TODO[generators][ianhall]: Enable constprop and copyprop for generator functions; see GlobOpt::CopyProp()
        // Even though CopyProp is disabled for generator functions we must also not put the copy-prop sym into the
        // bailOutInfo so that the bailOutInfo keeps track of the key sym in its byteCodeUpwardExposed list.
        return;
    }

    CapturedValues capturedValues;
    SListBase<ConstantStackSymValue>::EditingIterator bailOutConstValuesIter(&capturedValues.constantValues);
    SListBase<CopyPropSyms>::EditingIterator bailOutCopySymsIter(&capturedValues.copyPropSyms);

    // Advance each editing iterator to its first position so the capture
    // helpers can insert entries via InsertNodeBefore.
    bailOutConstValuesIter.Next();
    bailOutCopySymsIter.Next();

    // Capture from scratch when there is no prior snapshot to base an
    // incremental capture on.
    if (!block->globOptData.capturedValues)
    {
        CaptureValuesFromScratch(block, bailOutConstValuesIter, bailOutCopySymsIter);
    }
    else
    {
        CaptureValuesIncremental(block, bailOutConstValuesIter, bailOutCopySymsIter);
    }

    // attach capturedValues to bailOutInfo
    bailOutInfo->capturedValues.constantValues.Clear(this->func->m_alloc);
    bailOutConstValuesIter.SetNext(&bailOutInfo->capturedValues.constantValues);
    bailOutInfo->capturedValues.constantValues = capturedValues.constantValues;

    bailOutInfo->capturedValues.copyPropSyms.Clear(this->func->m_alloc);
    bailOutCopySymsIter.SetNext(&bailOutInfo->capturedValues.copyPropSyms);
    bailOutInfo->capturedValues.copyPropSyms = capturedValues.copyPropSyms;

    if (!PHASE_OFF(Js::IncrementalBailoutPhase, func))
    {
        // cache the pointer of current bailout as potential baseline for later bailout in this block
        block->globOptData.capturedValuesCandidate = &bailOutInfo->capturedValues;

        // reset changed syms to track symbols change after the above captured values candidate
        this->changedSymsAfterIncBailoutCandidate->ClearAll();
    }
}
void
GlobOpt::CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator)
{
    // Record on the bailout info which stack syms currently hold the arguments
    // object, so they can be restored on bailout.
    // NOTE(review): this iterates this->currentBlock's argObjSyms rather than
    // the 'block' parameter — presumably the two coincide at every call site;
    // confirm against callers.
    FOREACH_BITSET_IN_SPARSEBV(id, this->currentBlock->globOptData.argObjSyms)
    {
        StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
        Assert(stackSym != nullptr);
        // Only syms backed by a byte code register need restoring.
        if (!stackSym->HasByteCodeRegSlot())
        {
            continue;
        }

        if (!bailOutInfo->capturedValues.argObjSyms)
        {
            // Lazily allocate the bit vector on the first captured sym.
            bailOutInfo->capturedValues.argObjSyms = JitAnew(allocator, BVSparse<JitArenaAllocator>, allocator);
        }

        bailOutInfo->capturedValues.argObjSyms->Set(id);
        // Add to BailOutInfo
    }
    NEXT_BITSET_IN_SPARSEBV
}
  241. void
  242. GlobOpt::TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  243. {
  244. IR::Opnd * src = instr->GetSrc1();
  245. if (src)
  246. {
  247. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  248. src = instr->GetSrc2();
  249. if (src)
  250. {
  251. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  252. }
  253. }
  254. #if DBG
  255. // There should be no more than one property sym used.
  256. PropertySym *propertySymFromSrc = *pPropertySym;
  257. #endif
  258. IR::Opnd * dst = instr->GetDst();
  259. if (dst)
  260. {
  261. StackSym *stackSym = dst->GetStackSym();
  262. // We want stackSym uses: IndirOpnd and SymOpnds of propertySyms.
  263. // RegOpnd and SymOPnd of StackSyms are stack sym defs.
  264. if (stackSym == NULL)
  265. {
  266. TrackByteCodeSymUsed(dst, instrByteCodeStackSymUsed, pPropertySym);
  267. }
  268. }
  269. #if DBG
  270. AssertMsg(propertySymFromSrc == NULL || propertySymFromSrc == *pPropertySym,
  271. "Lost a property sym use?");
  272. #endif
  273. }
  274. void
  275. GlobOpt::TrackByteCodeSymUsed(IR::RegOpnd * regOpnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  276. {
  277. // Check JITOptimizedReg to catch case where baseOpnd of indir was optimized.
  278. if (!regOpnd->GetIsJITOptimizedReg())
  279. {
  280. TrackByteCodeSymUsed(regOpnd->m_sym, instrByteCodeStackSymUsed);
  281. }
  282. }
  283. void
  284. GlobOpt::TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  285. {
  286. if (opnd->GetIsJITOptimizedReg())
  287. {
  288. AssertMsg(!opnd->IsIndirOpnd(), "TrackByteCodeSymUsed doesn't expect IndirOpnd with IsJITOptimizedReg turned on");
  289. return;
  290. }
  291. switch(opnd->GetKind())
  292. {
  293. case IR::OpndKindReg:
  294. TrackByteCodeSymUsed(opnd->AsRegOpnd(), instrByteCodeStackSymUsed);
  295. break;
  296. case IR::OpndKindSym:
  297. {
  298. Sym * sym = opnd->AsSymOpnd()->m_sym;
  299. if (sym->IsStackSym())
  300. {
  301. TrackByteCodeSymUsed(sym->AsStackSym(), instrByteCodeStackSymUsed);
  302. }
  303. else
  304. {
  305. TrackByteCodeSymUsed(sym->AsPropertySym()->m_stackSym, instrByteCodeStackSymUsed);
  306. *pPropertySym = sym->AsPropertySym();
  307. }
  308. }
  309. break;
  310. case IR::OpndKindIndir:
  311. TrackByteCodeSymUsed(opnd->AsIndirOpnd()->GetBaseOpnd(), instrByteCodeStackSymUsed);
  312. {
  313. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  314. if (indexOpnd)
  315. {
  316. TrackByteCodeSymUsed(indexOpnd, instrByteCodeStackSymUsed);
  317. }
  318. }
  319. break;
  320. }
  321. }
  322. void
  323. GlobOpt::TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  324. {
  325. // We only care about stack sym that has a corresponding byte code register
  326. if (sym->HasByteCodeRegSlot())
  327. {
  328. if (sym->IsTypeSpec())
  329. {
  330. // It has to have a var version for byte code regs
  331. sym = sym->GetVarEquivSym(nullptr);
  332. }
  333. instrByteCodeStackSymUsed->Set(sym->m_id);
  334. }
  335. }
  336. void
  337. GlobOpt::MarkNonByteCodeUsed(IR::Instr * instr)
  338. {
  339. IR::Opnd * dst = instr->GetDst();
  340. if (dst)
  341. {
  342. MarkNonByteCodeUsed(dst);
  343. }
  344. IR::Opnd * src1 = instr->GetSrc1();
  345. if (src1)
  346. {
  347. MarkNonByteCodeUsed(src1);
  348. IR::Opnd * src2 = instr->GetSrc2();
  349. if (src2)
  350. {
  351. MarkNonByteCodeUsed(src2);
  352. }
  353. }
  354. }
  355. void
  356. GlobOpt::MarkNonByteCodeUsed(IR::Opnd * opnd)
  357. {
  358. switch(opnd->GetKind())
  359. {
  360. case IR::OpndKindReg:
  361. opnd->AsRegOpnd()->SetIsJITOptimizedReg(true);
  362. break;
  363. case IR::OpndKindIndir:
  364. opnd->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  365. {
  366. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  367. if (indexOpnd)
  368. {
  369. indexOpnd->SetIsJITOptimizedReg(true);
  370. }
  371. }
  372. break;
  373. }
  374. }
  375. void
  376. GlobOpt::CaptureByteCodeSymUses(IR::Instr * instr)
  377. {
  378. if (this->byteCodeUses)
  379. {
  380. // We already captured it before.
  381. return;
  382. }
  383. Assert(this->propertySymUse == NULL);
  384. this->byteCodeUses = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  385. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUses, &this->propertySymUse);
  386. AssertMsg(this->byteCodeUses->Equal(this->byteCodeUsesBeforeOpt),
  387. "Instruction edited before capturing the byte code use");
  388. }
  389. void
  390. GlobOpt::TrackCalls(IR::Instr * instr)
  391. {
  392. // Keep track of out params for bailout
  393. switch (instr->m_opcode)
  394. {
  395. case Js::OpCode::StartCall:
  396. Assert(!this->isCallHelper);
  397. Assert(instr->GetDst()->IsRegOpnd());
  398. Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
  399. if (this->currentBlock->globOptData.callSequence == nullptr)
  400. {
  401. this->currentBlock->globOptData.callSequence = JitAnew(this->alloc, SListBase<IR::Opnd *>);
  402. this->currentBlock->globOptData.callSequence = this->currentBlock->globOptData.callSequence;
  403. }
  404. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  405. this->currentBlock->globOptData.totalOutParamCount += instr->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
  406. this->currentBlock->globOptData.startCallCount++;
  407. break;
  408. case Js::OpCode::BytecodeArgOutCapture:
  409. {
  410. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  411. this->currentBlock->globOptData.argOutCount++;
  412. break;
  413. }
  414. case Js::OpCode::ArgOut_A:
  415. case Js::OpCode::ArgOut_A_Inline:
  416. case Js::OpCode::ArgOut_A_FixupForStackArgs:
  417. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  418. case Js::OpCode::ArgOut_A_Dynamic:
  419. case Js::OpCode::ArgOut_A_FromStackArgs:
  420. case Js::OpCode::ArgOut_A_SpreadArg:
  421. {
  422. IR::Opnd * opnd = instr->GetDst();
  423. if (opnd->IsSymOpnd())
  424. {
  425. Assert(!this->isCallHelper);
  426. Assert(!this->currentBlock->globOptData.callSequence->Empty());
  427. StackSym* stackSym = opnd->AsSymOpnd()->m_sym->AsStackSym();
  428. // These scenarios are already tracked using BytecodeArgOutCapture,
  429. // and we don't want to be tracking ArgOut_A_FixupForStackArgs as these are only visible to the JIT and we should not be restoring them upon bailout.
  430. if (!stackSym->m_isArgCaptured && instr->m_opcode != Js::OpCode::ArgOut_A_FixupForStackArgs)
  431. {
  432. this->currentBlock->globOptData.callSequence->Prepend(this->alloc, instr->GetDst());
  433. this->currentBlock->globOptData.argOutCount++;
  434. }
  435. Assert(stackSym->IsArgSlotSym());
  436. if (stackSym->m_isInlinedArgSlot)
  437. {
  438. this->currentBlock->globOptData.inlinedArgOutCount++;
  439. // We want to update the offsets only once: don't do in prepass.
  440. if (!this->IsLoopPrePass() && stackSym->m_offset >= 0)
  441. {
  442. Func * currentFunc = instr->m_func;
  443. stackSym->FixupStackOffset(currentFunc);
  444. }
  445. }
  446. }
  447. else
  448. {
  449. // It is a reg opnd if it is a helper call
  450. // It should be all ArgOut until the CallHelper instruction
  451. Assert(opnd->IsRegOpnd());
  452. this->isCallHelper = true;
  453. }
  454. if (instr->m_opcode == Js::OpCode::ArgOut_A_FixupForStackArgs && !this->IsLoopPrePass())
  455. {
  456. instr->m_opcode = Js::OpCode::ArgOut_A_Inline;
  457. }
  458. break;
  459. }
  460. case Js::OpCode::InlineeStart:
  461. Assert(instr->m_func->GetParentFunc() == this->currentBlock->globOptData.curFunc);
  462. Assert(instr->m_func->GetParentFunc());
  463. this->currentBlock->globOptData.curFunc = instr->m_func;
  464. this->currentBlock->globOptData.curFunc = instr->m_func;
  465. this->func->UpdateMaxInlineeArgOutCount(this->currentBlock->globOptData.inlinedArgOutCount);
  466. this->EndTrackCall(instr);
  467. if (DoInlineArgsOpt(instr->m_func))
  468. {
  469. instr->m_func->m_hasInlineArgsOpt = true;
  470. InlineeFrameInfo* frameInfo = InlineeFrameInfo::New(func->m_alloc);
  471. instr->m_func->frameInfo = frameInfo;
  472. frameInfo->floatSyms = currentBlock->globOptData.liveFloat64Syms->CopyNew(this->alloc);
  473. frameInfo->intSyms = currentBlock->globOptData.liveInt32Syms->MinusNew(currentBlock->globOptData.liveLossyInt32Syms, this->alloc);
  474. #ifdef ENABLE_SIMDJS
  475. // SIMD_JS
  476. frameInfo->simd128F4Syms = currentBlock->globOptData.liveSimd128F4Syms->CopyNew(this->alloc);
  477. frameInfo->simd128I4Syms = currentBlock->globOptData.liveSimd128I4Syms->CopyNew(this->alloc);
  478. #endif
  479. }
  480. break;
  481. case Js::OpCode::EndCallForPolymorphicInlinee:
  482. // Have this opcode mimic the functions of both InlineeStart and InlineeEnd in the bailout block of a polymorphic call inlined using fixed methods.
  483. this->EndTrackCall(instr);
  484. break;
  485. case Js::OpCode::CallHelper:
  486. case Js::OpCode::IsInst:
  487. Assert(this->isCallHelper);
  488. this->isCallHelper = false;
  489. break;
  490. case Js::OpCode::InlineeEnd:
  491. if (instr->m_func->m_hasInlineArgsOpt)
  492. {
  493. RecordInlineeFrameInfo(instr);
  494. }
  495. EndTrackingOfArgObjSymsForInlinee();
  496. Assert(this->currentBlock->globOptData.inlinedArgOutCount >= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false));
  497. this->currentBlock->globOptData.inlinedArgOutCount -= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false);
  498. break;
  499. case Js::OpCode::InlineeMetaArg:
  500. {
  501. Assert(instr->GetDst()->IsSymOpnd());
  502. StackSym * stackSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
  503. Assert(stackSym->IsArgSlotSym());
  504. // InlineeMetaArg has the m_func set as the "inlinee" and not the "inliner"
  505. // TODO: Review this and fix the m_func of InlineeMetaArg to be "inliner" (as for the rest of the ArgOut's)
  506. // We want to update the offsets only once: don't do in prepass.
  507. if (!this->IsLoopPrePass())
  508. {
  509. Func * currentFunc = instr->m_func->GetParentFunc();
  510. stackSym->FixupStackOffset(currentFunc);
  511. }
  512. this->currentBlock->globOptData.inlinedArgOutCount++;
  513. break;
  514. }
  515. case Js::OpCode::InlineBuiltInStart:
  516. this->inInlinedBuiltIn = true;
  517. break;
  518. case Js::OpCode::InlineNonTrackingBuiltInEnd:
  519. case Js::OpCode::InlineBuiltInEnd:
  520. {
  521. // If extra bailouts were added for the InlineMathXXX call itself,
  522. // move InlineeBuiltInStart just above the InlineMathXXX.
  523. // This is needed so that the function argument has lifetime after all bailouts for InlineMathXXX,
  524. // otherwise when we bailout we would get wrong function.
  525. IR::Instr* inlineBuiltInStartInstr = instr->m_prev;
  526. while (inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
  527. {
  528. inlineBuiltInStartInstr = inlineBuiltInStartInstr->m_prev;
  529. }
  530. IR::Instr *byteCodeUsesInstr = inlineBuiltInStartInstr->m_prev;
  531. IR::Instr * insertBeforeInstr = instr->m_prev;
  532. IR::Instr * tmpInstr = insertBeforeInstr;
  533. while(tmpInstr->m_opcode != Js::OpCode::InlineBuiltInStart )
  534. {
  535. if(tmpInstr->m_opcode == Js::OpCode::ByteCodeUses)
  536. {
  537. insertBeforeInstr = tmpInstr;
  538. }
  539. tmpInstr = tmpInstr->m_prev;
  540. }
  541. inlineBuiltInStartInstr->Unlink();
  542. if(insertBeforeInstr == instr->m_prev)
  543. {
  544. insertBeforeInstr->InsertBefore(inlineBuiltInStartInstr);
  545. }
  546. else
  547. {
  548. insertBeforeInstr->m_prev->InsertBefore(inlineBuiltInStartInstr);
  549. }
  550. // Need to move the byte code uses instructions associated with inline built-in start instruction as well. For instance,
  551. // copy-prop may have replaced the function sym and inserted a byte code uses for the original sym holding the function.
  552. // That byte code uses instruction needs to appear after bailouts inserted for the InlinMathXXX instruction since the
  553. // byte code register holding the function object needs to be restored on bailout.
  554. IR::Instr *const insertByteCodeUsesAfterInstr = inlineBuiltInStartInstr->m_prev;
  555. if(byteCodeUsesInstr != insertByteCodeUsesAfterInstr)
  556. {
  557. // The InlineBuiltInStart instruction was moved, look for its ByteCodeUses instructions that also need to be moved
  558. while(
  559. byteCodeUsesInstr->IsByteCodeUsesInstr() &&
  560. byteCodeUsesInstr->AsByteCodeUsesInstr()->GetByteCodeOffset() == inlineBuiltInStartInstr->GetByteCodeOffset())
  561. {
  562. IR::Instr *const instrToMove = byteCodeUsesInstr;
  563. byteCodeUsesInstr = byteCodeUsesInstr->m_prev;
  564. instrToMove->Unlink();
  565. insertByteCodeUsesAfterInstr->InsertAfter(instrToMove);
  566. }
  567. }
  568. // The following code makes more sense to be processed when we hit InlineeBuiltInStart,
  569. // but when extra bailouts are added for the InlineMathXXX and InlineArrayPop instructions itself, those bailouts
  570. // need to know about current bailout record, but since they are added after TrackCalls is called
  571. // for InlineeBuiltInStart, we can't clear current record when got InlineeBuiltInStart
  572. // Do not track calls for InlineNonTrackingBuiltInEnd, as it is already tracked for InlineArrayPop
  573. if(instr->m_opcode == Js::OpCode::InlineBuiltInEnd)
  574. {
  575. this->EndTrackCall(instr);
  576. }
  577. Assert(this->currentBlock->globOptData.inlinedArgOutCount >= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false));
  578. this->currentBlock->globOptData.inlinedArgOutCount -= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false);
  579. this->inInlinedBuiltIn = false;
  580. break;
  581. }
  582. case Js::OpCode::InlineArrayPop:
  583. {
  584. // EndTrackCall should be called here as the Post-op BailOutOnImplicitCalls will bail out to the instruction after the Pop function call instr.
  585. // This bailout shouldn't be tracking the call sequence as it will then erroneously reserve stack space for arguments when the call would have already happened
  586. // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for InlineArrayPop.
  587. this->EndTrackCall(instr);
  588. break;
  589. }
  590. default:
  591. if (OpCodeAttr::CallInstr(instr->m_opcode))
  592. {
  593. this->EndTrackCall(instr);
  594. if (this->inInlinedBuiltIn && instr->m_opcode == Js::OpCode::CallDirect)
  595. {
  596. // We can end up in this situation when a built-in apply target is inlined to a CallDirect. We have the following IR:
  597. //
  598. // StartCall
  599. // ArgOut_InlineBuiltIn
  600. // ArgOut_InlineBuiltIn
  601. // ArgOut_InlineBuiltIn
  602. // InlineBuiltInStart
  603. // ArgOut_A_InlineSpecialized
  604. // ArgOut_A
  605. // ArgOut_A
  606. // CallDirect
  607. // InlineNonTrackingBuiltInEnd
  608. //
  609. // We need to call EndTrackCall twice for CallDirect in this case. The CallDirect may get a BailOutOnImplicitCalls later,
  610. // but it should not be tracking the call sequence for the apply call as it is a post op bailout and the call would have
  611. // happened when we bail out.
  612. // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for CallDirect.
  613. this->EndTrackCall(instr);
  614. }
  615. }
  616. break;
  617. }
  618. }
// Records, on the inlinee Func's InlineeFrameInfo, how to reconstruct the inlinee's
// frame (function object + actual arguments) if we bail out inside the inlinee.
// Each value is captured either as a constant or as the sym (in whichever
// type-specialized form is live at this point) that holds it.
void GlobOpt::RecordInlineeFrameInfo(IR::Instr* inlineeEnd)
{
    if (this->IsLoopPrePass())
    {
        // Nothing is final during the loop pre-pass; record only on the real pass.
        return;
    }
    InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
    if (frameInfo->isRecorded)
    {
        Assert(frameInfo->function.type != InlineeFrameInfoValueType_None);
        // Due to Cmp peeps in flow graph - InlineeEnd can be cloned.
        return;
    }
    inlineeEnd->IterateArgInstrs([=] (IR::Instr* argInstr)
    {
        if (argInstr->m_opcode == Js::OpCode::InlineeStart)
        {
            // InlineeStart's src1 is the inlined function object itself.
            Assert(frameInfo->function.type == InlineeFrameInfoValueType_None);
            IR::RegOpnd* functionObject = argInstr->GetSrc1()->AsRegOpnd();
            if (functionObject->m_sym->IsConst())
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym->GetConstValueForBailout());
            }
            else
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym);
            }
        }
        else
        {
            // An ArgOut feeding the inlinee: record how to restore this argument slot.
            Js::ArgSlot argSlot = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
            IR::Opnd* argOpnd = argInstr->GetSrc1();
            InlineFrameInfoValue frameInfoValue;
            StackSym* argSym = argOpnd->GetStackSym();
            if (!argSym)
            {
                // Not sym-backed: the operand itself carries a constant value.
                frameInfoValue = InlineFrameInfoValue(argOpnd->GetConstValue());
            }
            else if (argSym->IsConst())
            {
                frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
            }
            else
            {
                if (PHASE_ON(Js::CopyPropPhase, func))
                {
                    // Prefer the copy-prop sym when one exists; that is the sym that
                    // actually holds the value at this point.
                    Value* value = this->currentBlock->globOptData.FindValue(argSym);
                    StackSym * copyPropSym = this->currentBlock->globOptData.GetCopyPropSym(argSym, value);
                    if (copyPropSym)
                    {
                        argSym = copyPropSym;
                    }
                }
                GlobOptBlockData& globOptData = this->currentBlock->globOptData;
                // frameInfo->intSyms / floatSyms (and SIMD variants) mark syms whose
                // var version is not live here, so the type-specialized equivalent
                // sym must be captured instead.
                if (frameInfo->intSyms->TestEmpty() && frameInfo->intSyms->Test(argSym->m_id))
                {
                    // Var version of the sym is not live, use the int32 version
                    argSym = argSym->GetInt32EquivSym(nullptr);
                    Assert(argSym);
                }
                else if (frameInfo->floatSyms->TestEmpty() && frameInfo->floatSyms->Test(argSym->m_id))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    argSym = argSym->GetFloat64EquivSym(nullptr);
                    Assert(argSym);
                }
#ifdef ENABLE_SIMDJS
                // SIMD_JS
                else if (frameInfo->simd128F4Syms->TestEmpty() && frameInfo->simd128F4Syms->Test(argSym->m_id))
                {
                    argSym = argSym->GetSimd128F4EquivSym(nullptr);
                }
                else if (frameInfo->simd128I4Syms->TestEmpty() && frameInfo->simd128I4Syms->Test(argSym->m_id))
                {
                    argSym = argSym->GetSimd128I4EquivSym(nullptr);
                }
#endif
                else
                {
                    // Otherwise the var version must be live here.
                    Assert(globOptData.liveVarSyms->Test(argSym->m_id));
                }
                if (argSym->IsConst())
                {
                    frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
                }
                else
                {
                    frameInfoValue = InlineFrameInfoValue(argSym);
                }
            }
            Assert(argSlot >= 1);
            // Arg slots are 1-based; the arguments array is 0-based.
            frameInfo->arguments->SetItem(argSlot - 1, frameInfoValue);
        }
        return false;
    });

    // The per-type liveness vectors are only needed while recording; free them now.
    JitAdelete(this->alloc, frameInfo->intSyms);
    frameInfo->intSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->floatSyms);
    frameInfo->floatSyms = nullptr;
#ifdef ENABLE_SIMDJS
    // SIMD_JS
    JitAdelete(this->alloc, frameInfo->simd128F4Syms);
    frameInfo->simd128F4Syms = nullptr;
    JitAdelete(this->alloc, frameInfo->simd128I4Syms);
    frameInfo->simd128I4Syms = nullptr;
#endif
    frameInfo->isRecorded = true;
}
  727. void GlobOpt::EndTrackingOfArgObjSymsForInlinee()
  728. {
  729. Assert(this->currentBlock->globOptData.curFunc->GetParentFunc());
  730. if (this->currentBlock->globOptData.curFunc->argObjSyms && TrackArgumentsObject())
  731. {
  732. BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  733. tempBv->Minus(this->currentBlock->globOptData.curFunc->argObjSyms, this->currentBlock->globOptData.argObjSyms);
  734. if(!tempBv->IsEmpty())
  735. {
  736. // This means there are arguments object symbols in the current function which are not in the current block.
  737. // This could happen when one of the blocks has a throw and arguments object aliased in it and other blocks don't see it.
  738. // Rare case, abort stack arguments optimization in this case.
  739. CannotAllocateArgumentsObjectOnStack();
  740. }
  741. else
  742. {
  743. Assert(this->currentBlock->globOptData.argObjSyms->OrNew(this->currentBlock->globOptData.curFunc->argObjSyms)->Equal(this->currentBlock->globOptData.argObjSyms));
  744. this->currentBlock->globOptData.argObjSyms->Minus(this->currentBlock->globOptData.curFunc->argObjSyms);
  745. }
  746. JitAdelete(this->tempAlloc, tempBv);
  747. }
  748. this->currentBlock->globOptData.curFunc = this->currentBlock->globOptData.curFunc->GetParentFunc();
  749. this->currentBlock->globOptData.curFunc = this->currentBlock->globOptData.curFunc;
  750. }
// Pops the bookkeeping for one completed call from the current block's call
// sequence: removes the pending ArgOut entries and the matching StartCall entry,
// and updates the outstanding arg-out / out-param / StartCall counters.
void GlobOpt::EndTrackCall(IR::Instr* instr)
{
    Assert(instr);
    // Only real calls (or the pseudo call-ending opcodes) may end call tracking.
    Assert(OpCodeAttr::CallInstr(instr->m_opcode) || instr->m_opcode == Js::OpCode::InlineeStart || instr->m_opcode == Js::OpCode::InlineBuiltInEnd
        || instr->m_opcode == Js::OpCode::InlineArrayPop || instr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee);

    Assert(!this->isCallHelper);
    Assert(!this->currentBlock->globOptData.callSequence->Empty());

#if DBG
    uint origArgOutCount = this->currentBlock->globOptData.argOutCount;
#endif
    // Entries with an arg-slot number are the innermost call's ArgOuts; pop them
    // until the StartCall entry (a reg sym without an arg slot) surfaces.
    while (this->currentBlock->globOptData.callSequence->Head()->GetStackSym()->HasArgSlotNum())
    {
        this->currentBlock->globOptData.argOutCount--;
        this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);
    }
    StackSym * sym = this->currentBlock->globOptData.callSequence->Head()->AsRegOpnd()->m_sym->AsStackSym();
    this->currentBlock->globOptData.callSequence->RemoveHead(this->alloc);

#if DBG
    Assert(sym->m_isSingleDef);
    Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);

    // Number of argument set should be the same as indicated at StartCall
    // except NewScObject has an implicit arg1
    Assert((uint)sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true) ==
        origArgOutCount - this->currentBlock->globOptData.argOutCount +
        (instr->m_opcode == Js::OpCode::NewScObject || instr->m_opcode == Js::OpCode::NewScObjArray
        || instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread));
#endif

    this->currentBlock->globOptData.totalOutParamCount -= sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
    this->currentBlock->globOptData.startCallCount--;
}
// Captures everything needed to restore the interpreter's state at this bailout
// point from the block's glob-opt data: per-type live-sym bit vectors, stack
// literal init-field counts, the arguments object, the in-flight call sequence
// (StartCall/ArgOut chains), and known constant values.
void
GlobOpt::FillBailOutInfo(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");

    bailOutInfo->liveVarSyms = block->globOptData.liveVarSyms->CopyNew(this->func->m_alloc);
    bailOutInfo->liveFloat64Syms = block->globOptData.liveFloat64Syms->CopyNew(this->func->m_alloc);
#ifdef ENABLE_SIMDJS
    // SIMD_JS
    bailOutInfo->liveSimd128F4Syms = block->globOptData.liveSimd128F4Syms->CopyNew(this->func->m_alloc);
    bailOutInfo->liveSimd128I4Syms = block->globOptData.liveSimd128I4Syms->CopyNew(this->func->m_alloc);
#endif
    // The live int32 syms in the bailout info are only the syms resulting from lossless conversion to int. If the int32 value
    // was created from a lossy conversion to int, the original var value cannot be re-materialized from the int32 value. So, the
    // int32 version is considered to be not live for the purposes of bailout, which forces the var or float versions to be used
    // directly for restoring the value during bailout. Otherwise, bailout may try to re-materialize the var value by converting
    // the lossily-converted int value back into a var, restoring the wrong value.
    bailOutInfo->liveLosslessInt32Syms =
        block->globOptData.liveInt32Syms->MinusNew(block->globOptData.liveLossyInt32Syms, this->func->m_alloc);

    // Save the stack literal init field count so we can null out the uninitialized fields
    StackLiteralInitFldDataMap * stackLiteralInitFldDataMap = block->globOptData.stackLiteralInitFldDataMap;
    if (stackLiteralInitFldDataMap != nullptr)
    {
        uint stackLiteralInitFldDataCount = stackLiteralInitFldDataMap->Count();
        if (stackLiteralInitFldDataCount != 0)
        {
            // Snapshot (stackSym, currentInitFldCount) pairs into a flat array.
            auto stackLiteralBailOutInfo = AnewArray(this->func->m_alloc,
                BailOutInfo::StackLiteralBailOutInfo, stackLiteralInitFldDataCount);
            uint i = 0;
            stackLiteralInitFldDataMap->Map(
                [stackLiteralBailOutInfo, stackLiteralInitFldDataCount, &i](StackSym * stackSym, StackLiteralInitFldData const& data)
            {
                Assert(i < stackLiteralInitFldDataCount);
                stackLiteralBailOutInfo[i].stackSym = stackSym;
                stackLiteralBailOutInfo[i].initFldCount = data.currentInitFldCount;
                i++;
            });

            Assert(i == stackLiteralInitFldDataCount);
            bailOutInfo->stackLiteralBailOutInfoCount = stackLiteralInitFldDataCount;
            bailOutInfo->stackLiteralBailOutInfo = stackLiteralBailOutInfo;
        }
    }

    if (TrackArgumentsObject())
    {
        this->CaptureArguments(block, bailOutInfo, this->func->m_alloc);
    }

    if (block->globOptData.callSequence && !block->globOptData.callSequence->Empty())
    {
        // Calls are in flight at this point: record the StartCall/ArgOut structure
        // so bailout can rebuild the interpreter's argument stack.
        uint currentArgOutCount = 0;
        uint startCallNumber = block->globOptData.startCallCount;

        bailOutInfo->startCallInfo = JitAnewArray(this->func->m_alloc, BailOutInfo::StartCallInfo, startCallNumber);
        bailOutInfo->startCallCount = startCallNumber;

        // Save the start call's func to identify the function (inlined) that the call sequence is for
        // We might not have any arg out yet to get the function from
        bailOutInfo->startCallFunc = JitAnewArray(this->func->m_alloc, Func *, startCallNumber);
#ifdef _M_IX86
        bailOutInfo->inlinedStartCall = BVFixed::New(startCallNumber, this->func->m_alloc, false);
#endif
        uint totalOutParamCount = block->globOptData.totalOutParamCount;
        bailOutInfo->totalOutParamCount = totalOutParamCount;
        bailOutInfo->argOutSyms = JitAnewArrayZ(this->func->m_alloc, StackSym *, totalOutParamCount);

        uint argRestoreAdjustCount = 0;
        // The call sequence is walked from the innermost (most recent) call
        // outwards, so the output arrays are filled from the tail.
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, block->globOptData.callSequence)
        {
            if(opnd->GetStackSym()->HasArgSlotNum())
            {
                // ArgOut entry for the current call.
                StackSym * sym;
                if(opnd->IsSymOpnd())
                {
                    sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
                    Assert(sym->IsArgSlotSym());
                    Assert(sym->m_isSingleDef);
                    Assert(sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Inline
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_InlineBuiltIn
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_SpreadArg
                        || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
                }
                else
                {
                    sym = opnd->GetStackSym();
                    Assert(this->currentBlock->globOptData.FindValue(sym));
                    // StackSym args need to be re-captured
                    this->currentBlock->globOptData.SetChangedSym(sym->m_id);
                }

                Assert(totalOutParamCount != 0);
                Assert(totalOutParamCount > currentArgOutCount);
                currentArgOutCount++;
#pragma prefast(suppress:26000, "currentArgOutCount is never 0");
                bailOutInfo->argOutSyms[totalOutParamCount - currentArgOutCount] = sym;
                // Note that there could be ArgOuts below current bailout instr that belong to current call (currentArgOutCount < argOutCount),
                // in which case we will have nulls in argOutSyms[] in start of section for current call, because we fill from tail.
                // Example: StartCall 3, ArgOut1,.. ArgOut2, Bailout,.. Argout3 -> [NULL, ArgOut1, ArgOut2].
            }
            else
            {
                // StartCall entry: closes out the ArgOuts counted above.
                Assert(opnd->IsRegOpnd());
                StackSym * sym = opnd->AsRegOpnd()->m_sym;
                Assert(!sym->IsArgSlotSym());
                Assert(sym->m_isSingleDef);
                Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);

                Assert(startCallNumber != 0);
                startCallNumber--;

                bailOutInfo->startCallFunc[startCallNumber] = sym->m_instrDef->m_func;
#ifdef _M_IX86
                if (this->currentRegion && (this->currentRegion->GetType() == RegionTypeTry || this->currentRegion->GetType() == RegionTypeFinally))
                {
                    // For a bailout in argument evaluation from an EH region, the esp is offset by the TryCatch helper's frame. So, the argouts are not actually pushed at the
                    // offsets stored in the bailout record, which are relative to ebp. Need to restore the argouts from the actual value of esp before calling the Bailout helper.
                    // For nested calls, argouts for the outer call need to be restored from an offset of stack-adjustment-done-by-the-inner-call from esp.
                    if (startCallNumber + 1 == bailOutInfo->startCallCount)
                    {
                        argRestoreAdjustCount = 0;
                    }
                    else
                    {
                        argRestoreAdjustCount = bailOutInfo->startCallInfo[startCallNumber + 1].argRestoreAdjustCount + bailOutInfo->startCallInfo[startCallNumber + 1].argCount;
                        // Account for stack-alignment padding the inner call added.
                        if ((Math::Align<int32>(bailOutInfo->startCallInfo[startCallNumber + 1].argCount * MachPtr, MachStackAlignment) - (bailOutInfo->startCallInfo[startCallNumber + 1].argCount * MachPtr)) != 0)
                        {
                            argRestoreAdjustCount++;
                        }
                    }
                }

                if (sym->m_isInlinedArgSlot)
                {
                    bailOutInfo->inlinedStartCall->Set(startCallNumber);
                }
#endif
                uint argOutCount = sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
                Assert(totalOutParamCount >= argOutCount);
                Assert(argOutCount >= currentArgOutCount);

                bailOutInfo->RecordStartCallInfo(startCallNumber, argRestoreAdjustCount, sym->m_instrDef);
                totalOutParamCount -= argOutCount;
                currentArgOutCount = 0;
            }
        }
        NEXT_SLISTBASE_ENTRY;

        // The walk must consume every counted StartCall and out-param exactly once.
        Assert(totalOutParamCount == 0);
        Assert(startCallNumber == 0);
        Assert(currentArgOutCount == 0);
    }

    // Save the constant values that we know so we can restore them directly.
    // This allows us to dead store the constant value assign.
    this->CaptureValues(block, bailOutInfo);
}
  925. IR::ByteCodeUsesInstr *
  926. GlobOpt::InsertByteCodeUses(IR::Instr * instr, bool includeDef)
  927. {
  928. IR::ByteCodeUsesInstr * byteCodeUsesInstr = nullptr;
  929. Assert(this->byteCodeUses);
  930. IR::RegOpnd * dstOpnd = nullptr;
  931. if (includeDef)
  932. {
  933. IR::Opnd * opnd = instr->GetDst();
  934. if (opnd && opnd->IsRegOpnd())
  935. {
  936. dstOpnd = opnd->AsRegOpnd();
  937. if (dstOpnd->GetIsJITOptimizedReg() || !dstOpnd->m_sym->HasByteCodeRegSlot())
  938. {
  939. dstOpnd = nullptr;
  940. }
  941. }
  942. }
  943. if (!this->byteCodeUses->IsEmpty() || this->propertySymUse || dstOpnd != nullptr)
  944. {
  945. byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
  946. if (!this->byteCodeUses->IsEmpty())
  947. {
  948. byteCodeUsesInstr->SetBV(byteCodeUses->CopyNew(instr->m_func->m_alloc));
  949. }
  950. if (dstOpnd != nullptr)
  951. {
  952. byteCodeUsesInstr->SetFakeDst(dstOpnd);
  953. }
  954. if (this->propertySymUse)
  955. {
  956. byteCodeUsesInstr->propertySymUse = this->propertySymUse;
  957. }
  958. instr->InsertBefore(byteCodeUsesInstr);
  959. }
  960. JitAdelete(this->alloc, this->byteCodeUses);
  961. this->byteCodeUses = nullptr;
  962. this->propertySymUse = nullptr;
  963. return byteCodeUsesInstr;
  964. }
// Replaces 'instr' with a ByteCodeUsesInstr recording the byte-code symbols the
// instruction used (and, via the fake dst, defined), so bailout can still restore
// them after the instruction itself is deleted. Returns the replacement, or
// nullptr when the instruction had no byte-code-visible uses to preserve.
IR::ByteCodeUsesInstr *
GlobOpt::ConvertToByteCodeUses(IR::Instr * instr)
{
#if DBG
    // Capture the pre-optimization uses so verification can compare them later.
    PropertySym *propertySymUseBefore = NULL;
    Assert(this->byteCodeUses == nullptr);
    this->byteCodeUsesBeforeOpt->ClearAll();
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
#endif
    // Collect the instruction's current byte-code uses into this->byteCodeUses,
    // then emit them as a ByteCodeUsesInstr placed before 'instr'
    // (includeDef = true: also record the fake dst). Order matters: the new
    // instruction must be in place before 'instr' is removed.
    this->CaptureByteCodeSymUses(instr);
    IR::ByteCodeUsesInstr * byteCodeUsesInstr = this->InsertByteCodeUses(instr, true);
    instr->Remove();
    if (byteCodeUsesInstr)
    {
        // Fold with neighboring ByteCodeUses instructions (see ByteCodeUsesInstr::Aggregate).
        byteCodeUsesInstr->Aggregate();
    }
    return byteCodeUsesInstr;
}
  983. bool
  984. GlobOpt::MayNeedBailOut(Loop * loop) const
  985. {
  986. Assert(this->IsLoopPrePass());
  987. return loop->CanHoistInvariants() ||
  988. this->DoFieldCopyProp(loop) || (this->DoFieldHoisting(loop) && !loop->fieldHoistCandidates->IsEmpty());
  989. }
  990. bool
  991. GlobOpt::MaySrcNeedBailOnImplicitCall(IR::Opnd const * opnd, Value const * val)
  992. {
  993. switch (opnd->GetKind())
  994. {
  995. case IR::OpndKindAddr:
  996. case IR::OpndKindFloatConst:
  997. case IR::OpndKindIntConst:
  998. return false;
  999. case IR::OpndKindReg:
  1000. // Only need implicit call if the operation will call ToPrimitive and we haven't prove
  1001. // that it is already a primitive
  1002. return
  1003. !(val && val->GetValueInfo()->IsPrimitive()) &&
  1004. !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
  1005. !opnd->AsRegOpnd()->m_sym->IsInt32() &&
  1006. !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
  1007. !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
  1008. !opnd->AsRegOpnd()->m_sym->IsIntConst();
  1009. case IR::OpndKindSym:
  1010. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  1011. {
  1012. IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
  1013. if (!propertySymOpnd->MayHaveImplicitCall())
  1014. {
  1015. return false;
  1016. }
  1017. }
  1018. return true;
  1019. default:
  1020. return true;
  1021. };
  1022. }
  1023. bool
  1024. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val) const
  1025. {
  1026. Assert(!this->IsLoopPrePass());
  1027. return this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val, this->currentBlock,
  1028. (!this->currentBlock->globOptData.liveFields->IsEmpty()), !this->currentBlock->IsLandingPad(), true);
  1029. }
  1030. bool
  1031. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Value const * src2Val, BasicBlock const * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass) const
  1032. {
  1033. if (mayNeedImplicitCallBailOut &&
  1034. !instr->CallsAccessor() &&
  1035. (
  1036. NeedBailOnImplicitCallForLiveValues(block, isForwardPass) ||
  1037. NeedBailOnImplicitCallForCSE(block, isForwardPass) ||
  1038. NeedBailOnImplicitCallWithFieldOpts(block->loop, hasLiveFields) ||
  1039. NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass)
  1040. ) &&
  1041. (!instr->HasTypeCheckBailOut() && MayNeedBailOnImplicitCall(instr, src1Val, src2Val)))
  1042. {
  1043. return true;
  1044. }
  1045. #if DBG
  1046. if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryImplicitCallFlag) &&
  1047. !instr->HasBailOutInfo() && MayNeedBailOnImplicitCall(instr, nullptr, nullptr))
  1048. {
  1049. // always add implicit call bailout even if we don't need it, but only on opcode that supports it
  1050. return true;
  1051. }
  1052. #endif
  1053. return false;
  1054. }
  1055. bool
  1056. GlobOpt::IsTypeCheckProtected(const IR::Instr * instr)
  1057. {
  1058. #if DBG
  1059. IR::Opnd* dst = instr->GetDst();
  1060. IR::Opnd* src1 = instr->GetSrc1();
  1061. IR::Opnd* src2 = instr->GetSrc2();
  1062. AssertMsg(!dst || !dst->IsSymOpnd() || !dst->AsSymOpnd()->IsPropertySymOpnd() ||
  1063. !src1 || !src1->IsSymOpnd() || !src1->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src1 and dst be a PropertySymOpnd.");
  1064. AssertMsg(!src2 || !src2->IsSymOpnd() || !src2->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src2 be a PropertySymOpnd.");
  1065. #endif
  1066. IR::Opnd * opnd = instr->GetDst();
  1067. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1068. {
  1069. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1070. }
  1071. opnd = instr->GetSrc1();
  1072. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  1073. {
  1074. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  1075. }
  1076. return false;
  1077. }
// Decides whether 'instr' (a field access described by propertySymOpnd) needs a
// type-check bailout and, if so, which kind. Returns true and sets *pBailOutKind
// when a bailout must be attached; otherwise returns false, sets *pBailOutKind to
// BailOutInvalid, and may set *pIsTypeCheckProtected when an upstream check
// already protects this access.
bool
GlobOpt::NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind)
{
    if (instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType || instr->m_opcode == Js::OpCode::LdMethodFldPolyInlineMiss)
    {
        // These opcodes carry their own guard/miss handling; no type-check bailout.
        return false;
    }
    // CheckFixedFld always requires a type check and bailout either at the instruction or upstream.
    Assert(instr->m_opcode != Js::OpCode::CheckFixedFld || (propertySymOpnd->UsesFixedValue() && propertySymOpnd->MayNeedTypeCheckProtection()));

    if (propertySymOpnd->MayNeedTypeCheckProtection())
    {
        bool isCheckFixedFld = instr->m_opcode == Js::OpCode::CheckFixedFld;
        AssertMsg(!isCheckFixedFld || !PHASE_OFF(Js::FixedMethodsPhase, instr->m_func) ||
            !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func), "CheckFixedFld with fixed method/data phase disabled?");
        Assert(!isStore || !isCheckFixedFld);
        // We don't share caches between field loads and stores. We should never have a field store involving a proto cache.
        Assert(!isStore || !propertySymOpnd->IsLoadedFromProto());

        if (propertySymOpnd->NeedsTypeCheckAndBailOut())
        {
            // Pick the kind along two axes: equivalent vs. exact (mono) type check,
            // and fixed-field vs. plain field access.
            *pBailOutKind = propertySymOpnd->HasEquivalentTypeSet() && !propertySymOpnd->MustDoMonoCheck() ?
                (isCheckFixedFld ? IR::BailOutFailedEquivalentFixedFieldTypeCheck : IR::BailOutFailedEquivalentTypeCheck) :
                (isCheckFixedFld ? IR::BailOutFailedFixedFieldTypeCheck : IR::BailOutFailedTypeCheck);
            return true;
        }
        else
        {
            // No check needed here; report whether an upstream check protects us.
            *pIsTypeCheckProtected = propertySymOpnd->IsTypeCheckProtected();
            *pBailOutKind = IR::BailOutInvalid;
            return false;
        }
    }
    else
    {
        Assert(instr->m_opcode != Js::OpCode::CheckFixedFld);
        *pBailOutKind = IR::BailOutInvalid;
        return false;
    }
}
// Conservatively determines whether 'instr' may make an implicit call (script
// re-entry through ToPrimitive, getters/setters, or array-access helpers).
// src1Val/src2Val are the sources' known values (may be nullptr), used to prove
// operands are already primitives.
bool
GlobOpt::MayNeedBailOnImplicitCall(IR::Instr const * instr, Value const * src1Val, Value const * src2Val)
{
    if (!instr->HasAnyImplicitCalls())
    {
        return false;
    }

    bool isLdElem = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::LdLen_A:
    {
        // LdLen has no implicit call when the base is a string or a real array
        // (not object-with-array), or when the attached bailout guarantees it.
        const ValueType baseValueType(instr->GetSrc1()->GetValueType());
        return
            !(
                baseValueType.IsString() ||
                (baseValueType.IsAnyArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray) ||
                (instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutOnIrregularLength) // guarantees no implicit calls
            );
    }

    case Js::OpCode::LdElemI_A:
    case Js::OpCode::LdMethodElem:
    case Js::OpCode::InlineArrayPop:
        isLdElem = true;
        // fall-through

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::InlineArrayPush:
    {
        if(!instr->HasBailOutInfo())
        {
            return true;
        }

        // The following bailout kinds already prevent implicit calls from happening. Any conditions that could trigger an
        // implicit call result in a pre-op bailout.
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        return
            !(
                (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly ||
                bailOutKind & IR::BailOutOnArrayAccessHelperCall ||
                (isLdElem && bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly)
            );
    }

    default:
        break;
    }

    if (OpCodeAttr::HasImplicitCall(instr->m_opcode))
    {
        // Operation has an implicit call regardless of operand attributes.
        return true;
    }

    // Otherwise decide from the operands: first the dst...
    IR::Opnd const * opnd = instr->GetDst();
    if (opnd)
    {
        switch (opnd->GetKind())
        {
        case IR::OpndKindReg:
            break;

        case IR::OpndKindSym:
            // No implicit call if we are just storing to a stack sym. Note that stores to non-configurable root
            // object fields may still need implicit call bailout. That's because a non-configurable field may still
            // become read-only and thus the store field will not take place (or throw in strict mode). Hence, we
            // can't optimize (e.g. copy prop) across such field stores.
            if (opnd->AsSymOpnd()->m_sym->IsStackSym())
            {
                return false;
            }

            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                IR::PropertySymOpnd const * propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
                if (!propertySymOpnd->MayHaveImplicitCall())
                {
                    return false;
                }
            }

            return true;

        case IR::OpndKindIndir:
            return true;

        default:
            Assume(UNREACHED);
        }
    }

    // ...then each source, which may trigger ToPrimitive.
    opnd = instr->GetSrc1();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src1Val))
    {
        return true;
    }

    opnd = instr->GetSrc2();
    if (opnd != nullptr && MaySrcNeedBailOnImplicitCall(opnd, src2Val))
    {
        return true;
    }

    return false;
}
// Converts 'instr' into a post-op bailout instruction: the bailout target is the
// next instruction at a distinct, later byte-code offset, so on bailout the
// interpreter resumes *after* the operation has taken effect. Updates *pInstr to
// the converted instruction.
void
GlobOpt::GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind)
{
    Assert(pInstr);

    IR::Instr* instr = *pInstr;
    Assert(instr);

    IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
    uint32 currentOffset = instr->GetByteCodeOffset();
    // Skip instructions with no byte-code offset or the same offset as 'instr' —
    // those still belong to the current byte-code operation.
    while (nextInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
        nextInstr->GetByteCodeOffset() == currentOffset)
    {
        nextInstr = nextInstr->GetNextRealInstrOrLabel();
    }
    // This can happen due to break block removal
    // (following instructions may carry an *earlier* offset; keep skipping until
    // we reach one strictly after the current operation).
    while (nextInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
        nextInstr->GetByteCodeOffset() < currentOffset)
    {
        nextInstr = nextInstr->GetNextRealInstrOrLabel();
    }
    IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(nextInstr, kind);
    if (this->currentBlock->GetLastInstr() == instr)
    {
        // 'instr' was the block's last instruction; keep block bookkeeping in sync.
        this->currentBlock->SetLastInstr(bailOutInstr);
    }
    FillBailOutInfo(this->currentBlock, bailOutInstr->GetBailOutInfo());
    *pInstr = bailOutInstr;
}
  1237. void
  1238. GlobOpt::GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind)
  1239. {
  1240. Assert(pInstr);
  1241. IR::Instr * instr = *pInstr;
  1242. Assert(instr);
  1243. Assert(instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);
  1244. Assert(bailOutKind != IR::BailOutInvalid);
  1245. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(instr, bailOutKind);
  1246. if (this->currentBlock->GetLastInstr() == instr)
  1247. {
  1248. this->currentBlock->SetLastInstr(bailOutInstr);
  1249. }
  1250. FillBailOutInfo(currentBlock, bailOutInstr->GetBailOutInfo());
  1251. *pInstr = bailOutInstr;
  1252. }
  1253. IR::Instr *
  1254. GlobOpt::EnsureBailTarget(Loop * loop)
  1255. {
  1256. BailOutInfo * bailOutInfo = loop->bailOutInfo;
  1257. IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
  1258. if (bailOutInstr == nullptr)
  1259. {
  1260. bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailTarget, IR::BailOutShared, bailOutInfo, bailOutInfo->bailOutFunc);
  1261. loop->landingPad->InsertAfter(bailOutInstr);
  1262. }
  1263. return bailOutInstr;
  1264. }