// GlobOptBailout.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
// Capture what is known about stackSym's value at this point into bailOutInfo so the
// value can be restored directly on bailout (which in turn lets the defining assignment
// be dead-stored): int constants and var constants are stored by value; otherwise the
// copy-prop sym, if one exists in this block, is recorded.
void
GlobOpt::CaptureValue(BasicBlock *block, StackSym * stackSym, Value * value, BailOutInfo * bailOutInfo)
{
    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // TODO[generators][ianhall]: Enable constprop and copyprop for generator functions; see GlobOpt::CopyProp()
        // Even though CopyProp is disabled for generator functions we must also not put the copy-prop sym into the
        // bailOutInfo so that the bailOutInfo keeps track of the key sym in its byteCodeUpwardExposed list.
        return;
    }
    ValueInfo * valueInfo = value->GetValueInfo();
    // Only byte-code-visible syms (reg slot or arg slot) are capturable, and the caller
    // must pass the var version, never a type-specialized sym.
    Assert(stackSym->HasByteCodeRegSlot() || stackSym->HasArgSlotNum());
    Assert(!stackSym->IsTypeSpec());
    int32 intConstantValue;
    if (valueInfo->TryGetIntConstantValue(&intConstantValue))
    {
        // Known int constant: record the constant itself.
        BailoutConstantValue constValue;
        constValue.InitIntConstValue(intConstantValue);
        bailOutInfo->capturedValues.constantValues.PrependNode(this->func->m_alloc, stackSym, constValue);
    }
    else if (valueInfo->IsVarConstant())
    {
        // Known var constant (e.g. a fixed object/address): record the var value.
        BailoutConstantValue constValue;
        constValue.InitVarConstValue(valueInfo->AsVarConstant()->VarValue());
        bailOutInfo->capturedValues.constantValues.PrependNode(this->func->m_alloc, stackSym, constValue);
    }
    else
    {
        // Not a constant: fall back to the copy-prop sym, if any, so bailout can read
        // the value from the sym that actually holds it.
        StackSym * copyPropSym = this->GetCopyPropSym(block, stackSym, value);
        if (copyPropSym)
        {
            bailOutInfo->capturedValues.copyPropSyms.PrependNode(this->func->m_alloc, stackSym, copyPropSym);
        }
    }
}
// Walk the block's sym-to-value map and capture a restorable value (constant or
// copy-prop sym) for every stack sym that has a byte code register slot.
void
GlobOpt::CaptureValues(BasicBlock *block, BailOutInfo * bailOutInfo)
{
    FOREACH_GLOBHASHTABLE_ENTRY(bucket, block->globOptData.symToValueMap)
    {
        Value* value = bucket.element;
        ValueInfo * valueInfo = value->GetValueInfo();
        // Skip values with nothing restorable: no sym store and no known int constant.
        if (valueInfo->GetSymStore() == nullptr && !valueInfo->HasIntConstantValue())
        {
            continue;
        }
        Sym * sym = bucket.value;
        // Only stack syms visible to the byte code need to be restored on bailout.
        if (sym == nullptr || !sym->IsStackSym() || !sym->AsStackSym()->HasByteCodeRegSlot())
        {
            continue;
        }
        this->CaptureValue(block, sym->AsStackSym(), value, bailOutInfo);
    }
    NEXT_GLOBHASHTABLE_ENTRY;
}
// Record in bailOutInfo which byte-code-visible stack syms currently alias the
// arguments object in this block, so bailout can rematerialize them.
// The argObjSyms bit vector in bailOutInfo is allocated lazily from 'allocator'.
void
GlobOpt::CaptureArguments(BasicBlock *block, BailOutInfo * bailOutInfo, JitArenaAllocator *allocator)
{
    FOREACH_BITSET_IN_SPARSEBV(id, this->blockData.argObjSyms)
    {
        StackSym * stackSym = this->func->m_symTable->FindStackSym(id);
        Assert(stackSym != nullptr);
        // JIT-internal syms with no byte code register don't need restoring.
        if (!stackSym->HasByteCodeRegSlot())
        {
            continue;
        }
        if (!bailOutInfo->capturedValues.argObjSyms)
        {
            bailOutInfo->capturedValues.argObjSyms = JitAnew(allocator, BVSparse<JitArenaAllocator>, allocator);
        }
        bailOutInfo->capturedValues.argObjSyms->Set(id);
        // Add to BailOutInfo
    }
    NEXT_BITSET_IN_SPARSEBV
}
  81. void
  82. GlobOpt::TrackByteCodeSymUsed(IR::Instr * instr, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
  83. {
  84. IR::Opnd * src = instr->GetSrc1();
  85. if (src)
  86. {
  87. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  88. src = instr->GetSrc2();
  89. if (src)
  90. {
  91. TrackByteCodeSymUsed(src, instrByteCodeStackSymUsed, pPropertySym);
  92. }
  93. }
  94. #if DBG
  95. // There should be no more than one property sym used.
  96. PropertySym *propertySymFromSrc = *pPropertySym;
  97. #endif
  98. IR::Opnd * dst = instr->GetDst();
  99. if (dst)
  100. {
  101. StackSym *stackSym = dst->GetStackSym();
  102. // We want stackSym uses: IndirOpnd and SymOpnds of propertySyms.
  103. // RegOpnd and SymOPnd of StackSyms are stack sym defs.
  104. if (stackSym == NULL)
  105. {
  106. TrackByteCodeSymUsed(dst, instrByteCodeStackSymUsed, pPropertySym);
  107. }
  108. }
  109. #if DBG
  110. AssertMsg(propertySymFromSrc == NULL || propertySymFromSrc == *pPropertySym,
  111. "Lost a property sym use?");
  112. #endif
  113. }
  114. void
  115. GlobOpt::TrackByteCodeSymUsed(IR::RegOpnd * regOpnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  116. {
  117. // Check JITOptimizedReg to catch case where baseOpnd of indir was optimized.
  118. if (!regOpnd->GetIsJITOptimizedReg())
  119. {
  120. TrackByteCodeSymUsed(regOpnd->m_sym, instrByteCodeStackSymUsed);
  121. }
  122. }
// Dispatch byte code use tracking based on the operand kind:
// - RegOpnd: track the reg's sym.
// - SymOpnd: track the stack sym, or (for a property sym) its base stack sym and
//   report the property sym through *pPropertySym.
// - IndirOpnd: track the base and (if present) index reg operands.
// JIT-optimized operands are not byte code uses and are skipped entirely.
void
GlobOpt::TrackByteCodeSymUsed(IR::Opnd * opnd, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed, PropertySym **pPropertySym)
{
    if (opnd->GetIsJITOptimizedReg())
    {
        AssertMsg(!opnd->IsIndirOpnd(), "TrackByteCodeSymUsed doesn't expect IndirOpnd with IsJITOptimizedReg turned on");
        return;
    }
    switch(opnd->GetKind())
    {
    case IR::OpndKindReg:
        TrackByteCodeSymUsed(opnd->AsRegOpnd(), instrByteCodeStackSymUsed);
        break;
    case IR::OpndKindSym:
    {
        Sym * sym = opnd->AsSymOpnd()->m_sym;
        if (sym->IsStackSym())
        {
            TrackByteCodeSymUsed(sym->AsStackSym(), instrByteCodeStackSymUsed);
        }
        else
        {
            // Property sym: the use is of its base stack sym; remember the property
            // sym itself for the caller (at most one per instruction).
            TrackByteCodeSymUsed(sym->AsPropertySym()->m_stackSym, instrByteCodeStackSymUsed);
            *pPropertySym = sym->AsPropertySym();
        }
    }
    break;
    case IR::OpndKindIndir:
        TrackByteCodeSymUsed(opnd->AsIndirOpnd()->GetBaseOpnd(), instrByteCodeStackSymUsed);
        {
            IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
            if (indexOpnd)
            {
                TrackByteCodeSymUsed(indexOpnd, instrByteCodeStackSymUsed);
            }
        }
        break;
    }
}
  162. void
  163. GlobOpt::TrackByteCodeSymUsed(StackSym * sym, BVSparse<JitArenaAllocator> * instrByteCodeStackSymUsed)
  164. {
  165. // We only care about stack sym that has a corresponding byte code register
  166. if (sym->HasByteCodeRegSlot())
  167. {
  168. if (sym->IsTypeSpec())
  169. {
  170. // It has to have a var version for byte code regs
  171. sym = sym->GetVarEquivSym(nullptr);
  172. }
  173. instrByteCodeStackSymUsed->Set(sym->m_id);
  174. }
  175. }
  176. void
  177. GlobOpt::MarkNonByteCodeUsed(IR::Instr * instr)
  178. {
  179. IR::Opnd * dst = instr->GetDst();
  180. if (dst)
  181. {
  182. MarkNonByteCodeUsed(dst);
  183. }
  184. IR::Opnd * src1 = instr->GetSrc1();
  185. if (src1)
  186. {
  187. MarkNonByteCodeUsed(src1);
  188. IR::Opnd * src2 = instr->GetSrc2();
  189. if (src2)
  190. {
  191. MarkNonByteCodeUsed(src2);
  192. }
  193. }
  194. }
  195. void
  196. GlobOpt::MarkNonByteCodeUsed(IR::Opnd * opnd)
  197. {
  198. switch(opnd->GetKind())
  199. {
  200. case IR::OpndKindReg:
  201. opnd->AsRegOpnd()->SetIsJITOptimizedReg(true);
  202. break;
  203. case IR::OpndKindIndir:
  204. opnd->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  205. {
  206. IR::RegOpnd * indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  207. if (indexOpnd)
  208. {
  209. indexOpnd->SetIsJITOptimizedReg(true);
  210. }
  211. }
  212. break;
  213. }
  214. }
// Capture the byte code syms used by instr into this->byteCodeUses (and the property
// sym, if any, into this->propertySymUse) before the instruction is edited by the
// optimizer. Idempotent: a second call on the same instruction is a no-op.
void
GlobOpt::CaptureByteCodeSymUses(IR::Instr * instr)
{
    if (this->byteCodeUses)
    {
        // We already captured it before.
        return;
    }
    Assert(this->propertySymUse == NULL);
    this->byteCodeUses = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUses, &this->propertySymUse);
    // byteCodeUsesBeforeOpt was computed before optimization; if it differs, someone
    // edited the instruction before capturing, which would lose uses.
    AssertMsg(this->byteCodeUses->Equal(this->byteCodeUsesBeforeOpt),
        "Instruction edited before capturing the byte code use");
}
// Track the call sequence (StartCall / ArgOut / call-end) flowing through the current
// block so bailout info can record outstanding out-params and start calls, and so
// inlinee frames can be reconstructed on bailout. Counters maintained here:
// startCallCount, argOutCount, totalOutParamCount, inlinedArgOutCount, and the
// callSequence list of StartCall dsts and tracked ArgOut dsts.
void
GlobOpt::TrackCalls(IR::Instr * instr)
{
    // Keep track of out params for bailout
    switch (instr->m_opcode)
    {
    case Js::OpCode::StartCall:
        Assert(!this->isCallHelper);
        Assert(instr->GetDst()->IsRegOpnd());
        Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
        // Lazily allocate the call sequence list for this block.
        if (this->blockData.callSequence == nullptr)
        {
            this->blockData.callSequence = JitAnew(this->alloc, SListBase<IR::Opnd *>);
            this->currentBlock->globOptData.callSequence = this->blockData.callSequence;
        }
        this->blockData.callSequence->Prepend(this->alloc, instr->GetDst());
        this->currentBlock->globOptData.totalOutParamCount += instr->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
        this->currentBlock->globOptData.startCallCount++;
        break;
    case Js::OpCode::BytecodeArgOutCapture:
    {
        // Captured byte code arg-out: tracked directly in the call sequence.
        this->blockData.callSequence->Prepend(this->alloc, instr->GetDst());
        this->currentBlock->globOptData.argOutCount++;
        break;
    }
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    {
        IR::Opnd * opnd = instr->GetDst();
        if (opnd->IsSymOpnd())
        {
            Assert(!this->isCallHelper);
            Assert(!this->blockData.callSequence->Empty());
            StackSym* stackSym = opnd->AsSymOpnd()->m_sym->AsStackSym();
            // These scenarios are already tracked using BytecodeArgOutCapture,
            // and we don't want to be tracking ArgOut_A_FixupForStackArgs as these are only visible to the JIT and we should not be restoring them upon bailout.
            if (!stackSym->m_isArgCaptured && instr->m_opcode != Js::OpCode::ArgOut_A_FixupForStackArgs)
            {
                this->blockData.callSequence->Prepend(this->alloc, instr->GetDst());
                this->currentBlock->globOptData.argOutCount++;
            }
            Assert(stackSym->IsArgSlotSym());
            if (stackSym->m_isInlinedArgSlot)
            {
                this->currentBlock->globOptData.inlinedArgOutCount++;
                // We want to update the offsets only once: don't do in prepass.
                if (!this->IsLoopPrePass() && stackSym->m_offset >= 0)
                {
                    Func * currentFunc = instr->m_func;
                    stackSym->FixupStackOffset(currentFunc);
                }
            }
        }
        else
        {
            // It is a reg opnd if it is a helper call
            // It should be all ArgOut until the CallHelper instruction
            Assert(opnd->IsRegOpnd());
            this->isCallHelper = true;
        }
        // Once the fixup has been performed (non-prepass), demote the opcode so
        // later passes see a regular inline arg-out.
        if (instr->m_opcode == Js::OpCode::ArgOut_A_FixupForStackArgs && !this->IsLoopPrePass())
        {
            instr->m_opcode = Js::OpCode::ArgOut_A_Inline;
        }
        break;
    }
    case Js::OpCode::InlineeStart:
        // Entering an inlinee: switch curFunc to the inlinee and close out the
        // enclosing call sequence entry for this call.
        Assert(instr->m_func->GetParentFunc() == this->blockData.curFunc);
        Assert(instr->m_func->GetParentFunc());
        this->blockData.curFunc = instr->m_func;
        this->currentBlock->globOptData.curFunc = instr->m_func;
        this->EndTrackCall(instr);
        if (DoInlineArgsOpt(instr->m_func))
        {
            // Snapshot liveness of the specialized sym sets at the inlinee boundary;
            // RecordInlineeFrameInfo consumes (and frees) these at InlineeEnd.
            instr->m_func->m_hasInlineArgsOpt = true;
            InlineeFrameInfo* frameInfo = InlineeFrameInfo::New(func->m_alloc);
            instr->m_func->frameInfo = frameInfo;
            frameInfo->floatSyms = currentBlock->globOptData.liveFloat64Syms->CopyNew(this->alloc);
            frameInfo->intSyms = currentBlock->globOptData.liveInt32Syms->MinusNew(currentBlock->globOptData.liveLossyInt32Syms, this->alloc);
            // SIMD_JS
            frameInfo->simd128F4Syms = currentBlock->globOptData.liveSimd128F4Syms->CopyNew(this->alloc);
            frameInfo->simd128I4Syms = currentBlock->globOptData.liveSimd128I4Syms->CopyNew(this->alloc);
        }
        break;
    case Js::OpCode::EndCallForPolymorphicInlinee:
        // Have this opcode mimic the functions of both InlineeStart and InlineeEnd in the bailout block of a polymorphic call inlined using fixed methods.
        this->EndTrackCall(instr);
        break;
    case Js::OpCode::CallHelper:
    case Js::OpCode::IsInst:
        Assert(this->isCallHelper);
        this->isCallHelper = false;
        break;
    case Js::OpCode::BailOnNoProfile:
    case Js::OpCode::InlineThrow:
    case Js::OpCode::InlineRuntimeTypeError:
    case Js::OpCode::InlineRuntimeReferenceError:
        //We are not going to see an inlinee end
        this->func->UpdateMaxInlineeArgOutCount(this->currentBlock->globOptData.inlinedArgOutCount);
        break;
    case Js::OpCode::InlineeEnd:
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            RecordInlineeFrameInfo(instr);
        }
        EndTrackingOfArgObjSymsForInlinee();
        Assert(this->currentBlock->globOptData.inlinedArgOutCount >= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false));
        this->func->UpdateMaxInlineeArgOutCount(this->currentBlock->globOptData.inlinedArgOutCount);
        this->currentBlock->globOptData.inlinedArgOutCount -= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false);
        break;
    case Js::OpCode::InlineeMetaArg:
    {
        Assert(instr->GetDst()->IsSymOpnd());
        StackSym * stackSym = instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
        Assert(stackSym->IsArgSlotSym());
        // InlineeMetaArg has the m_func set as the "inlinee" and not the "inliner"
        // TODO: Review this and fix the m_func of InlineeMetaArg to be "inliner" (as for the rest of the ArgOut's)
        // We want to update the offsets only once: don't do in prepass.
        if (!this->IsLoopPrePass())
        {
            Func * currentFunc = instr->m_func->GetParentFunc();
            stackSym->FixupStackOffset(currentFunc);
        }
        this->currentBlock->globOptData.inlinedArgOutCount++;
        break;
    }
    case Js::OpCode::InlineBuiltInStart:
        this->inInlinedBuiltIn = true;
        break;
    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::InlineBuiltInEnd:
    {
        // If extra bailouts were added for the InlineMathXXX call itself,
        // move InlineeBuiltInStart just above the InlineMathXXX.
        // This is needed so that the function argument has lifetime after all bailouts for InlineMathXXX,
        // otherwise when we bailout we would get wrong function.
        IR::Instr* inlineBuiltInStartInstr = instr->m_prev;
        while (inlineBuiltInStartInstr->m_opcode != Js::OpCode::InlineBuiltInStart)
        {
            inlineBuiltInStartInstr = inlineBuiltInStartInstr->m_prev;
        }
        IR::Instr *byteCodeUsesInstr = inlineBuiltInStartInstr->m_prev;
        // Find the insertion point: the earliest ByteCodeUses in the run of
        // ByteCodeUses instructions immediately preceding this end instruction.
        IR::Instr * insertBeforeInstr = instr->m_prev;
        IR::Instr * tmpInstr = insertBeforeInstr;
        while(tmpInstr->m_opcode != Js::OpCode::InlineBuiltInStart )
        {
            if(tmpInstr->m_opcode == Js::OpCode::ByteCodeUses)
            {
                insertBeforeInstr = tmpInstr;
            }
            tmpInstr = tmpInstr->m_prev;
        }
        inlineBuiltInStartInstr->Unlink();
        if(insertBeforeInstr == instr->m_prev)
        {
            insertBeforeInstr->InsertBefore(inlineBuiltInStartInstr);
        }
        else
        {
            insertBeforeInstr->m_prev->InsertBefore(inlineBuiltInStartInstr);
        }
        // Need to move the byte code uses instructions associated with inline built-in start instruction as well. For instance,
        // copy-prop may have replaced the function sym and inserted a byte code uses for the original sym holding the function.
        // That byte code uses instruction needs to appear after bailouts inserted for the InlinMathXXX instruction since the
        // byte code register holding the function object needs to be restored on bailout.
        IR::Instr *const insertByteCodeUsesAfterInstr = inlineBuiltInStartInstr->m_prev;
        if(byteCodeUsesInstr != insertByteCodeUsesAfterInstr)
        {
            // The InlineBuiltInStart instruction was moved, look for its ByteCodeUses instructions that also need to be moved
            while(
                byteCodeUsesInstr->IsByteCodeUsesInstr() &&
                byteCodeUsesInstr->AsByteCodeUsesInstr()->GetByteCodeOffset() == inlineBuiltInStartInstr->GetByteCodeOffset())
            {
                IR::Instr *const instrToMove = byteCodeUsesInstr;
                byteCodeUsesInstr = byteCodeUsesInstr->m_prev;
                instrToMove->Unlink();
                insertByteCodeUsesAfterInstr->InsertAfter(instrToMove);
            }
        }
        // The following code makes more sense to be processed when we hit InlineeBuiltInStart,
        // but when extra bailouts are added for the InlineMathXXX and InlineArrayPop instructions itself, those bailouts
        // need to know about current bailout record, but since they are added after TrackCalls is called
        // for InlineeBuiltInStart, we can't clear current record when got InlineeBuiltInStart
        // Do not track calls for InlineNonTrackingBuiltInEnd, as it is already tracked for InlineArrayPop
        if(instr->m_opcode == Js::OpCode::InlineBuiltInEnd)
        {
            this->EndTrackCall(instr);
        }
        Assert(this->currentBlock->globOptData.inlinedArgOutCount >= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false));
        this->currentBlock->globOptData.inlinedArgOutCount -= instr->GetArgOutCount(/*getInterpreterArgOutCount*/ false);
        this->inInlinedBuiltIn = false;
        break;
    }
    case Js::OpCode::InlineArrayPop:
    {
        // EndTrackCall should be called here as the Post-op BailOutOnImplicitCalls will bail out to the instruction after the Pop function call instr.
        // This bailout shouldn't be tracking the call sequence as it will then erroneously reserve stack space for arguments when the call would have already happened
        // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for InlineArrayPop.
        this->EndTrackCall(instr);
        break;
    }
    default:
        if (OpCodeAttr::CallInstr(instr->m_opcode))
        {
            this->EndTrackCall(instr);
            if (this->inInlinedBuiltIn && instr->m_opcode == Js::OpCode::CallDirect)
            {
                // We can end up in this situation when a built-in apply target is inlined to a CallDirect. We have the following IR:
                //
                // StartCall
                // ArgOut_InlineBuiltIn
                // ArgOut_InlineBuiltIn
                // ArgOut_InlineBuiltIn
                // InlineBuiltInStart
                //         ArgOut_A_InlineSpecialized
                //         ArgOut_A
                //         ArgOut_A
                //         CallDirect
                // InlineNonTrackingBuiltInEnd
                //
                // We need to call EndTrackCall twice for CallDirect in this case. The CallDirect may get a BailOutOnImplicitCalls later,
                // but it should not be tracking the call sequence for the apply call as it is a post op bailout and the call would have
                // happened when we bail out.
                // Can't wait till InlineBuiltinEnd like we do for other InlineMathXXX because by then we would have filled bailout info for the BailOutOnImplicitCalls for CallDirect.
                this->EndTrackCall(instr);
            }
        }
        break;
    }
}
// At InlineeEnd, record the function object and every argument of the inlinee frame
// into its InlineeFrameInfo so the frame can be reconstructed on bailout. Arguments
// are recorded as constants when known, otherwise as the sym currently holding the
// live version of the value (var, int32, float64, or SIMD equivalent, chosen from the
// liveness snapshots taken at InlineeStart). The snapshots are freed once recorded.
void GlobOpt::RecordInlineeFrameInfo(IR::Instr* inlineeEnd)
{
    // Offsets/liveness are only final in the real pass.
    if (this->IsLoopPrePass())
    {
        return;
    }
    InlineeFrameInfo* frameInfo = inlineeEnd->m_func->frameInfo;
    if (frameInfo->isRecorded)
    {
        Assert(frameInfo->function.type != InlineeFrameInfoValueType_None);
        // Due to Cmp peeps in flow graph - InlineeEnd can be cloned.
        return;
    }
    inlineeEnd->IterateArgInstrs([=] (IR::Instr* argInstr)
    {
        if (argInstr->m_opcode == Js::OpCode::InlineeStart)
        {
            // The InlineeStart's src1 is the inlinee's function object.
            Assert(frameInfo->function.type == InlineeFrameInfoValueType_None);
            IR::RegOpnd* functionObject = argInstr->GetSrc1()->AsRegOpnd();
            if (functionObject->m_sym->IsConst())
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym->GetConstValueForBailout());
            }
            else
            {
                frameInfo->function = InlineFrameInfoValue(functionObject->m_sym);
            }
        }
        else
        {
            Js::ArgSlot argSlot = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->GetArgSlotNum();
            IR::Opnd* argOpnd = argInstr->GetSrc1();
            InlineFrameInfoValue frameInfoValue;
            StackSym* argSym = argOpnd->GetStackSym();
            if (!argSym)
            {
                // No sym: the operand itself carries the constant value.
                frameInfoValue = InlineFrameInfoValue(argOpnd->GetConstValue());
            }
            else if (argSym->IsConst())
            {
                frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
            }
            else
            {
                // Prefer the copy-prop sym so the recorded sym matches what is live.
                if (PHASE_ON(Js::CopyPropPhase, func))
                {
                    Value* value = FindValue(argSym);
                    StackSym * copyPropSym = this->GetCopyPropSym(this->currentBlock, argSym, value);
                    if (copyPropSym)
                    {
                        argSym = copyPropSym;
                    }
                }
                GlobOptBlockData& globOptData = this->currentBlock->globOptData;
                // Pick the specialized sym version that is actually live at the
                // inlinee boundary (per the snapshots taken at InlineeStart).
                // NOTE(review): TestEmpty()+Test() pattern assumed to mean "bit is set
                // in a non-empty vector" - confirm against BVSparse semantics.
                if (frameInfo->intSyms->TestEmpty() && frameInfo->intSyms->Test(argSym->m_id))
                {
                    // Var version of the sym is not live, use the int32 version
                    argSym = argSym->GetInt32EquivSym(nullptr);
                    Assert(argSym);
                }
                else if (frameInfo->floatSyms->TestEmpty() && frameInfo->floatSyms->Test(argSym->m_id))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    argSym = argSym->GetFloat64EquivSym(nullptr);
                    Assert(argSym);
                }
                // SIMD_JS
                else if (frameInfo->simd128F4Syms->TestEmpty() && frameInfo->simd128F4Syms->Test(argSym->m_id))
                {
                    argSym = argSym->GetSimd128F4EquivSym(nullptr);
                }
                else if (frameInfo->simd128I4Syms->TestEmpty() && frameInfo->simd128I4Syms->Test(argSym->m_id))
                {
                    argSym = argSym->GetSimd128I4EquivSym(nullptr);
                }
                else
                {
                    Assert(globOptData.liveVarSyms->Test(argSym->m_id));
                }
                if (argSym->IsConst())
                {
                    frameInfoValue = InlineFrameInfoValue(argSym->GetConstValueForBailout());
                }
                else
                {
                    frameInfoValue = InlineFrameInfoValue(argSym);
                }
            }
            // Arg slots are 1-based; the arguments array is 0-based.
            Assert(argSlot >= 1);
            frameInfo->arguments->SetItem(argSlot - 1, frameInfoValue);
        }
        return false;
    });
    // The liveness snapshots are no longer needed once the frame is recorded.
    JitAdelete(this->alloc, frameInfo->intSyms);
    frameInfo->intSyms = nullptr;
    JitAdelete(this->alloc, frameInfo->floatSyms);
    frameInfo->floatSyms = nullptr;
    // SIMD_JS
    JitAdelete(this->alloc, frameInfo->simd128F4Syms);
    frameInfo->simd128F4Syms = nullptr;
    JitAdelete(this->alloc, frameInfo->simd128I4Syms);
    frameInfo->simd128I4Syms = nullptr;
    frameInfo->isRecorded = true;
}
// At the end of an inlinee, remove the inlinee's arguments-object syms from the
// current block's tracking and pop curFunc back to the parent. If the inlinee has
// argObjSyms that this block never saw (possible with a throw-containing block),
// stack allocation of the arguments object is abandoned.
void GlobOpt::EndTrackingOfArgObjSymsForInlinee()
{
    Assert(this->blockData.curFunc->GetParentFunc());
    if (this->blockData.curFunc->argObjSyms && TrackArgumentsObject())
    {
        BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
        // tempBv = inlinee's argObjSyms that are NOT tracked in this block.
        tempBv->Minus(this->blockData.curFunc->argObjSyms, this->blockData.argObjSyms);
        if(!tempBv->IsEmpty())
        {
            // This means there are arguments object symbols in the current function which are not in the current block.
            // This could happen when one of the blocks has a throw and arguments object aliased in it and other blocks don't see it.
            // Rare case, abort stack arguments optimization in this case.
            CannotAllocateArgumentsObjectOnStack();
        }
        else
        {
            // The block's set must be a superset of the inlinee's set; subtract them out.
            Assert(this->blockData.argObjSyms->OrNew(this->blockData.curFunc->argObjSyms)->Equal(this->blockData.argObjSyms));
            this->blockData.argObjSyms->Minus(this->blockData.curFunc->argObjSyms);
        }
        JitAdelete(this->tempAlloc, tempBv);
    }
    // Pop back to the inliner.
    this->blockData.curFunc = this->blockData.curFunc->GetParentFunc();
    this->currentBlock->globOptData.curFunc = this->blockData.curFunc;
}
// Close out the innermost tracked call at 'instr': pop all of its arg-out entries
// and the matching StartCall from the call sequence, and update the out-param and
// start-call counters. In DBG builds, verify that the number of args popped matches
// the StartCall's declared count (NewScObject variants carry an implicit arg1).
void GlobOpt::EndTrackCall(IR::Instr* instr)
{
    Assert(instr);
    Assert(OpCodeAttr::CallInstr(instr->m_opcode) || instr->m_opcode == Js::OpCode::InlineeStart || instr->m_opcode == Js::OpCode::InlineBuiltInEnd
        || instr->m_opcode == Js::OpCode::InlineArrayPop || instr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee);
    Assert(!this->isCallHelper);
    Assert(!this->blockData.callSequence->Empty());
#if DBG
    uint origArgOutCount = this->currentBlock->globOptData.argOutCount;
#endif
    // Pop the arg-out entries (arg slot syms) belonging to this call.
    while (this->blockData.callSequence->Head()->GetStackSym()->HasArgSlotNum())
    {
        this->currentBlock->globOptData.argOutCount--;
        this->blockData.callSequence->RemoveHead(this->alloc);
    }
    // The next entry must be the StartCall dst for this call.
    StackSym * sym = this->blockData.callSequence->Head()->AsRegOpnd()->m_sym->AsStackSym();
    this->blockData.callSequence->RemoveHead(this->alloc);
#if DBG
    Assert(sym->m_isSingleDef);
    Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);
    // Number of argument set should be the same as indicated at StartCall
    // except NewScObject has an implicit arg1
    Assert((uint)sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true) ==
        origArgOutCount - this->currentBlock->globOptData.argOutCount +
        (instr->m_opcode == Js::OpCode::NewScObject || instr->m_opcode == Js::OpCode::NewScObjArray
        || instr->m_opcode == Js::OpCode::NewScObjectSpread || instr->m_opcode == Js::OpCode::NewScObjArraySpread));
#endif
    this->currentBlock->globOptData.totalOutParamCount -= sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
    this->currentBlock->globOptData.startCallCount--;
}
  622. void
  623. GlobOpt::FillBailOutInfo(BasicBlock *block, BailOutInfo * bailOutInfo)
  624. {
  625. AssertMsg(!this->isCallHelper, "Bail out can't be inserted the middle of CallHelper sequence");
  626. bailOutInfo->liveVarSyms = block->globOptData.liveVarSyms->CopyNew(this->func->m_alloc);
  627. bailOutInfo->liveFloat64Syms = block->globOptData.liveFloat64Syms->CopyNew(this->func->m_alloc);
  628. // SIMD_JS
  629. bailOutInfo->liveSimd128F4Syms = block->globOptData.liveSimd128F4Syms->CopyNew(this->func->m_alloc);
  630. bailOutInfo->liveSimd128I4Syms = block->globOptData.liveSimd128I4Syms->CopyNew(this->func->m_alloc);
  631. // The live int32 syms in the bailout info are only the syms resulting from lossless conversion to int. If the int32 value
  632. // was created from a lossy conversion to int, the original var value cannot be re-materialized from the int32 value. So, the
  633. // int32 version is considered to be not live for the purposes of bailout, which forces the var or float versions to be used
  634. // directly for restoring the value during bailout. Otherwise, bailout may try to re-materialize the var value by converting
  635. // the lossily-converted int value back into a var, restoring the wrong value.
  636. bailOutInfo->liveLosslessInt32Syms =
  637. block->globOptData.liveInt32Syms->MinusNew(block->globOptData.liveLossyInt32Syms, this->func->m_alloc);
  638. // Save the stack literal init field count so we can null out the uninitialized fields
  639. StackLiteralInitFldDataMap * stackLiteralInitFldDataMap = block->globOptData.stackLiteralInitFldDataMap;
  640. if (stackLiteralInitFldDataMap != nullptr)
  641. {
  642. uint stackLiteralInitFldDataCount = stackLiteralInitFldDataMap->Count();
  643. if (stackLiteralInitFldDataCount != 0)
  644. {
  645. auto stackLiteralBailOutInfo = AnewArray(this->func->m_alloc,
  646. BailOutInfo::StackLiteralBailOutInfo, stackLiteralInitFldDataCount);
  647. uint i = 0;
  648. stackLiteralInitFldDataMap->Map(
  649. [stackLiteralBailOutInfo, stackLiteralInitFldDataCount, &i](StackSym * stackSym, StackLiteralInitFldData const& data)
  650. {
  651. Assert(i < stackLiteralInitFldDataCount);
  652. stackLiteralBailOutInfo[i].stackSym = stackSym;
  653. stackLiteralBailOutInfo[i].initFldCount = data.currentInitFldCount;
  654. i++;
  655. });
  656. Assert(i == stackLiteralInitFldDataCount);
  657. bailOutInfo->stackLiteralBailOutInfoCount = stackLiteralInitFldDataCount;
  658. bailOutInfo->stackLiteralBailOutInfo = stackLiteralBailOutInfo;
  659. }
  660. }
  661. // Save the constant values that we know so we can restore them directly.
  662. // This allows us to dead store the constant value assign.
  663. this->CaptureValues(block, bailOutInfo);
  664. if (TrackArgumentsObject())
  665. {
  666. this->CaptureArguments(block, bailOutInfo, this->func->m_alloc);
  667. }
  668. if (block->globOptData.callSequence && !block->globOptData.callSequence->Empty())
  669. {
  670. uint currentArgOutCount = 0;
  671. uint startCallNumber = block->globOptData.startCallCount;
  672. bailOutInfo->startCallInfo = JitAnewArray(this->func->m_alloc, BailOutInfo::StartCallInfo, startCallNumber);
  673. bailOutInfo->startCallCount = startCallNumber;
  674. // Save the start call's func to identify the function (inlined) that the call sequence is for
  675. // We might not have any arg out yet to get the function from
  676. bailOutInfo->startCallFunc = JitAnewArray(this->func->m_alloc, Func *, startCallNumber);
  677. #ifdef _M_IX86
  678. bailOutInfo->inlinedStartCall = BVFixed::New(startCallNumber, this->func->m_alloc, false);
  679. #endif
  680. uint totalOutParamCount = block->globOptData.totalOutParamCount;
  681. bailOutInfo->totalOutParamCount = totalOutParamCount;
  682. bailOutInfo->argOutSyms = JitAnewArrayZ(this->func->m_alloc, StackSym *, totalOutParamCount);
  683. uint argRestoreAdjustCount = 0;
  684. FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, block->globOptData.callSequence)
  685. {
  686. if(opnd->GetStackSym()->HasArgSlotNum())
  687. {
  688. StackSym * sym;
  689. if(opnd->IsSymOpnd())
  690. {
  691. sym = opnd->AsSymOpnd()->m_sym->AsStackSym();
  692. Assert(sym->IsArgSlotSym());
  693. Assert(sym->m_isSingleDef);
  694. Assert(sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A
  695. || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Inline
  696. || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_InlineBuiltIn
  697. || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_SpreadArg
  698. || sym->m_instrDef->m_opcode == Js::OpCode::ArgOut_A_Dynamic);
  699. }
  700. else
  701. {
  702. sym = opnd->GetStackSym();
  703. Value* val = FindValue(sym);
  704. Assert(val);
  705. CaptureValue(block, sym, val, bailOutInfo);
  706. }
  707. Assert(totalOutParamCount != 0);
  708. Assert(totalOutParamCount > currentArgOutCount);
  709. currentArgOutCount++;
  710. #pragma prefast(suppress:26000, "currentArgOutCount is never 0");
  711. bailOutInfo->argOutSyms[totalOutParamCount - currentArgOutCount] = sym;
  712. // Note that there could be ArgOuts below current bailout instr that belong to current call (currentArgOutCount < argOutCount),
  713. // in which case we will have nulls in argOutSyms[] in start of section for current call, because we fill from tail.
  714. // Example: StartCall 3, ArgOut1,.. ArgOut2, Bailout,.. Argout3 -> [NULL, ArgOut1, ArgOut2].
  715. }
  716. else
  717. {
  718. Assert(opnd->IsRegOpnd());
  719. StackSym * sym = opnd->AsRegOpnd()->m_sym;
  720. Assert(!sym->IsArgSlotSym());
  721. Assert(sym->m_isSingleDef);
  722. Assert(sym->m_instrDef->m_opcode == Js::OpCode::StartCall);
  723. Assert(startCallNumber != 0);
  724. startCallNumber--;
  725. bailOutInfo->startCallFunc[startCallNumber] = sym->m_instrDef->m_func;
  726. #ifdef _M_IX86
  727. if (this->currentRegion && this->currentRegion->GetType() == RegionTypeTry)
  728. {
  729. // For a bailout in argument evaluation from an EH region, the esp is offset by the TryCatch helper’s frame. So, the argouts are not actually pushed at the
  730. // offsets stored in the bailout record, which are relative to ebp. Need to restore the argouts from the actual value of esp before calling the Bailout helper.
  731. // For nested calls, argouts for the outer call need to be restored from an offset of stack-adjustment-done-by-the-inner-call from esp.
  732. if (startCallNumber + 1 == bailOutInfo->startCallCount)
  733. {
  734. argRestoreAdjustCount = 0;
  735. }
  736. else
  737. {
  738. argRestoreAdjustCount = bailOutInfo->startCallInfo[startCallNumber + 1].argRestoreAdjustCount + bailOutInfo->startCallInfo[startCallNumber + 1].argCount;
  739. if ((Math::Align<int32>(bailOutInfo->startCallInfo[startCallNumber + 1].argCount * MachPtr, MachStackAlignment) - (bailOutInfo->startCallInfo[startCallNumber + 1].argCount * MachPtr)) != 0)
  740. {
  741. argRestoreAdjustCount++;
  742. }
  743. }
  744. }
  745. if (sym->m_isInlinedArgSlot)
  746. {
  747. bailOutInfo->inlinedStartCall->Set(startCallNumber);
  748. }
  749. #endif
  750. uint argOutCount = sym->m_instrDef->GetArgOutCount(/*getInterpreterArgOutCount*/ true);
  751. Assert(totalOutParamCount >= argOutCount);
  752. Assert(argOutCount >= currentArgOutCount);
  753. bailOutInfo->RecordStartCallInfo(startCallNumber, argRestoreAdjustCount, sym->m_instrDef);
  754. totalOutParamCount -= argOutCount;
  755. currentArgOutCount = 0;
  756. }
  757. }
  758. NEXT_SLISTBASE_ENTRY;
  759. Assert(totalOutParamCount == 0);
  760. Assert(startCallNumber == 0);
  761. Assert(currentArgOutCount == 0);
  762. }
  763. }
  764. IR::ByteCodeUsesInstr *
  765. GlobOpt::InsertByteCodeUses(IR::Instr * instr, bool includeDef)
  766. {
  767. IR::ByteCodeUsesInstr * byteCodeUsesInstr = nullptr;
  768. Assert(this->byteCodeUses);
  769. IR::RegOpnd * dstOpnd = nullptr;
  770. if (includeDef)
  771. {
  772. IR::Opnd * opnd = instr->GetDst();
  773. if (opnd && opnd->IsRegOpnd())
  774. {
  775. dstOpnd = opnd->AsRegOpnd();
  776. if (dstOpnd->GetIsJITOptimizedReg() || !dstOpnd->m_sym->HasByteCodeRegSlot())
  777. {
  778. dstOpnd = nullptr;
  779. }
  780. }
  781. }
  782. if (!this->byteCodeUses->IsEmpty() || this->propertySymUse || dstOpnd != nullptr)
  783. {
  784. byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr->m_func);
  785. byteCodeUsesInstr->SetByteCodeOffset(instr);
  786. if (!this->byteCodeUses->IsEmpty())
  787. {
  788. byteCodeUsesInstr->byteCodeUpwardExposedUsed = byteCodeUses->CopyNew(instr->m_func->m_alloc);
  789. }
  790. if (dstOpnd != nullptr)
  791. {
  792. byteCodeUsesInstr->SetFakeDst(dstOpnd);
  793. }
  794. if (this->propertySymUse)
  795. {
  796. byteCodeUsesInstr->propertySymUse = this->propertySymUse;
  797. }
  798. instr->InsertBefore(byteCodeUsesInstr);
  799. }
  800. JitAdelete(this->alloc, this->byteCodeUses);
  801. this->byteCodeUses = nullptr;
  802. this->propertySymUse = nullptr;
  803. return byteCodeUsesInstr;
  804. }
  805. IR::ByteCodeUsesInstr *
  806. GlobOpt::ConvertToByteCodeUses(IR::Instr * instr)
  807. {
  808. #if DBG
  809. PropertySym *propertySymUseBefore = NULL;
  810. Assert(this->byteCodeUses == nullptr);
  811. this->byteCodeUsesBeforeOpt->ClearAll();
  812. GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
  813. #endif
  814. this->CaptureByteCodeSymUses(instr);
  815. IR::ByteCodeUsesInstr * byteCodeUsesInstr = this->InsertByteCodeUses(instr, true);
  816. instr->Remove();
  817. return byteCodeUsesInstr;
  818. }
  819. bool
  820. GlobOpt::MayNeedBailOut(Loop * loop) const
  821. {
  822. Assert(this->IsLoopPrePass());
  823. return loop->CanHoistInvariants() ||
  824. this->DoFieldCopyProp(loop) || (this->DoFieldHoisting(loop) && !loop->fieldHoistCandidates->IsEmpty());
  825. }
  826. bool
  827. GlobOpt::MayNeedBailOnImplicitCall(IR::Opnd * opnd, Value *val, bool callsToPrimitive)
  828. {
  829. switch (opnd->GetKind())
  830. {
  831. case IR::OpndKindAddr:
  832. case IR::OpndKindFloatConst:
  833. case IR::OpndKindIntConst:
  834. return false;
  835. case IR::OpndKindReg:
  836. // Only need implicit call if the operation will call ToPrimitive and we haven't prove
  837. // that it is already a primitive
  838. return callsToPrimitive &&
  839. !(val && val->GetValueInfo()->IsPrimitive()) &&
  840. !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
  841. !opnd->AsRegOpnd()->m_sym->IsInt32() &&
  842. !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
  843. !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
  844. !opnd->AsRegOpnd()->m_sym->IsIntConst();
  845. case IR::OpndKindSym:
  846. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  847. {
  848. IR::PropertySymOpnd* propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
  849. if (!propertySymOpnd->MayHaveImplicitCall())
  850. {
  851. return false;
  852. }
  853. }
  854. return true;
  855. default:
  856. return true;
  857. };
  858. }
  859. bool
  860. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value *src1Val, Value *src2Val)
  861. {
  862. Assert(!this->IsLoopPrePass());
  863. return this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val, this->currentBlock,
  864. (!this->blockData.liveFields->IsEmpty()), !this->currentBlock->IsLandingPad(), true);
  865. }
  866. bool
  867. GlobOpt::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, Value *src1Val, Value *src2Val, BasicBlock * block, bool hasLiveFields, bool mayNeedImplicitCallBailOut, bool isForwardPass)
  868. {
  869. if (mayNeedImplicitCallBailOut &&
  870. !instr->CallsAccessor() &&
  871. (
  872. NeedBailOnImplicitCallForLiveValues(block, isForwardPass) ||
  873. NeedBailOnImplicitCallForCSE(block, isForwardPass) ||
  874. NeedBailOnImplicitCallWithFieldOpts(block->loop, hasLiveFields) ||
  875. NeedBailOnImplicitCallForArrayCheckHoist(block, isForwardPass)
  876. ) &&
  877. (!instr->HasTypeCheckBailOut() && MayNeedBailOnImplicitCall(instr, src1Val, src2Val)))
  878. {
  879. return true;
  880. }
  881. #if DBG
  882. if (Js::Configuration::Global.flags.IsEnabled(Js::BailOutAtEveryImplicitCallFlag) &&
  883. !instr->HasBailOutInfo() && MayNeedBailOnImplicitCall(instr, nullptr, nullptr))
  884. {
  885. // always add implicit call bailout even if we don't need it, but only on opcode that supports it
  886. return true;
  887. }
  888. #endif
  889. return false;
  890. }
  891. bool
  892. GlobOpt::IsTypeCheckProtected(const IR::Instr * instr)
  893. {
  894. #if DBG
  895. IR::Opnd* dst = instr->GetDst();
  896. IR::Opnd* src1 = instr->GetSrc1();
  897. IR::Opnd* src2 = instr->GetSrc2();
  898. AssertMsg(!dst || !dst->IsSymOpnd() || !dst->AsSymOpnd()->IsPropertySymOpnd() ||
  899. !src1 || !src1->IsSymOpnd() || !src1->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src1 and dst be a PropertySymOpnd.");
  900. AssertMsg(!src2 || !src2->IsSymOpnd() || !src2->AsSymOpnd()->IsPropertySymOpnd(), "No instruction should have a src2 be a PropertySymOpnd.");
  901. #endif
  902. IR::Opnd * opnd = instr->GetDst();
  903. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  904. {
  905. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  906. }
  907. opnd = instr->GetSrc1();
  908. if (opnd && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  909. {
  910. return opnd->AsPropertySymOpnd()->IsTypeCheckProtected();
  911. }
  912. return false;
  913. }
  914. bool
  915. GlobOpt::NeedsTypeCheckBailOut(const IR::Instr *instr, IR::PropertySymOpnd *propertySymOpnd, bool isStore, bool* pIsTypeCheckProtected, IR::BailOutKind *pBailOutKind)
  916. {
  917. if (instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType || instr->m_opcode == Js::OpCode::LdMethodFldPolyInlineMiss)
  918. {
  919. return false;
  920. }
  921. // CheckFixedFld always requires a type check and bailout either at the instruction or upstream.
  922. Assert(instr->m_opcode != Js::OpCode::CheckFixedFld || (propertySymOpnd->UsesFixedValue() && propertySymOpnd->MayNeedTypeCheckProtection()));
  923. if (propertySymOpnd->MayNeedTypeCheckProtection())
  924. {
  925. bool isCheckFixedFld = instr->m_opcode == Js::OpCode::CheckFixedFld;
  926. AssertMsg(!isCheckFixedFld || !PHASE_OFF(Js::FixedMethodsPhase, instr->m_func->GetJnFunction()) ||
  927. !PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func->GetJnFunction()), "CheckFixedFld with fixed method/data phase disabled?");
  928. Assert(!isStore || !isCheckFixedFld);
  929. // We don't share caches between field loads and stores. We should never have a field store involving a proto cache.
  930. Assert(!isStore || !propertySymOpnd->IsLoadedFromProto());
  931. if (propertySymOpnd->NeedsTypeCheckAndBailOut())
  932. {
  933. *pBailOutKind = propertySymOpnd->HasEquivalentTypeSet() && !propertySymOpnd->MustDoMonoCheck() ?
  934. (isCheckFixedFld ? IR::BailOutFailedEquivalentFixedFieldTypeCheck : IR::BailOutFailedEquivalentTypeCheck) :
  935. (isCheckFixedFld ? IR::BailOutFailedFixedFieldTypeCheck : IR::BailOutFailedTypeCheck);
  936. return true;
  937. }
  938. else
  939. {
  940. *pIsTypeCheckProtected = propertySymOpnd->IsTypeCheckProtected();
  941. *pBailOutKind = IR::BailOutInvalid;
  942. return false;
  943. }
  944. }
  945. else
  946. {
  947. Assert(instr->m_opcode != Js::OpCode::CheckFixedFld);
  948. *pBailOutKind = IR::BailOutInvalid;
  949. return false;
  950. }
  951. }
  952. bool
  953. GlobOpt::MayNeedBailOnImplicitCall(const IR::Instr * instr, Value *src1Val, Value *src2Val)
  954. {
  955. if (!instr->HasAnyImplicitCalls())
  956. {
  957. return false;
  958. }
  959. bool isLdElem = false;
  960. switch (instr->m_opcode)
  961. {
  962. case Js::OpCode::LdLen_A:
  963. {
  964. const ValueType baseValueType(instr->GetSrc1()->GetValueType());
  965. return
  966. !(
  967. baseValueType.IsString() ||
  968. baseValueType.IsAnyArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray ||
  969. instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutOnIrregularLength // guarantees no implicit calls
  970. );
  971. }
  972. case Js::OpCode::LdElemI_A:
  973. case Js::OpCode::LdMethodElem:
  974. case Js::OpCode::InlineArrayPop:
  975. isLdElem = true;
  976. // fall-through
  977. case Js::OpCode::StElemI_A:
  978. case Js::OpCode::StElemI_A_Strict:
  979. case Js::OpCode::InlineArrayPush:
  980. {
  981. if(!instr->HasBailOutInfo())
  982. {
  983. return true;
  984. }
  985. // The following bailout kinds already prevent implicit calls from happening. Any conditions that could trigger an
  986. // implicit call result in a pre-op bailout.
  987. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  988. return
  989. !(
  990. (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutConventionalTypedArrayAccessOnly ||
  991. bailOutKind & IR::BailOutOnArrayAccessHelperCall ||
  992. isLdElem && bailOutKind & IR::BailOutConventionalNativeArrayAccessOnly
  993. );
  994. }
  995. default:
  996. break;
  997. }
  998. IR::Opnd * opnd = instr->GetSrc1();
  999. bool callsToPrimitive = OpCodeAttr::CallsValueOf(instr->m_opcode);
  1000. if (opnd != nullptr && MayNeedBailOnImplicitCall(opnd, src1Val, callsToPrimitive))
  1001. {
  1002. return true;
  1003. }
  1004. opnd = instr->GetSrc2();
  1005. if (opnd != nullptr && MayNeedBailOnImplicitCall(opnd, src2Val, callsToPrimitive))
  1006. {
  1007. return true;
  1008. }
  1009. opnd = instr->GetDst();
  1010. if (opnd)
  1011. {
  1012. switch (opnd->GetKind())
  1013. {
  1014. case IR::OpndKindReg:
  1015. return false;
  1016. case IR::OpndKindSym:
  1017. // No implicit call if we are just storing to a stack sym. Note that stores to non-configurable root
  1018. // object fields may still need implicit call bailout. That's because a non-configurable field may still
  1019. // become read-only and thus the store field will not take place (or throw in strict mode). Hence, we
  1020. // can't optimize (e.g. copy prop) across such field stores.
  1021. if (opnd->AsSymOpnd()->m_sym->IsStackSym())
  1022. {
  1023. return false;
  1024. }
  1025. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  1026. {
  1027. IR::PropertySymOpnd* propertySymOpnd = opnd->AsSymOpnd()->AsPropertySymOpnd();
  1028. if (!propertySymOpnd->MayHaveImplicitCall())
  1029. {
  1030. return false;
  1031. }
  1032. }
  1033. return true;
  1034. case IR::OpndKindIndir:
  1035. return true;
  1036. default:
  1037. Assume(UNREACHED);
  1038. }
  1039. }
  1040. return false;
  1041. }
  1042. void
  1043. GlobOpt::GenerateBailAfterOperation(IR::Instr * *const pInstr, IR::BailOutKind kind)
  1044. {
  1045. Assert(pInstr);
  1046. IR::Instr* instr = *pInstr;
  1047. Assert(instr);
  1048. IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
  1049. uint32 currentOffset = instr->GetByteCodeOffset();
  1050. while (nextInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
  1051. nextInstr->GetByteCodeOffset() == currentOffset)
  1052. {
  1053. nextInstr = nextInstr->GetNextRealInstrOrLabel();
  1054. }
  1055. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(nextInstr, kind);
  1056. if (this->currentBlock->GetLastInstr() == instr)
  1057. {
  1058. this->currentBlock->SetLastInstr(bailOutInstr);
  1059. }
  1060. FillBailOutInfo(this->currentBlock, bailOutInstr->GetBailOutInfo());
  1061. *pInstr = bailOutInstr;
  1062. }
  1063. void
  1064. GlobOpt::GenerateBailAtOperation(IR::Instr * *const pInstr, const IR::BailOutKind bailOutKind)
  1065. {
  1066. Assert(pInstr);
  1067. IR::Instr * instr = *pInstr;
  1068. Assert(instr);
  1069. Assert(instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset);
  1070. Assert(bailOutKind != IR::BailOutInvalid);
  1071. IR::Instr * bailOutInstr = instr->ConvertToBailOutInstr(instr, bailOutKind);
  1072. if (this->currentBlock->GetLastInstr() == instr)
  1073. {
  1074. this->currentBlock->SetLastInstr(bailOutInstr);
  1075. }
  1076. FillBailOutInfo(currentBlock, bailOutInstr->GetBailOutInfo());
  1077. *pInstr = bailOutInstr;
  1078. }
  1079. IR::Instr *
  1080. GlobOpt::EnsureBailTarget(Loop * loop)
  1081. {
  1082. BailOutInfo * bailOutInfo = loop->bailOutInfo;
  1083. IR::Instr * bailOutInstr = bailOutInfo->bailOutInstr;
  1084. if (bailOutInstr == nullptr)
  1085. {
  1086. bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailTarget, IR::BailOutShared, bailOutInfo, bailOutInfo->bailOutFunc);
  1087. loop->landingPad->InsertAfter(bailOutInstr);
  1088. }
  1089. return bailOutInstr;
  1090. }