GlobOptArrays.cpp 77 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
#if ENABLE_DEBUG_CONFIG_OPTIONS
// Prints a "Testtrace:" line identifying the phase and the instruction's function
// (display name + debug number set), followed by the caller-supplied printf-style
// message, but only when test tracing is enabled for that phase on this->func.
#define TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    if(PHASE_TESTTRACE(phase, this->func)) \
    { \
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
        Output::Print( \
            _u("Testtrace: %s function %s (%s): "), \
            Js::PhaseNames[phase], \
            instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
            instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }
#else
// Test tracing is compiled out entirely when debug config options are disabled.
#define TESTTRACE_PHASE_INSTR(phase, instr, ...)
#endif

#if ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
// Emits both the dump trace (TRACE_PHASE_INSTR) and the test trace for the same
// phase/instruction/message. In non-DBG_DUMP builds only the test trace remains.
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
    TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
#else
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
#endif
  29. GlobOpt::ArraySrcOpt::~ArraySrcOpt()
  30. {
  31. if (originalIndexOpnd != nullptr)
  32. {
  33. Assert(instr->m_opcode == Js::OpCode::IsIn);
  34. instr->ReplaceSrc1(originalIndexOpnd);
  35. }
  36. }
// Classifies the current instruction and initializes the per-opcode state of the
// optimization (baseOpnd, baseOwnerIndir/baseOwnerInstr, isLoad/isStore, and the
// needs* flags that drive what gets extracted/hoisted later).
// Returns false when the instruction is not a candidate for array source optimization.
bool GlobOpt::ArraySrcOpt::CheckOpCode()
{
    switch (instr->m_opcode)
    {
        // SIMD_JS
        case Js::OpCode::Simd128_LdArr_F4:
        case Js::OpCode::Simd128_LdArr_I4:
            // no type-spec for Asm.js
            if (globOpt->GetIsAsmJSFunc())
            {
                return false;
            }
            // fall through — SIMD loads share the indir-based setup of LdElemI_A below

        case Js::OpCode::LdElemI_A:
        case Js::OpCode::LdMethodElem:
            // Element loads read through an indir src1 ([base + index]).
            if (!instr->GetSrc1()->IsIndirOpnd())
            {
                return false;
            }
            baseOwnerIndir = instr->GetSrc1()->AsIndirOpnd();
            baseOpnd = baseOwnerIndir->GetBaseOpnd();
            // LdMethodElem is currently not profiled
            isProfilableLdElem = instr->m_opcode != Js::OpCode::LdMethodElem;
            needsBoundChecks = true;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            isLoad = true;
            break;

        // SIMD_JS
        case Js::OpCode::Simd128_StArr_F4:
        case Js::OpCode::Simd128_StArr_I4:
            // no type-spec for Asm.js
            if (globOpt->GetIsAsmJSFunc())
            {
                return false;
            }
            // fall through — SIMD stores share the indir-based setup of StElemI_A below

        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        case Js::OpCode::StElemC:
            // Element stores write through an indir dst ([base + index]).
            if (!instr->GetDst()->IsIndirOpnd())
            {
                return false;
            }
            baseOwnerIndir = instr->GetDst()->AsIndirOpnd();
            baseOpnd = baseOwnerIndir->GetBaseOpnd();
            // StElemC (constant-index store) is not profiled, so it gets no bound checks.
            isProfilableStElem = instr->m_opcode != Js::OpCode::StElemC;
            needsBoundChecks = isProfilableStElem;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            isStore = true;
            break;

        case Js::OpCode::InlineArrayPush:
        case Js::OpCode::InlineArrayPop:
        {
            IR::Opnd * thisOpnd = instr->GetSrc1();

            // Abort if it not a LikelyArray or Object with Array - No point in doing array check elimination.
            if (!thisOpnd->IsRegOpnd() || !thisOpnd->GetValueType().IsLikelyArrayOrObjectWithArray())
            {
                return false;
            }

            baseOwnerInstr = instr;
            baseOpnd = thisOpnd->AsRegOpnd();
            // Pop reads an element (load); push writes one (store).
            isLoad = instr->m_opcode == Js::OpCode::InlineArrayPop;
            isStore = instr->m_opcode == Js::OpCode::InlineArrayPush;
            needsLength = true;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            break;
        }

        case Js::OpCode::LdLen_A:
            if (!instr->GetSrc1()->IsRegOpnd())
            {
                return false;
            }
            baseOpnd = instr->GetSrc1()->AsRegOpnd();
            // ObjectWithArray's "length" is a property, not the array length — bail out.
            if (baseOpnd->GetValueType().IsLikelyObject() && baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray)
            {
                return false;
            }
            baseOwnerInstr = instr;
            needsLength = true;
            break;

        case Js::OpCode::IsIn:
            // IsIn elimination relies on the no-missing-values invariant being hoistable.
            if (!globOpt->DoArrayMissingValueCheckHoist())
            {
                return false;
            }

            // A tagged-int address src1 is rewritten to an IntConstOpnd so the index can
            // participate in bound-check analysis; the original is kept for restoration
            // by the destructor if the optimization doesn't complete.
            if (instr->GetSrc1()->IsAddrOpnd())
            {
                const Js::Var val = instr->GetSrc1()->AsAddrOpnd()->m_address;
                if (Js::TaggedInt::Is(val))
                {
                    originalIndexOpnd = instr->UnlinkSrc1();
                    instr->SetSrc1(IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(val), TyInt32, instr->m_func));
                }
            }

            if (!instr->GetSrc1()->IsRegOpnd() && !instr->GetSrc1()->IsIntConstOpnd())
            {
                return false;
            }

            if (!instr->GetSrc2()->IsRegOpnd())
            {
                return false;
            }

            // For IsIn the array is src2; src1 is the index.
            baseOpnd = instr->GetSrc2()->AsRegOpnd();

            if (baseOpnd->GetValueType().IsLikelyObject() && baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray)
            {
                return false;
            }

            // Require an array-like base; JS arrays additionally must have no missing
            // values, since "index in array" is only equivalent to a bounds test then.
            if (!baseOpnd->GetValueType().IsLikelyAnyArray() || (baseOpnd->GetValueType().IsLikelyArrayOrObjectWithArray() && !baseOpnd->GetValueType().HasNoMissingValues()))
            {
                return false;
            }

            baseOwnerInstr = instr;
            needsBoundChecks = true;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            break;

        default:
            return false;
    }

    return true;
}
// Ensures the index operand is in the expected (int-specialized where possible) form,
// then records indexOpnd, the index's var-equivalent sym (indexVarSym), and its value
// (indexValue) from the current block for use by the bound-check analysis.
void GlobOpt::ArraySrcOpt::TypeSpecIndex()
{
    // Since this happens before type specialization, make sure that any necessary conversions are done, and that the
    // index is int-specialized if possible such that the const flags are correct.
    if (!globOpt->IsLoopPrePass())
    {
        if (baseOwnerIndir)
        {
            globOpt->ToVarUses(instr, baseOwnerIndir, baseOwnerIndir == instr->GetDst(), nullptr);
        }
        else if (instr->m_opcode == Js::OpCode::IsIn && instr->GetSrc1()->IsRegOpnd())
        {
            // If the optimization is unable to eliminate the bounds checks, we need to restore the original var sym.
            // (The copy is restored by ~ArraySrcOpt.)
            Assert(originalIndexOpnd == nullptr);
            originalIndexOpnd = instr->GetSrc1()->Copy(func);
            globOpt->ToTypeSpecIndex(instr, instr->GetSrc1()->AsRegOpnd(), nullptr);
        }
    }

    // Locate the index operand: the indir's index for element accesses, or src1 for IsIn.
    if (baseOwnerIndir != nullptr)
    {
        indexOpnd = baseOwnerIndir->GetIndexOpnd();
    }
    else if (instr->m_opcode == Js::OpCode::IsIn)
    {
        indexOpnd = instr->GetSrc1();
    }

    if (indexOpnd != nullptr && indexOpnd->IsRegOpnd())
    {
        IR::RegOpnd * regOpnd = indexOpnd->AsRegOpnd();

        if (regOpnd->m_sym->IsTypeSpec())
        {
            // Type-specialized index syms are expected to be int32; values are tracked
            // on the var-equivalent sym.
            Assert(regOpnd->m_sym->IsInt32());
            indexVarSym = regOpnd->m_sym->GetVarEquivSym(nullptr);
        }
        else
        {
            indexVarSym = regOpnd->m_sym;
        }

        indexValue = globOpt->CurrentBlockData()->FindValue(indexVarSym);
    }
}
  201. void GlobOpt::ArraySrcOpt::UpdateValue(StackSym * newHeadSegmentSym, StackSym * newHeadSegmentLengthSym, StackSym * newLengthSym)
  202. {
  203. Assert(baseValueType.GetObjectType() == newBaseValueType.GetObjectType());
  204. Assert(newBaseValueType.IsObject());
  205. Assert(baseValueType.IsLikelyArray() || !newLengthSym);
  206. if (!(newHeadSegmentSym || newHeadSegmentLengthSym || newLengthSym))
  207. {
  208. // We're not adding new information to the value other than changing the value type. Preserve any existing
  209. // information and just change the value type.
  210. globOpt->ChangeValueType(globOpt->currentBlock, baseValue, newBaseValueType, true);
  211. return;
  212. }
  213. // Merge the new syms into the value while preserving any existing information, and change the value type
  214. if (baseArrayValueInfo)
  215. {
  216. if (!newHeadSegmentSym)
  217. {
  218. newHeadSegmentSym = baseArrayValueInfo->HeadSegmentSym();
  219. }
  220. if (!newHeadSegmentLengthSym)
  221. {
  222. newHeadSegmentLengthSym = baseArrayValueInfo->HeadSegmentLengthSym();
  223. }
  224. if (!newLengthSym)
  225. {
  226. newLengthSym = baseArrayValueInfo->LengthSym();
  227. }
  228. Assert(!baseArrayValueInfo->HeadSegmentSym() || newHeadSegmentSym == baseArrayValueInfo->HeadSegmentSym());
  229. Assert(!baseArrayValueInfo->HeadSegmentLengthSym() || newHeadSegmentLengthSym == baseArrayValueInfo->HeadSegmentLengthSym());
  230. Assert(!baseArrayValueInfo->LengthSym() || newLengthSym == baseArrayValueInfo->LengthSym());
  231. }
  232. ArrayValueInfo *const newBaseArrayValueInfo =
  233. ArrayValueInfo::New(
  234. globOpt->alloc,
  235. newBaseValueType,
  236. newHeadSegmentSym,
  237. newHeadSegmentLengthSym,
  238. newLengthSym,
  239. baseValueInfo->GetSymStore());
  240. globOpt->ChangeValueInfo(globOpt->currentBlock, baseValue, newBaseArrayValueInfo);
  241. };
// For optimized "virtual" typed arrays (fast array buffers backed by reserved memory),
// decides whether both bound checks can be eliminated outright. Compiled out when
// ENABLE_FAST_ARRAYBUFFER is not defined.
void GlobOpt::ArraySrcOpt::CheckVirtualArrayBounds()
{
#if ENABLE_FAST_ARRAYBUFFER
    if (baseValueType.IsLikelyOptimizedVirtualTypedArray() && !Js::IsSimd128LoadStore(instr->m_opcode) /*Always extract bounds for SIMD */)
    {
        // Elimination only applies when the access can't observe out-of-range behavior
        // through its result: profilable stores, loads whose dst is always converted
        // to int32, or float-virtual-array loads whose dst is always converted to number.
        if (isProfilableStElem ||
            !instr->IsDstNotAlwaysConvertedToInt32() ||
            ((baseValueType.GetObjectType() == ObjectType::Float32VirtualArray ||
              baseValueType.GetObjectType() == ObjectType::Float64VirtualArray) &&
             !instr->IsDstNotAlwaysConvertedToNumber()
            )
           )
        {
            // Unless we're in asm.js (where it is guaranteed that virtual typed array accesses cannot read/write beyond 4GB),
            // check the range of the index to make sure we won't access beyond the reserved memory beforing eliminating bounds
            // checks in jitted code.
            if (!globOpt->GetIsAsmJSFunc() && baseOwnerIndir)
            {
                if (indexOpnd)
                {
                    IntConstantBounds idxConstantBounds;
                    if (indexValue && indexValue->GetValueInfo()->TryGetIntConstantBounds(&idxConstantBounds))
                    {
                        BYTE indirScale = Lowerer::GetArrayIndirScale(baseValueType);
                        int32 upperBound = idxConstantBounds.UpperBound();
                        int32 lowerBound = idxConstantBounds.LowerBound();

                        // Non-negative index whose scaled upper bound stays within the
                        // reserved asm.js buffer range: both checks can go.
                        if (lowerBound >= 0 && ((static_cast<uint64>(upperBound) << indirScale) < MAX_ASMJS_ARRAYBUFFER_LENGTH))
                        {
                            eliminatedLowerBoundCheck = true;
                            eliminatedUpperBoundCheck = true;
                            canBailOutOnArrayAccessHelperCall = false;
                        }
                    }
                }
            }
            else
            {
                // asm.js function, or a non-indir opcode (push/pop/LdLen/IsIn):
                // the reserved-memory guarantee makes both checks unnecessary.
                if (baseOwnerIndir == nullptr)
                {
                    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush ||
                           instr->m_opcode == Js::OpCode::InlineArrayPop ||
                           instr->m_opcode == Js::OpCode::LdLen_A ||
                           instr->m_opcode == Js::OpCode::IsIn);
                }

                eliminatedLowerBoundCheck = true;
                eliminatedUpperBoundCheck = true;
                canBailOutOnArrayAccessHelperCall = false;
            }
        }
    }
#endif
}
// Attempts to prove the lower and/or upper bound check redundant using the tracked
// constant bounds of the index and of the head segment length, and decides whether
// separate bound-check instructions should be extracted (doExtractBoundChecks).
// (Note: "Eliminite" is a historical typo in the method name, kept for compatibility.)
void GlobOpt::ArraySrcOpt::TryEliminiteBoundsCheck()
{
    AnalysisAssert(indexOpnd != nullptr || baseOwnerIndir != nullptr);
    Assert(needsHeadSegmentLength);

    // Bound checks can be separated from the instruction only if it can bail out instead of making a helper call when a
    // bound check fails. And only if it would bail out, can we use a bound check to eliminate redundant bound checks later
    // on that path.
    doExtractBoundChecks = (headSegmentLengthIsAvailable || doHeadSegmentLengthLoad) && canBailOutOnArrayAccessHelperCall;

    // Get the index value
    if (indexOpnd != nullptr && indexOpnd->IsRegOpnd())
    {
        if (indexOpnd->AsRegOpnd()->m_sym->IsTypeSpec())
        {
            Assert(indexVarSym);
            Assert(indexValue);
            AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
            Assert(indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
            // The operand is TyUint32 exactly when the value is proven >= 0.
            Assert(
                (indexOpnd->GetType() == TyUint32) ==
                ValueInfo::IsGreaterThanOrEqualTo(
                    indexValue,
                    indexConstantBounds.LowerBound(),
                    indexConstantBounds.UpperBound(),
                    nullptr,
                    0,
                    0));
            if (indexOpnd->GetType() == TyUint32)
            {
                eliminatedLowerBoundCheck = true;
            }
        }
        else
        {
            doExtractBoundChecks = false; // Bound check instruction operates only on int-specialized operands
            if (!indexValue || !indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds))
            {
                // No constant bounds known for the index — nothing can be proven.
                return;
            }

            // Index proven >= 0: the lower bound check is redundant.
            if (ValueInfo::IsGreaterThanOrEqualTo(
                    indexValue,
                    indexConstantBounds.LowerBound(),
                    indexConstantBounds.UpperBound(),
                    nullptr,
                    0,
                    0))
            {
                eliminatedLowerBoundCheck = true;
            }
        }

        // Index proven < 0: the access will always fail the lower bound check, so the
        // upper bound check is moot and extraction is pointless.
        if (!eliminatedLowerBoundCheck &&
            ValueInfo::IsLessThan(
                indexValue,
                indexConstantBounds.LowerBound(),
                indexConstantBounds.UpperBound(),
                nullptr,
                0,
                0))
        {
            eliminatedUpperBoundCheck = true;
            doExtractBoundChecks = false;
            return;
        }
    }
    else
    {
        // Constant index: either an IntConstOpnd (IsIn), or the indir's offset when
        // there is no index operand.
        const int32 indexConstantValue = indexOpnd ? indexOpnd->AsIntConstOpnd()->AsInt32() : baseOwnerIndir->GetOffset();
        if (indexConstantValue < 0)
        {
            // Always fails the lower bound check; the upper bound check is moot.
            eliminatedUpperBoundCheck = true;
            doExtractBoundChecks = false;
            return;
        }

        if (indexConstantValue == INT32_MAX)
        {
            // Can never be below any segment length; the lower bound check is moot.
            eliminatedLowerBoundCheck = true;
            doExtractBoundChecks = false;
            return;
        }

        indexConstantBounds = IntConstantBounds(indexConstantValue, indexConstantValue);
        eliminatedLowerBoundCheck = true;
    }

    if (!headSegmentLengthIsAvailable)
    {
        return;
    }

    headSegmentLengthValue = globOpt->CurrentBlockData()->FindValue(baseArrayValueInfo->HeadSegmentLengthSym());
    if (!headSegmentLengthValue)
    {
        if (doExtractBoundChecks)
        {
            // No tracked value yet: assume the full legal segment-length range.
            headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
        }

        return;
    }

    AssertVerify(headSegmentLengthValue->GetValueInfo()->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));

    // index <= headSegmentLength - 1 (i.e. index < headSegmentLength): upper bound
    // check is redundant; if the lower bound check is also gone, nothing to extract.
    if (ValueInfo::IsLessThanOrEqualTo(
            indexValue,
            indexConstantBounds.LowerBound(),
            indexConstantBounds.UpperBound(),
            headSegmentLengthValue,
            headSegmentLengthConstantBounds.LowerBound(),
            headSegmentLengthConstantBounds.UpperBound(),
            -1
        ))
    {
        eliminatedUpperBoundCheck = true;
        if (eliminatedLowerBoundCheck)
        {
            doExtractBoundChecks = false;
        }
    }
}
// Walks enclosing loops from innermost outward and records, for each kind of work
// (array checks, head segment load, head segment length load, length load), the
// outermost loop out of which that work can be hoisted — stopping at the first loop
// that kills the relevant information or in which the base is not invariant.
void GlobOpt::ArraySrcOpt::CheckLoops()
{
    if (!doArrayChecks && !doHeadSegmentLoad && !doHeadSegmentLengthLoad && !doLengthLoad)
    {
        return;
    }

    // Find the loops out of which array checks and head segment loads need to be hoisted
    for (Loop *loop = globOpt->currentBlock->loop; loop; loop = loop->parent)
    {
        const JsArrayKills loopKills(loop->jsArrayKills);
        Value *baseValueInLoopLandingPad = nullptr;

        // Stop ascending when: the loop can kill the base's value type (JS/virtual
        // arrays only), the base is not invariant in the loop, or (when not doing
        // array checks) the landing-pad value is not known to be an object.
        if (((isLikelyJsArray || isLikelyVirtualTypedArray) && loopKills.KillsValueType(newBaseValueType)) ||
            !globOpt->OptIsInvariant(baseOpnd->m_sym, globOpt->currentBlock, loop, baseValue, true, true, &baseValueInLoopLandingPad) ||
            !(doArrayChecks || baseValueInLoopLandingPad->GetValueInfo()->IsObject()))
        {
            break;
        }

        // The value types should be the same, except:
        //     - The value type in the landing pad is a type that can merge to a specific object type. Typically, these
        //       cases will use BailOnNoProfile, but that can be disabled due to excessive bailouts. Those value types
        //       merge aggressively to the other side's object type, so the value type may have started off as
        //       Uninitialized, [Likely]Undefined|Null, [Likely]UninitializedObject, etc., and changed in the loop to an
        //       array type during a prepass.
        //     - StElems in the loop can kill the no-missing-values info.
        //     - The native array type may be made more conservative based on profile data by an instruction in the loop.
#if DBG
        if (!baseValueInLoopLandingPad->GetValueInfo()->CanMergeToSpecificObjectType())
        {
            ValueType landingPadValueType = baseValueInLoopLandingPad->GetValueInfo()->Type();
            Assert(landingPadValueType.IsSimilar(baseValueType)
                || (landingPadValueType.IsLikelyNativeArray() && landingPadValueType.Merge(baseValueType).IsSimilar(baseValueType))
                || (baseValueType.IsLikelyNativeArray() && baseValueType.Merge(landingPadValueType).IsSimilar(landingPadValueType))
            );
        }
#endif

        if (doArrayChecks)
        {
            hoistChecksOutOfLoop = loop;

            // If BailOnNotObject isn't hoisted, the value may still be tagged in the landing pad
            if (baseValueInLoopLandingPad->GetValueInfo()->Type().CanBeTaggedValue())
            {
                baseValueType = baseValueType.SetCanBeTaggedValue(true);
                baseOpnd->SetValueType(baseValueType);
            }
        }

        if (isLikelyJsArray && loopKills.KillsArrayHeadSegments())
        {
            // Head segment info is dead in this loop; keep ascending only if array
            // checks or the length load can still profit from outer loops.
            Assert(loopKills.KillsArrayHeadSegmentLengths());
            if (!(doArrayChecks || doLengthLoad))
            {
                break;
            }
        }
        else
        {
            if (doHeadSegmentLoad || headSegmentIsAvailable)
            {
                // If the head segment is already available, we may need to rehoist the value including other
                // information. So, need to track the loop out of which the head segment length can be hoisted even if
                // the head segment length is not being loaded here.
                hoistHeadSegmentLoadOutOfLoop = loop;
            }

            if (isLikelyJsArray
                    ? loopKills.KillsArrayHeadSegmentLengths()
                    : loopKills.KillsTypedArrayHeadSegmentLengths())
            {
                if (!(doArrayChecks || doHeadSegmentLoad || doLengthLoad))
                {
                    break;
                }
            }
            else if (doHeadSegmentLengthLoad || headSegmentLengthIsAvailable)
            {
                // If the head segment length is already available, we may need to rehoist the value including other
                // information. So, need to track the loop out of which the head segment length can be hoisted even if
                // the head segment length is not being loaded here.
                hoistHeadSegmentLengthLoadOutOfLoop = loop;
            }
        }

        if (isLikelyJsArray && loopKills.KillsArrayLengths())
        {
            if (!(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad))
            {
                break;
            }
        }
        else if (doLengthLoad || lengthIsAvailable)
        {
            // If the length is already available, we may need to rehoist the value including other information. So,
            // need to track the loop out of which the head segment length can be hoisted even if the length is not
            // being loaded here.
            hoistLengthLoadOutOfLoop = loop;
        }
    }
}
// Emits a separate BailOnNotArray (or BailOnNotNativeArray) instruction for the base,
// either hoisted to the landing pad of hoistChecksOutOfLoop or inserted before the
// current instruction, then upgrades the tracked base value type to newBaseValueType.
void GlobOpt::ArraySrcOpt::DoArrayChecks()
{
    TRACE_TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Separating array checks with bailout\n"));

    IR::Instr *bailOnNotArray = IR::Instr::New(Js::OpCode::BailOnNotArray, instr->m_func);
    bailOnNotArray->SetSrc1(baseOpnd);
    bailOnNotArray->GetSrc1()->SetIsJITOptimizedReg(true);
    // Native arrays need the stricter check so conversion to a var array is caught.
    const IR::BailOutKind bailOutKind = newBaseValueType.IsLikelyNativeArray() ? IR::BailOutOnNotNativeArray : IR::BailOutOnNotArray;

    if (hoistChecksOutOfLoop)
    {
        // Hoisting is only valid if the loop can't change the base's value type.
        Assert(!(isLikelyJsArray && hoistChecksOutOfLoop->jsArrayKills.KillsValueType(newBaseValueType)));

        TRACE_PHASE_INSTR(
            Js::ArrayCheckHoistPhase,
            instr,
            _u("Hoisting array checks with bailout out of loop %u to landing pad block %u\n"),
            hoistChecksOutOfLoop->GetLoopNumber(),
            hoistChecksOutOfLoop->landingPad->GetBlockNum());
        TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Hoisting array checks with bailout out of loop\n"));

        Assert(hoistChecksOutOfLoop->bailOutInfo);
        globOpt->EnsureBailTarget(hoistChecksOutOfLoop);
        InsertInstrInLandingPad(bailOnNotArray, hoistChecksOutOfLoop);
        bailOnNotArray = bailOnNotArray->ConvertToBailOutInstr(hoistChecksOutOfLoop->bailOutInfo, bailOutKind);
    }
    else
    {
        bailOnNotArray->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(bailOnNotArray);
        globOpt->GenerateBailAtOperation(&bailOnNotArray, bailOutKind);
        // Remember this bailout info so later separated instructions (e.g. length
        // loads) can share it instead of creating their own.
        shareableBailOutInfo = bailOnNotArray->GetBailOutInfo();
        shareableBailOutInfoOriginalOwner = bailOnNotArray;
    }

    baseValueType = newBaseValueType;
    baseOpnd->SetValueType(newBaseValueType);
}
// Separates the JS array length load into its own LdIndir into newLengthSym, followed
// by a BailOnNegative (BailOutOnIrregularLength) guard, either hoisted out of
// hoistLengthLoadOutOfLoop or inserted before the current instruction. Also seeds
// the length's value ([0, INT32_MAX]) in the current block and, when hoisting, in
// every block back to the landing pad.
void GlobOpt::ArraySrcOpt::DoLengthLoad()
{
    Assert(baseValueType.IsArray());
    Assert(newLengthSym);

    TRACE_TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Separating array length load\n"));

    // Create an initial value for the length
    globOpt->CurrentBlockData()->liveVarSyms->Set(newLengthSym->m_id);
    Value *const lengthValue = globOpt->NewIntRangeValue(0, INT32_MAX, false);
    globOpt->CurrentBlockData()->SetValue(lengthValue, newLengthSym);

    // SetValue above would have set the sym store to newLengthSym. This sym won't be used for copy-prop though, so
    // remove it as the sym store.
    globOpt->SetSymStoreDirect(lengthValue->GetValueInfo(), nullptr);

    // length = [array + offsetOf(length)]
    IR::Instr *const loadLength =
        IR::Instr::New(
            Js::OpCode::LdIndir,
            IR::RegOpnd::New(newLengthSym, newLengthSym->GetType(), instr->m_func),
            IR::IndirOpnd::New(
                baseOpnd,
                Js::JavascriptArray::GetOffsetOfLength(),
                newLengthSym->GetType(),
                instr->m_func),
            instr->m_func);
    loadLength->GetDst()->SetIsJITOptimizedReg(true);
    loadLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);

    // BailOnNegative length (BailOutOnIrregularLength)
    IR::Instr *bailOnIrregularLength = IR::Instr::New(Js::OpCode::BailOnNegative, instr->m_func);
    bailOnIrregularLength->SetSrc1(loadLength->GetDst());

    const IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
    if (hoistLengthLoadOutOfLoop)
    {
        Assert(!hoistLengthLoadOutOfLoop->jsArrayKills.KillsArrayLengths());

        TRACE_PHASE_INSTR(
            Js::Phase::ArrayLengthHoistPhase,
            instr,
            _u("Hoisting array length load out of loop %u to landing pad block %u\n"),
            hoistLengthLoadOutOfLoop->GetLoopNumber(),
            hoistLengthLoadOutOfLoop->landingPad->GetBlockNum());
        TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Hoisting array length load out of loop\n"));

        Assert(hoistLengthLoadOutOfLoop->bailOutInfo);
        globOpt->EnsureBailTarget(hoistLengthLoadOutOfLoop);
        InsertInstrInLandingPad(loadLength, hoistLengthLoadOutOfLoop);
        InsertInstrInLandingPad(bailOnIrregularLength, hoistLengthLoadOutOfLoop);
        bailOnIrregularLength = bailOnIrregularLength->ConvertToBailOutInstr(hoistLengthLoadOutOfLoop->bailOutInfo, bailOutKind);

        // Hoist the length value
        // Walk the invariant blocks backward to the landing pad, making the length sym
        // live with a copy of its value in each block.
        for (InvariantBlockBackwardIterator it(
                globOpt,
                globOpt->currentBlock,
                hoistLengthLoadOutOfLoop->landingPad,
                baseOpnd->m_sym,
                baseValue->GetValueNumber());
             it.IsValid();
             it.MoveNext())
        {
            BasicBlock *const block = it.Block();
            block->globOptData.liveVarSyms->Set(newLengthSym->m_id);
            Assert(!block->globOptData.FindValue(newLengthSym));
            Value *const lengthValueCopy = globOpt->CopyValue(lengthValue, lengthValue->GetValueNumber());
            block->globOptData.SetValue(lengthValueCopy, newLengthSym);
            globOpt->SetSymStoreDirect(lengthValueCopy->GetValueInfo(), nullptr);
        }
    }
    else
    {
        loadLength->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(loadLength);
        bailOnIrregularLength->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(bailOnIrregularLength);

        if (shareableBailOutInfo)
        {
            // Reuse the bailout info created by an earlier separated instruction.
            ShareBailOut();
            bailOnIrregularLength = bailOnIrregularLength->ConvertToBailOutInstr(shareableBailOutInfo, bailOutKind);
        }
        else
        {
            globOpt->GenerateBailAtOperation(&bailOnIrregularLength, bailOutKind);
            shareableBailOutInfo = bailOnIrregularLength->GetBailOutInfo();
            shareableBailOutInfoOriginalOwner = bailOnIrregularLength;
        }
    }
}
// Separates the array head segment length load out of the element access: defines
// newHeadSegmentLengthSym via an LdIndir, seeds its value with the conservative
// range [0, SparseArraySegmentBase::MaxLength], and either hoists the load into a
// loop landing pad (propagating the value through the intervening blocks) or
// inserts it immediately before the access.
void GlobOpt::ArraySrcOpt::DoHeadSegmentLengthLoad()
{
    // For JS arrays the segment pointer must already be tracked or being loaded;
    // typed arrays read the length directly off the array object.
    Assert(!isLikelyJsArray || newHeadSegmentSym || baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym());
    Assert(newHeadSegmentLengthSym);
    Assert(!headSegmentLengthValue);
    TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment length load\n"));

    // Create an initial value for the head segment length
    globOpt->CurrentBlockData()->liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
    headSegmentLengthValue = globOpt->NewIntRangeValue(0, Js::SparseArraySegmentBase::MaxLength, false);
    headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
    globOpt->CurrentBlockData()->SetValue(headSegmentLengthValue, newHeadSegmentLengthSym);

    // SetValue above would have set the sym store to newHeadSegmentLengthSym. This sym won't be used for copy-prop
    // though, so remove it as the sym store.
    globOpt->SetSymStoreDirect(headSegmentLengthValue->GetValueInfo(), nullptr);

    // For JS arrays, load the length from the head segment (through the tracked or
    // newly created segment sym); for typed arrays, load it from the array itself.
    StackSym *const headSegmentSym = isLikelyJsArray ? newHeadSegmentSym ? newHeadSegmentSym : baseArrayValueInfo->HeadSegmentSym() : nullptr;
    IR::Instr *const loadHeadSegmentLength =
        IR::Instr::New(
            Js::OpCode::LdIndir,
            IR::RegOpnd::New(newHeadSegmentLengthSym, newHeadSegmentLengthSym->GetType(), instr->m_func),
            IR::IndirOpnd::New(
                isLikelyJsArray ? IR::RegOpnd::New(headSegmentSym, headSegmentSym->GetType(), instr->m_func) : baseOpnd,
                isLikelyJsArray
                    ? Js::SparseArraySegmentBase::GetOffsetOfLength()
                    : Lowerer::GetArrayOffsetOfLength(baseValueType),
                newHeadSegmentLengthSym->GetType(),
                instr->m_func),
            instr->m_func);
    loadHeadSegmentLength->GetDst()->SetIsJITOptimizedReg(true);
    loadHeadSegmentLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);

    // We don't check the head segment length for negative (very large uint32) values. For JS arrays, the bound checks
    // cover that. For typed arrays, we currently don't allocate array buffers with more than 1 GB elements.
    if (hoistHeadSegmentLengthLoadOutOfLoop)
    {
        // The loop must not kill the head segment length for this array kind,
        // otherwise the hoisted load could go stale.
        Assert(
            !(
                isLikelyJsArray
                    ? hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegmentLengths()
                    : hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
            ));

        TRACE_PHASE_INSTR(
            Js::ArraySegmentHoistPhase,
            instr,
            _u("Hoisting array segment length load out of loop %u to landing pad block %u\n"),
            hoistHeadSegmentLengthLoadOutOfLoop->GetLoopNumber(),
            hoistHeadSegmentLengthLoadOutOfLoop->landingPad->GetBlockNum());
        TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment length load out of loop\n"));

        InsertInstrInLandingPad(loadHeadSegmentLength, hoistHeadSegmentLengthLoadOutOfLoop);

        // Hoist the head segment length value
        // Walk backward over the blocks in which the base array value stays invariant,
        // making the new length sym live with a copy of its value in each of them.
        for (InvariantBlockBackwardIterator it(
                globOpt,
                globOpt->currentBlock,
                hoistHeadSegmentLengthLoadOutOfLoop->landingPad,
                baseOpnd->m_sym,
                baseValue->GetValueNumber());
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock *const block = it.Block();
            block->globOptData.liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
            Assert(!block->globOptData.FindValue(newHeadSegmentLengthSym));
            Value *const headSegmentLengthValueCopy = globOpt->CopyValue(headSegmentLengthValue, headSegmentLengthValue->GetValueNumber());
            block->globOptData.SetValue(headSegmentLengthValueCopy, newHeadSegmentLengthSym);
            globOpt->SetSymStoreDirect(headSegmentLengthValueCopy->GetValueInfo(), nullptr);
        }
    }
    else
    {
        // Not hoisting: place the load right before the access.
        loadHeadSegmentLength->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(loadHeadSegmentLength);
        instr->loadedArrayHeadSegmentLength = true;
    }
}
// Extracts the array bound checks out of the element access. Resolves the head
// segment length sym (pre-existing or newly loaded), optionally computes
// hoistability info for both bounds, then emits whichever of the lower/upper
// bound checks has not already been eliminated.
void GlobOpt::ArraySrcOpt::DoExtractBoundChecks()
{
    // At least one of the two checks still needs to be emitted.
    Assert(!(eliminatedLowerBoundCheck && eliminatedUpperBoundCheck));
    Assert(baseOwnerIndir != nullptr || indexOpnd != nullptr);
    Assert(indexOpnd == nullptr || indexOpnd->IsIntConstOpnd() || indexOpnd->AsRegOpnd()->m_sym->IsTypeSpec());
    Assert(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable);
    Assert(canBailOutOnArrayAccessHelperCall);
    Assert(!isStore || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || Js::IsSimd128LoadStore(instr->m_opcode));

    // Prefer the head segment length sym already tracked in the array value info;
    // otherwise use the sym defined by the separated length load.
    headSegmentLengthSym = headSegmentLengthIsAvailable ? baseArrayValueInfo->HeadSegmentLengthSym() : newHeadSegmentLengthSym;
    Assert(headSegmentLengthSym);
    Assert(headSegmentLengthValue);

    if (globOpt->DoBoundCheckHoist())
    {
        if (indexVarSym)
        {
            TRACE_PHASE_INSTR_VERBOSE(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Determining array bound check hoistability for index s%u\n"),
                indexVarSym->m_id);
        }
        else
        {
            // Constant index: trace its value instead of a sym id.
            TRACE_PHASE_INSTR_VERBOSE(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Determining array bound check hoistability for index %d\n"),
                indexConstantBounds.LowerBound());
        }

        // Fills lowerBoundCheckHoistInfo / upperBoundCheckHoistInfo (and the
        // failedToUpdate* flags) for the checks that are still needed.
        globOpt->DetermineArrayBoundCheckHoistability(
            !eliminatedLowerBoundCheck,
            !eliminatedUpperBoundCheck,
            lowerBoundCheckHoistInfo,
            upperBoundCheckHoistInfo,
            isLikelyJsArray,
            indexVarSym,
            indexValue,
            indexConstantBounds,
            headSegmentLengthSym,
            headSegmentLengthValue,
            headSegmentLengthConstantBounds,
            hoistHeadSegmentLengthLoadOutOfLoop,
            failedToUpdateCompatibleLowerBoundCheck,
            failedToUpdateCompatibleUpperBoundCheck);
    }

    if (!eliminatedLowerBoundCheck)
    {
        DoLowerBoundCheck();
    }

    if (!eliminatedUpperBoundCheck)
    {
        DoUpperBoundCheck();
    }
}
// Emits the array lower bound check (0 <= index). Depending on the hoist info it
// either reuses an existing compatible bound check, hoists a new check into a
// loop landing pad, or inserts a separate check right before the access. In all
// cases the index value's bounds are updated to reflect index >= 0.
void GlobOpt::ArraySrcOpt::DoLowerBoundCheck()
{
    // The check is handled here (hoisted, merged, or emitted separately), so the
    // access itself no longer needs it.
    eliminatedLowerBoundCheck = true;
    Assert(indexVarSym);
    Assert(indexOpnd);
    Assert(indexValue);

    GlobOpt::ArrayLowerBoundCheckHoistInfo &hoistInfo = lowerBoundCheckHoistInfo;
    if (hoistInfo.HasAnyInfo())
    {
        BasicBlock *hoistBlock;
        if (hoistInfo.CompatibleBoundCheckBlock())
        {
            // An already-recorded bound check covers this one; no new instruction is emitted.
            hoistBlock = hoistInfo.CompatibleBoundCheckBlock();

            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array lower bound check into existing bound check instruction in block %u\n"),
                hoistBlock->GetBlockNum());
            TESTTRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array lower bound check into existing bound check instruction\n"));
        }
        else
        {
            Assert(hoistInfo.Loop());
            BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
            hoistBlock = landingPad;

            StackSym *indexIntSym;
            if (hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
            {
                if (!landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()))
                {
                    // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize
                    // it in this block if it is invariant, as the conversion will be hoisted along with value
                    // updates.
                    BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
                    IR::Instr *specializeBeforeInstr = nullptr;
                    if (!globOpt->CurrentBlockData()->IsInt32TypeSpecialized(hoistInfo.IndexSym()) &&
                        globOpt->OptIsInvariant(
                            hoistInfo.IndexSym(),
                            globOpt->currentBlock,
                            hoistInfo.Loop(),
                            globOpt->CurrentBlockData()->FindValue(hoistInfo.IndexSym()),
                            false,
                            true))
                    {
                        specializationBlock = globOpt->currentBlock;
                        specializeBeforeInstr = insertBeforeInstr;
                    }
                    Assert(globOpt->tempBv->IsEmpty());
                    globOpt->tempBv->Set(hoistInfo.IndexSym()->m_id);
                    globOpt->ToInt32(globOpt->tempBv, specializationBlock, false, specializeBeforeInstr);
                    globOpt->tempBv->ClearAll();
                    Assert(landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()));
                }
                indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
                Assert(indexIntSym);
            }
            else
            {
                // Index sym (if any) is already an int-typed sym.
                indexIntSym = hoistInfo.IndexSym();
                Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
            }

            if (hoistInfo.IndexSym())
            {
                Assert(hoistInfo.Loop()->bailOutInfo);
                globOpt->EnsureBailTarget(hoistInfo.Loop());

                bool needsMagnitudeAdjustment = false;
                if (hoistInfo.LoopCount())
                {
                    // Generate the loop count and loop count based bound that will be used for the bound check
                    if (!hoistInfo.LoopCount()->HasBeenGenerated())
                    {
                        globOpt->GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
                    }
                    // The adjustment is needed when the per-iteration index offset is
                    // exceeded in magnitude by the maximum per-iteration change.
                    needsMagnitudeAdjustment = (hoistInfo.MaxMagnitudeChange() > 0)
                        ? (hoistInfo.IndexOffset() < hoistInfo.MaxMagnitudeChange())
                        : (hoistInfo.IndexOffset() > hoistInfo.MaxMagnitudeChange());
                    globOpt->GenerateSecondaryInductionVariableBound(
                        hoistInfo.Loop(),
                        indexVarSym->GetInt32EquivSym(nullptr),
                        hoistInfo.LoopCount(),
                        hoistInfo.MaxMagnitudeChange(),
                        needsMagnitudeAdjustment,
                        hoistInfo.IndexSym());
                }

                IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
                IR::Opnd* upperBound = IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func);
                int offset = needsMagnitudeAdjustment ? (hoistInfo.IndexOffset() - hoistInfo.Offset()) : hoistInfo.Offset();
                upperBound->SetIsJITOptimizedReg(true);

                // 0 <= indexSym + offset (src1 <= src2 + dst)
                IR::Instr *const boundCheck = globOpt->CreateBoundsCheckInstr(
                    lowerBound,
                    upperBound,
                    offset,
                    hoistInfo.IsLoopCountBasedBound()
                        ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
                        : IR::BailOutOnFailedHoistedBoundCheck,
                    hoistInfo.Loop()->bailOutInfo,
                    hoistInfo.Loop()->bailOutInfo->bailOutFunc);

                InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());

                TRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array lower bound check out of loop %u to landing pad block %u, as (0 <= s%u + %d)\n"),
                    hoistInfo.Loop()->GetLoopNumber(),
                    landingPad->GetBlockNum(),
                    hoistInfo.IndexSym()->m_id,
                    hoistInfo.Offset());
                TESTTRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array lower bound check out of loop\n"));

                // Record the bound check instruction as available
                const IntBoundCheck boundCheckInfo(
                    ZeroValueNumber,
                    hoistInfo.IndexValueNumber(),
                    boundCheck,
                    landingPad);
                {
                    const bool added = globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                    Assert(added || failedToUpdateCompatibleLowerBoundCheck);
                }
                // Also make the check available in each block between the current
                // block and the landing pad.
                for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock, landingPad, nullptr);
                    it.IsValid();
                    it.MoveNext())
                {
                    const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                    Assert(added || failedToUpdateCompatibleLowerBoundCheck);
                }
            }
        }

        // Update values of the syms involved in the bound check to reflect the bound check
        // (index >= -offset; skipped when the offset would overflow on negation).
        if (hoistBlock != globOpt->currentBlock && hoistInfo.IndexSym() && hoistInfo.Offset() != INT32_MIN)
        {
            for (InvariantBlockBackwardIterator it(
                    globOpt,
                    globOpt->currentBlock->next,
                    hoistBlock,
                    hoistInfo.IndexSym(),
                    hoistInfo.IndexValueNumber(),
                    true);
                it.IsValid();
                it.MoveNext())
            {
                Value *const value = it.InvariantSymValue();
                IntConstantBounds constantBounds;
                AssertVerify(value->GetValueInfo()->TryGetIntConstantBounds(&constantBounds, true));

                ValueInfo *const newValueInfo =
                    globOpt->UpdateIntBoundsForGreaterThanOrEqual(
                        value,
                        constantBounds,
                        nullptr,
                        IntConstantBounds(-hoistInfo.Offset(), -hoistInfo.Offset()),
                        false);
                if (newValueInfo)
                {
                    globOpt->ChangeValueInfo(nullptr, value, newValueInfo);
                    if (it.Block() == globOpt->currentBlock && value == indexValue)
                    {
                        // Keep the cached bounds for the current block in sync.
                        AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
                    }
                }
            }
        }
    }
    else
    {
        // No hoist info: insert a separate lower bound check immediately before the access.
        IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
        IR::Opnd* upperBound = indexOpnd;
        upperBound->SetIsJITOptimizedReg(true);
        const int offset = 0;

        IR::Instr *boundCheck;
        if (shareableBailOutInfo)
        {
            // Reuse the bail-out info already created for this access.
            ShareBailOut();
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                IR::BailOutOnArrayAccessHelperCall,
                shareableBailOutInfo,
                shareableBailOutInfo->bailOutFunc);
        }
        else
        {
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                instr->m_func);
        }

        boundCheck->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(boundCheck);
        if (!shareableBailOutInfo)
        {
            // First bail-out for this access: record its info so later checks can share it.
            globOpt->GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
            shareableBailOutInfo = boundCheck->GetBailOutInfo();
            shareableBailOutInfoOriginalOwner = boundCheck;
        }

        TRACE_PHASE_INSTR(
            Js::Phase::BoundCheckEliminationPhase,
            instr,
            _u("Separating array lower bound check, as (0 <= s%u)\n"),
            indexVarSym->m_id);
        TESTTRACE_PHASE_INSTR(
            Js::Phase::BoundCheckEliminationPhase,
            instr,
            _u("Separating array lower bound check\n"));

        if (globOpt->DoBoundCheckHoist())
        {
            // Record the bound check instruction as available
            const bool added =
                globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(
                    IntBoundCheck(ZeroValueNumber, indexValue->GetValueNumber(), boundCheck, globOpt->currentBlock)) >= 0;
            Assert(added || failedToUpdateCompatibleLowerBoundCheck);
        }
    }

    // Update the index value to reflect the bound check
    ValueInfo *const newValueInfo =
        globOpt->UpdateIntBoundsForGreaterThanOrEqual(
            indexValue,
            indexConstantBounds,
            nullptr,
            IntConstantBounds(0, 0),
            false);
    if (newValueInfo)
    {
        globOpt->ChangeValueInfo(nullptr, indexValue, newValueInfo);
        AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
    }
}
// Emits the array upper bound check (index < headSegmentLength). Like the lower
// bound check, it either reuses a compatible existing check, hoists a new check
// into a loop landing pad, or inserts a separate check before the access. The
// index and head segment length values are then updated to reflect
// index < headSegmentLength.
void GlobOpt::ArraySrcOpt::DoUpperBoundCheck()
{
    // The check is handled here (hoisted, merged, or emitted separately), so the
    // access itself no longer needs it.
    eliminatedUpperBoundCheck = true;

    GlobOpt::ArrayUpperBoundCheckHoistInfo &hoistInfo = upperBoundCheckHoistInfo;
    if (hoistInfo.HasAnyInfo())
    {
        BasicBlock *hoistBlock;
        if (hoistInfo.CompatibleBoundCheckBlock())
        {
            // An already-recorded bound check covers this one; no new instruction is emitted.
            hoistBlock = hoistInfo.CompatibleBoundCheckBlock();

            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array upper bound check into existing bound check instruction in block %u\n"),
                hoistBlock->GetBlockNum());
            TESTTRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array upper bound check into existing bound check instruction\n"));
        }
        else
        {
            Assert(hoistInfo.Loop());
            BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
            hoistBlock = landingPad;

            StackSym *indexIntSym;
            if (hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
            {
                if (!landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()))
                {
                    // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize it
                    // in this block if it is invariant, as the conversion will be hoisted along with value updates.
                    BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
                    IR::Instr *specializeBeforeInstr = nullptr;
                    if (!globOpt->CurrentBlockData()->IsInt32TypeSpecialized(hoistInfo.IndexSym()) &&
                        globOpt->OptIsInvariant(
                            hoistInfo.IndexSym(),
                            globOpt->currentBlock,
                            hoistInfo.Loop(),
                            globOpt->CurrentBlockData()->FindValue(hoistInfo.IndexSym()),
                            false,
                            true))
                    {
                        specializationBlock = globOpt->currentBlock;
                        specializeBeforeInstr = insertBeforeInstr;
                    }
                    Assert(globOpt->tempBv->IsEmpty());
                    globOpt->tempBv->Set(hoistInfo.IndexSym()->m_id);
                    globOpt->ToInt32(globOpt->tempBv, specializationBlock, false, specializeBeforeInstr);
                    globOpt->tempBv->ClearAll();
                    Assert(landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()));
                }
                indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
                Assert(indexIntSym);
            }
            else
            {
                // Index sym (if any) is already an int-typed sym; a null sym means a
                // constant index (handled below via IndexConstantBounds).
                indexIntSym = hoistInfo.IndexSym();
                Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
            }

            Assert(hoistInfo.Loop()->bailOutInfo);
            globOpt->EnsureBailTarget(hoistInfo.Loop());

            bool needsMagnitudeAdjustment = false;
            if (hoistInfo.LoopCount())
            {
                // Generate the loop count and loop count based bound that will be used for the bound check
                if (!hoistInfo.LoopCount()->HasBeenGenerated())
                {
                    globOpt->GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
                }
                // The adjustment is needed when the per-iteration index offset is
                // exceeded in magnitude by the maximum per-iteration change.
                needsMagnitudeAdjustment = (hoistInfo.MaxMagnitudeChange() > 0)
                    ? (hoistInfo.IndexOffset() < hoistInfo.MaxMagnitudeChange())
                    : (hoistInfo.IndexOffset() > hoistInfo.MaxMagnitudeChange());
                globOpt->GenerateSecondaryInductionVariableBound(
                    hoistInfo.Loop(),
                    indexVarSym->GetInt32EquivSym(nullptr),
                    hoistInfo.LoopCount(),
                    hoistInfo.MaxMagnitudeChange(),
                    needsMagnitudeAdjustment,
                    hoistInfo.IndexSym());
            }

            // Lower bound of the check is the index: a sym when available, otherwise
            // the constant lower bound of the index value.
            IR::Opnd* lowerBound = indexIntSym
                ? static_cast<IR::Opnd *>(IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func))
                : IR::IntConstOpnd::New(
                    hoistInfo.IndexConstantBounds().LowerBound(),
                    TyInt32,
                    instr->m_func);
            lowerBound->SetIsJITOptimizedReg(true);
            IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
            upperBound->SetIsJITOptimizedReg(true);
            int offset = needsMagnitudeAdjustment ? (hoistInfo.IndexOffset() + hoistInfo.Offset()) : hoistInfo.Offset();

            // indexSym <= headSegmentLength + offset (src1 <= src2 + dst)
            IR::Instr *const boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                hoistInfo.IsLoopCountBasedBound()
                    ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
                    : IR::BailOutOnFailedHoistedBoundCheck,
                hoistInfo.Loop()->bailOutInfo,
                hoistInfo.Loop()->bailOutInfo->bailOutFunc);

            InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());

            if (indexIntSym)
            {
                TRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (s%u <= s%u + %d)\n"),
                    hoistInfo.Loop()->GetLoopNumber(),
                    landingPad->GetBlockNum(),
                    hoistInfo.IndexSym()->m_id,
                    headSegmentLengthSym->m_id,
                    offset);
            }
            else
            {
                TRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (%d <= s%u + %d)\n"),
                    hoistInfo.Loop()->GetLoopNumber(),
                    landingPad->GetBlockNum(),
                    hoistInfo.IndexConstantBounds().LowerBound(),
                    headSegmentLengthSym->m_id,
                    offset);
            }
            TESTTRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array upper bound check out of loop\n"));

            // Record the bound check instruction as available
            const IntBoundCheck boundCheckInfo(
                hoistInfo.IndexValue() ? hoistInfo.IndexValueNumber() : ZeroValueNumber,
                hoistInfo.HeadSegmentLengthValue()->GetValueNumber(),
                boundCheck,
                landingPad);
            {
                const bool added = globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                Assert(added || failedToUpdateCompatibleUpperBoundCheck);
            }
            // Also make the check available in each block between the current block
            // and the landing pad.
            for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock, landingPad, nullptr);
                it.IsValid();
                it.MoveNext())
            {
                const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                Assert(added || failedToUpdateCompatibleUpperBoundCheck);
            }
        }

        // Update values of the syms involved in the bound check to reflect the bound check
        Assert(!hoistInfo.Loop() || hoistBlock != globOpt->currentBlock);
        if (hoistBlock != globOpt->currentBlock)
        {
            for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock->next, hoistBlock, nullptr, InvalidValueNumber, true);
                it.IsValid();
                it.MoveNext())
            {
                BasicBlock *const block = it.Block();

                // Left side of the relation: the index (sym value per block, or its
                // constant bounds when there is no index sym).
                Value *leftValue;
                IntConstantBounds leftConstantBounds;
                if (hoistInfo.IndexSym())
                {
                    leftValue = block->globOptData.FindValue(hoistInfo.IndexSym());
                    if (!leftValue || leftValue->GetValueNumber() != hoistInfo.IndexValueNumber())
                    {
                        // The index value changed in this block; nothing to update here.
                        continue;
                    }
                    AssertVerify(leftValue->GetValueInfo()->TryGetIntConstantBounds(&leftConstantBounds, true));
                }
                else
                {
                    leftValue = nullptr;
                    leftConstantBounds = hoistInfo.IndexConstantBounds();
                }

                // Right side of the relation: the head segment length value in this block.
                Value *const rightValue = block->globOptData.FindValue(headSegmentLengthSym);
                if (!rightValue)
                {
                    continue;
                }
                Assert(rightValue->GetValueNumber() == headSegmentLengthValue->GetValueNumber());
                IntConstantBounds rightConstantBounds;
                AssertVerify(rightValue->GetValueInfo()->TryGetIntConstantBounds(&rightConstantBounds));

                // index <= headSegmentLength + offset
                ValueInfo *const newValueInfoForLessThanOrEqual =
                    globOpt->UpdateIntBoundsForLessThanOrEqual(
                        leftValue,
                        leftConstantBounds,
                        rightValue,
                        rightConstantBounds,
                        hoistInfo.Offset(),
                        false);
                if (newValueInfoForLessThanOrEqual)
                {
                    globOpt->ChangeValueInfo(nullptr, leftValue, newValueInfoForLessThanOrEqual);
                    AssertVerify(newValueInfoForLessThanOrEqual->TryGetIntConstantBounds(&leftConstantBounds, true));
                    if (block == globOpt->currentBlock && leftValue == indexValue)
                    {
                        // Keep the cached bounds for the current block in sync.
                        Assert(newValueInfoForLessThanOrEqual->IsInt());
                        indexConstantBounds = leftConstantBounds;
                    }
                }
                if (hoistInfo.Offset() != INT32_MIN)
                {
                    // headSegmentLength >= index - offset (skipped when the offset
                    // would overflow on negation).
                    ValueInfo *const newValueInfoForGreaterThanOrEqual =
                        globOpt->UpdateIntBoundsForGreaterThanOrEqual(
                            rightValue,
                            rightConstantBounds,
                            leftValue,
                            leftConstantBounds,
                            -hoistInfo.Offset(),
                            false);
                    if (newValueInfoForGreaterThanOrEqual)
                    {
                        globOpt->ChangeValueInfo(nullptr, rightValue, newValueInfoForGreaterThanOrEqual);
                        if (block == globOpt->currentBlock)
                        {
                            Assert(rightValue == headSegmentLengthValue);
                            AssertVerify(newValueInfoForGreaterThanOrEqual->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
                        }
                    }
                }
            }
        }
    }
    else
    {
        // No hoist info: insert a separate upper bound check immediately before the access.
        IR::Opnd * lowerBound = indexOpnd ? indexOpnd : IR::IntConstOpnd::New(baseOwnerIndir->GetOffset(), TyInt32, instr->m_func);
        lowerBound->SetIsJITOptimizedReg(true);
        IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
        upperBound->SetIsJITOptimizedReg(true);
        const int offset = -1;

        IR::Instr *boundCheck;
        // index <= headSegmentLength - 1 (src1 <= src2 + dst)
        if (shareableBailOutInfo)
        {
            // Reuse the bail-out info already created for this access.
            ShareBailOut();
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                IR::BailOutOnArrayAccessHelperCall,
                shareableBailOutInfo,
                shareableBailOutInfo->bailOutFunc);
        }
        else
        {
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                instr->m_func);
        }

        boundCheck->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(boundCheck);
        if (!shareableBailOutInfo)
        {
            // First bail-out for this access: record its info so later checks can share it.
            globOpt->GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
            shareableBailOutInfo = boundCheck->GetBailOutInfo();
            shareableBailOutInfoOriginalOwner = boundCheck;
        }

        instr->extractedUpperBoundCheckWithoutHoisting = true;

        if (indexOpnd != nullptr && indexOpnd->IsRegOpnd())
        {
            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckEliminationPhase,
                instr,
                _u("Separating array upper bound check, as (s%u < s%u)\n"),
                indexVarSym->m_id,
                headSegmentLengthSym->m_id);
        }
        else
        {
            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckEliminationPhase,
                instr,
                _u("Separating array upper bound check, as (%d < s%u)\n"),
                indexOpnd ? indexOpnd->AsIntConstOpnd()->AsInt32() : baseOwnerIndir->GetOffset(),
                headSegmentLengthSym->m_id);
        }
        TESTTRACE_PHASE_INSTR(
            Js::Phase::BoundCheckEliminationPhase,
            instr,
            _u("Separating array upper bound check\n"));

        if (globOpt->DoBoundCheckHoist())
        {
            // Record the bound check instruction as available
            const bool added =
                globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(
                    IntBoundCheck(
                        indexValue ? indexValue->GetValueNumber() : ZeroValueNumber,
                        headSegmentLengthValue->GetValueNumber(),
                        boundCheck,
                        globOpt->currentBlock)) >= 0;
            Assert(added || failedToUpdateCompatibleUpperBoundCheck);
        }
    }

    // Update the index and head segment length values to reflect the bound check
    ValueInfo *newValueInfo =
        globOpt->UpdateIntBoundsForLessThan(
            indexValue,
            indexConstantBounds,
            headSegmentLengthValue,
            headSegmentLengthConstantBounds,
            false);
    if (newValueInfo)
    {
        globOpt->ChangeValueInfo(nullptr, indexValue, newValueInfo);
        AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
    }

    newValueInfo =
        globOpt->UpdateIntBoundsForGreaterThan(
            headSegmentLengthValue,
            headSegmentLengthConstantBounds,
            indexValue,
            indexConstantBounds,
            false);
    if (newValueInfo)
    {
        globOpt->ChangeValueInfo(nullptr, headSegmentLengthValue, newValueInfo);
    }
}
// Propagates the base array's value info backward through the blocks between the
// current block and the landing pads of the loops out of which items (array
// checks, head segment load, head segment length load, length load) were
// hoisted. Components of the array value info that were not hoisted past a given
// landing pad are stripped from the hoisted value info once that landing pad is
// passed.
void GlobOpt::ArraySrcOpt::UpdateHoistedValueInfo()
{
    // Iterate up to the root loop's landing pad until all necessary value info is updated
    uint hoistItemCount =
        static_cast<uint>(!!hoistChecksOutOfLoop) +
        !!hoistHeadSegmentLoadOutOfLoop +
        !!hoistHeadSegmentLengthLoadOutOfLoop +
        !!hoistLengthLoadOutOfLoop;
    if (hoistItemCount == 0)
    {
        // Nothing was hoisted, so there is no value info to propagate.
        return;
    }

    // Find the outermost loop; its landing pad bounds the backward walk.
    AnalysisAssert(globOpt->currentBlock->loop != nullptr);
    Loop * rootLoop = nullptr;
    for (Loop *loop = globOpt->currentBlock->loop; loop; loop = loop->parent)
    {
        rootLoop = loop;
    }
    AnalysisAssert(rootLoop != nullptr);

    ValueInfo *valueInfoToHoist = baseValueInfo;
    // Components present in the array value info but not hoisted out of any loop
    // must be removed before hoisting the value info past the current block.
    bool removeHeadSegment, removeHeadSegmentLength, removeLength;
    if (baseArrayValueInfo)
    {
        removeHeadSegment = baseArrayValueInfo->HeadSegmentSym() && !hoistHeadSegmentLoadOutOfLoop;
        removeHeadSegmentLength = baseArrayValueInfo->HeadSegmentLengthSym() && !hoistHeadSegmentLengthLoadOutOfLoop;
        removeLength = baseArrayValueInfo->LengthSym() && !hoistLengthLoadOutOfLoop;
    }
    else
    {
        removeLength = removeHeadSegmentLength = removeHeadSegment = false;
    }

    for (InvariantBlockBackwardIterator it(
            globOpt,
            globOpt->currentBlock,
            rootLoop->landingPad,
            baseOpnd->m_sym,
            baseValue->GetValueNumber());
        it.IsValid();
        it.MoveNext())
    {
        if (removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            // Remove information that shouldn't be there anymore, from the value info
            valueInfoToHoist =
                valueInfoToHoist->AsArrayValueInfo()->Copy(
                    globOpt->alloc,
                    !removeHeadSegment,
                    !removeHeadSegmentLength,
                    !removeLength);
            removeLength = removeHeadSegmentLength = removeHeadSegment = false;
        }

        BasicBlock *const block = it.Block();
        Value *const blockBaseValue = it.InvariantSymValue();
        globOpt->HoistInvariantValueInfo(valueInfoToHoist, blockBaseValue, block);

        // See if we have completed hoisting value info for one of the items
        if (hoistChecksOutOfLoop && block == hoistChecksOutOfLoop->landingPad)
        {
            // All other items depend on array checks, so we can just stop here
            hoistChecksOutOfLoop = nullptr;
            break;
        }
        if (hoistHeadSegmentLoadOutOfLoop && block == hoistHeadSegmentLoadOutOfLoop->landingPad)
        {
            hoistHeadSegmentLoadOutOfLoop = nullptr;
            if (--hoistItemCount == 0)
            {
                break;
            }
            // Past this landing pad the head segment sym is no longer valid;
            // strip it before the next hoist.
            if (valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentSym())
            {
                removeHeadSegment = true;
            }
        }
        if (hoistHeadSegmentLengthLoadOutOfLoop && block == hoistHeadSegmentLengthLoadOutOfLoop->landingPad)
        {
            hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
            if (--hoistItemCount == 0)
            {
                break;
            }
            if (valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentLengthSym())
            {
                removeHeadSegmentLength = true;
            }
        }
        if (hoistLengthLoadOutOfLoop && block == hoistLengthLoadOutOfLoop->landingPad)
        {
            hoistLengthLoadOutOfLoop = nullptr;
            if (--hoistItemCount == 0)
            {
                break;
            }
            if (valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->LengthSym())
            {
                removeLength = true;
            }
        }
    }
}
  1392. void GlobOpt::ArraySrcOpt::InsertInstrInLandingPad(IR::Instr *const instr, Loop *const hoistOutOfLoop)
  1393. {
  1394. if (hoistOutOfLoop->bailOutInfo->bailOutInstr)
  1395. {
  1396. instr->SetByteCodeOffset(hoistOutOfLoop->bailOutInfo->bailOutInstr);
  1397. hoistOutOfLoop->bailOutInfo->bailOutInstr->InsertBefore(instr);
  1398. }
  1399. else
  1400. {
  1401. instr->SetByteCodeOffset(hoistOutOfLoop->landingPad->GetLastInstr());
  1402. hoistOutOfLoop->landingPad->InsertAfter(instr);
  1403. }
  1404. };
  1405. void GlobOpt::ArraySrcOpt::ShareBailOut()
  1406. {
  1407. Assert(shareableBailOutInfo);
  1408. if (shareableBailOutInfo->bailOutInstr != shareableBailOutInfoOriginalOwner)
  1409. {
  1410. return;
  1411. }
  1412. Assert(shareableBailOutInfoOriginalOwner->GetBailOutInfo() == shareableBailOutInfo);
  1413. IR::Instr *const sharedBailOut = shareableBailOutInfoOriginalOwner->ShareBailOut();
  1414. Assert(sharedBailOut->GetBailOutInfo() == shareableBailOutInfo);
  1415. shareableBailOutInfoOriginalOwner = nullptr;
  1416. sharedBailOut->Unlink();
  1417. insertBeforeInstr->InsertBefore(sharedBailOut);
  1418. insertBeforeInstr = sharedBailOut;
  1419. }
  1420. void GlobOpt::ArraySrcOpt::InsertHeadSegmentLoad()
  1421. {
  1422. TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment load\n"));
  1423. Assert(newHeadSegmentSym);
  1424. IR::RegOpnd *const headSegmentOpnd = IR::RegOpnd::New(newHeadSegmentSym, newHeadSegmentSym->GetType(), instr->m_func);
  1425. headSegmentOpnd->SetIsJITOptimizedReg(true);
  1426. IR::RegOpnd *const jitOptimizedBaseOpnd = baseOpnd->Copy(instr->m_func)->AsRegOpnd();
  1427. jitOptimizedBaseOpnd->SetIsJITOptimizedReg(true);
  1428. IR::Instr *loadObjectArray;
  1429. if (baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
  1430. {
  1431. loadObjectArray =
  1432. IR::Instr::New(
  1433. Js::OpCode::LdIndir,
  1434. headSegmentOpnd,
  1435. IR::IndirOpnd::New(
  1436. jitOptimizedBaseOpnd,
  1437. Js::DynamicObject::GetOffsetOfObjectArray(),
  1438. jitOptimizedBaseOpnd->GetType(),
  1439. instr->m_func),
  1440. instr->m_func);
  1441. }
  1442. else
  1443. {
  1444. loadObjectArray = nullptr;
  1445. }
  1446. IR::Instr *const loadHeadSegment =
  1447. IR::Instr::New(
  1448. Js::OpCode::LdIndir,
  1449. headSegmentOpnd,
  1450. IR::IndirOpnd::New(
  1451. loadObjectArray ? headSegmentOpnd : jitOptimizedBaseOpnd,
  1452. Lowerer::GetArrayOffsetOfHeadSegment(baseValueType),
  1453. headSegmentOpnd->GetType(),
  1454. instr->m_func),
  1455. instr->m_func);
  1456. if (hoistHeadSegmentLoadOutOfLoop)
  1457. {
  1458. Assert(!(isLikelyJsArray && hoistHeadSegmentLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegments()));
  1459. TRACE_PHASE_INSTR(
  1460. Js::ArraySegmentHoistPhase,
  1461. instr,
  1462. _u("Hoisting array segment load out of loop %u to landing pad block %u\n"),
  1463. hoistHeadSegmentLoadOutOfLoop->GetLoopNumber(),
  1464. hoistHeadSegmentLoadOutOfLoop->landingPad->GetBlockNum());
  1465. TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment load out of loop\n"));
  1466. if (loadObjectArray)
  1467. {
  1468. InsertInstrInLandingPad(loadObjectArray, hoistHeadSegmentLoadOutOfLoop);
  1469. }
  1470. InsertInstrInLandingPad(loadHeadSegment, hoistHeadSegmentLoadOutOfLoop);
  1471. }
  1472. else
  1473. {
  1474. if (loadObjectArray)
  1475. {
  1476. loadObjectArray->SetByteCodeOffset(instr);
  1477. insertBeforeInstr->InsertBefore(loadObjectArray);
  1478. }
  1479. loadHeadSegment->SetByteCodeOffset(instr);
  1480. insertBeforeInstr->InsertBefore(loadHeadSegment);
  1481. instr->loadedArrayHeadSegment = true;
  1482. }
  1483. }
// Main driver for optimizing an array-access instruction: type-specializes the
// index, decides which array checks / head-segment / length loads can be done
// or hoisted, eliminates bound checks where possible, updates the tracked
// value info, and finally converts helper-call paths into bailouts.
void GlobOpt::ArraySrcOpt::Optimize()
{
    if (!CheckOpCode())
    {
        return;
    }

    // The base operand is owned by either an instruction or an indir, never both.
    Assert(!(baseOwnerInstr && baseOwnerIndir));
    // Needing the head segment length implies needing the head segment itself.
    Assert(!needsHeadSegmentLength || needsHeadSegment);

    TypeSpecIndex();

    if (isProfilableStElem && !globOpt->IsLoopPrePass())
    {
        // If the dead-store pass decides to add the bailout kind IR::BailOutInvalidatedArrayHeadSegment, and the fast path is
        // generated, it may bail out before the operation is done, so this would need to be a pre-op bailout.
        if (instr->HasBailOutInfo())
        {
            Assert(instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset && instr->GetBailOutInfo()->bailOutOffset <= instr->GetByteCodeOffset());
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            // Any existing main bailout kind must already be the pre-op implicit-call kind.
            Assert(!(bailOutKind & ~IR::BailOutKindBits) || (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
            if (!(bailOutKind & ~IR::BailOutKindBits))
            {
                // Only auxiliary bits are set; add the pre-op implicit-call main kind.
                instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
            }
        }
        else
        {
            globOpt->GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
        }
    }

    // Look up the tracked value for the array base; without one there is
    // nothing to optimize against.
    baseValue = globOpt->CurrentBlockData()->FindValue(baseOpnd->m_sym);
    if (baseValue == nullptr)
    {
        return;
    }

    baseValueInfo = baseValue->GetValueInfo();
    baseValueType = baseValueInfo->Type();
    baseOpnd->SetValueType(baseValueType);

    if (!baseValueType.IsLikelyAnyOptimizedArray())
    {
        return;
    }

    isLikelyJsArray = !baseValueType.IsLikelyTypedArray();
    Assert(isLikelyJsArray == baseValueType.IsLikelyArrayOrObjectWithArray());
    Assert(!isLikelyJsArray == baseValueType.IsLikelyOptimizedTypedArray());

    if (!isLikelyJsArray && instr->m_opcode == Js::OpCode::LdMethodElem)
    {
        // Fast path is not generated in this case since the subsequent call will throw
        return;
    }

    // Bail out of the optimization entirely if array-check hoisting is off for
    // this type/loop, or the indir's index is not expected to be a
    // conventional array index.
    if (!globOpt->DoArrayCheckHoist(baseValueType, globOpt->currentBlock->loop, instr) ||
        (baseOwnerIndir && !globOpt->ShouldExpectConventionalArrayIndexValue(baseOwnerIndir)))
    {
        if (!globOpt->IsLoopPrePass() && baseValueType.IsAnyOptimizedArray())
        {
            globOpt->ProcessNoImplicitCallArrayUses(baseOpnd, nullptr, instr, isLikelyJsArray, isLoad || isStore || instr->m_opcode == Js::OpCode::IsIn);
        }
        return;
    }

    isLikelyVirtualTypedArray = baseValueType.IsLikelyOptimizedVirtualTypedArray();
    Assert(!(isLikelyJsArray && isLikelyVirtualTypedArray));

    // After the (eventual) array checks, the base is a definite object of this type.
    newBaseValueType = baseValueType.ToDefiniteObject();
    if (isLikelyJsArray && newBaseValueType.HasNoMissingValues() && !globOpt->DoArrayMissingValueCheckHoist())
    {
        // Missing-value check hoisting is disabled, so don't track the
        // no-missing-values property.
        newBaseValueType = newBaseValueType.SetHasNoMissingValues(false);
    }
    Assert((newBaseValueType == baseValueType) == baseValueType.IsObject());

    if (globOpt->IsLoopPrePass())
    {
        if (newBaseValueType != baseValueType)
        {
            if (globOpt->IsSafeToTransferInPrePass(baseOpnd, baseValue))
            {
                UpdateValue(nullptr, nullptr, nullptr);
            }
            else if (isLikelyJsArray && globOpt->IsOperationThatLikelyKillsJsArraysWithNoMissingValues(instr) && baseValueInfo->HasNoMissingValues())
            {
                globOpt->ChangeValueType(nullptr, baseValue, baseValueInfo->Type().SetHasNoMissingValues(false), true);
            }
        }

        // For javascript arrays and objects with javascript arrays:
        //   - Implicit calls need to be disabled and calls cannot be allowed in the loop since the array vtable may be changed
        //     into an ES5 array.
        // For typed arrays:
        //   - A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the
        //     typed array's length is set to zero. Implicit calls need to be disabled if the typed array's head segment length
        //     is going to be loaded and used later.
        // Since we don't know if the loop has kills after this instruction, the kill information may not be complete. If a kill
        // is found later, this information will be updated to not require disabling implicit calls.
        const bool kills = isLikelyJsArray ? globOpt->rootLoopPrePass->jsArrayKills.KillsValueType(newBaseValueType) : globOpt->rootLoopPrePass->jsArrayKills.KillsTypedArrayHeadSegmentLengths();
        if (!kills)
        {
            globOpt->rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
        }
        return;
    }

    if (baseValueInfo->IsArrayValueInfo())
    {
        baseArrayValueInfo = baseValueInfo->AsArrayValueInfo();
    }

    // Decide which pieces of work remain: checks are needed unless the base is
    // already a definite object; loads are needed only if the corresponding
    // sym is not already tracked in the array value info.
    doArrayChecks = !baseValueType.IsObject();
    doArraySegmentHoist =
        globOpt->DoArraySegmentHoist(baseValueType) &&
        instr->m_opcode != Js::OpCode::StElemC;
    headSegmentIsAvailable =
        baseArrayValueInfo &&
        baseArrayValueInfo->HeadSegmentSym();
    doHeadSegmentLoad =
        doArraySegmentHoist &&
        needsHeadSegment && !headSegmentIsAvailable;
    doArraySegmentLengthHoist =
        doArraySegmentHoist &&
        (isLikelyJsArray || globOpt->DoTypedArraySegmentLengthHoist(globOpt->currentBlock->loop));
    headSegmentLengthIsAvailable =
        baseArrayValueInfo &&
        baseArrayValueInfo->HeadSegmentLengthSym();
    doHeadSegmentLengthLoad =
        doArraySegmentLengthHoist &&
        (needsHeadSegmentLength || (!isLikelyJsArray && needsLength)) &&
        !headSegmentLengthIsAvailable;
    lengthIsAvailable =
        baseArrayValueInfo &&
        baseArrayValueInfo->LengthSym();
    doLengthLoad =
        globOpt->DoArrayLengthHoist() &&
        needsLength &&
        !lengthIsAvailable &&
        baseValueType.IsLikelyArray() &&
        globOpt->DoLdLenIntSpec(instr->m_opcode == Js::OpCode::LdLen_A ? instr : nullptr, baseValueType);

    // Fresh syms for whatever will be loaded below.
    newHeadSegmentSym = doHeadSegmentLoad ? StackSym::New(TyMachPtr, instr->m_func) : nullptr;
    newHeadSegmentLengthSym = doHeadSegmentLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
    newLengthSym = doLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;

    if (Js::IsSimd128LoadStore(instr->m_opcode) || instr->m_opcode == Js::OpCode::IsIn)
    {
        // SIMD_JS
        // simd load/store never call helper
        canBailOutOnArrayAccessHelperCall = true;
    }
    else
    {
        // Can only bail out instead of calling the helper when profiling says a
        // helper call is unlikely.
        canBailOutOnArrayAccessHelperCall =
            (isProfilableLdElem || isProfilableStElem) &&
            globOpt->DoEliminateArrayAccessHelperCall() &&
            !(
                instr->IsProfiledInstr() &&
                (
                    isProfilableLdElem
                        ? instr->AsProfiledInstr()->u.ldElemInfo->LikelyNeedsHelperCall()
                        : instr->AsProfiledInstr()->u.stElemInfo->LikelyNeedsHelperCall()
                )
            );
    }

    CheckVirtualArrayBounds();

    if (needsBoundChecks && globOpt->DoBoundCheckElimination())
    {
        TryEliminiteBoundsCheck();
    }

    if (doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad || doExtractBoundChecks)
    {
        CheckLoops();
        insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();

        if (doArrayChecks)
        {
            DoArrayChecks();
        }

        if (doLengthLoad)
        {
            DoLengthLoad();
        }

        if (doHeadSegmentLoad && isLikelyJsArray)
        {
            // For javascript arrays, the head segment is required to load the head segment length
            InsertHeadSegmentLoad();
        }

        if (doHeadSegmentLengthLoad)
        {
            DoHeadSegmentLengthLoad();
        }

        if (doExtractBoundChecks)
        {
            DoExtractBoundChecks();
        }

        if (doHeadSegmentLoad && !isLikelyJsArray)
        {
            // For typed arrays, load the length first, followed by the bound checks, and then load the head segment. This
            // allows the length sym to become dead by the time of the head segment load, freeing up the register for use by the
            // head segment sym.
            InsertHeadSegmentLoad();
        }

        if (doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
        {
            // Record the new syms in the value info, then re-fetch the (possibly
            // replaced) value info before propagating it to hoisted copies.
            UpdateValue(newHeadSegmentSym, newHeadSegmentLengthSym, newLengthSym);
            baseValueInfo = baseValue->GetValueInfo();
            baseArrayValueInfo = baseValueInfo->IsArrayValueInfo() ? baseValueInfo->AsArrayValueInfo() : nullptr;
            UpdateHoistedValueInfo();
        }
    }

    IR::ArrayRegOpnd * baseArrayOpnd;
    if (baseArrayValueInfo != nullptr)
    {
        // Update the opnd to include the associated syms
        baseArrayOpnd =
            baseArrayValueInfo->CreateOpnd(
                baseOpnd,
                needsHeadSegment,
                needsHeadSegmentLength || (!isLikelyJsArray && needsLength),
                needsLength,
                eliminatedLowerBoundCheck,
                eliminatedUpperBoundCheck,
                instr->m_func);
        if (baseOwnerInstr != nullptr)
        {
            if (baseOwnerInstr->GetSrc1() == baseOpnd)
            {
                baseOwnerInstr->ReplaceSrc1(baseArrayOpnd);
            }
            else
            {
                Assert(baseOwnerInstr->GetSrc2() == baseOpnd);
                baseOwnerInstr->ReplaceSrc2(baseArrayOpnd);
            }
        }
        else
        {
            Assert(baseOwnerIndir);
            Assert(baseOwnerIndir->GetBaseOpnd() == baseOpnd);
            baseOwnerIndir->ReplaceBaseOpnd(baseArrayOpnd);
        }
        baseOpnd = baseArrayOpnd;
    }
    else
    {
        baseArrayOpnd = nullptr;
    }

    globOpt->ProcessNoImplicitCallArrayUses(baseOpnd, baseArrayOpnd, instr, isLikelyJsArray, isLoad || isStore || instr->m_opcode == Js::OpCode::IsIn);

    // Tracing helper: reports each eliminated piece of work for this instruction.
    const auto OnEliminated = [&](const Js::Phase phase, const char *const eliminatedLoad)
    {
        TRACE_TESTTRACE_PHASE_INSTR(phase, instr, _u("Eliminating array %S\n"), eliminatedLoad);
    };

    OnEliminated(Js::Phase::ArrayCheckHoistPhase, "checks");
    if (baseArrayOpnd)
    {
        if (baseArrayOpnd->HeadSegmentSym())
        {
            OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment load");
        }
        if (baseArrayOpnd->HeadSegmentLengthSym())
        {
            OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment length load");
        }
        if (baseArrayOpnd->LengthSym())
        {
            OnEliminated(Js::Phase::ArrayLengthHoistPhase, "length load");
        }
        if (baseArrayOpnd->EliminatedLowerBoundCheck())
        {
            OnEliminated(Js::Phase::BoundCheckEliminationPhase, "lower bound check");
        }
        if (baseArrayOpnd->EliminatedUpperBoundCheck())
        {
            OnEliminated(Js::Phase::BoundCheckEliminationPhase, "upper bound check");
        }
    }

    if (instr->m_opcode == Js::OpCode::IsIn)
    {
        // When both bound checks were eliminated, IsIn is statically true:
        // replace it with a load of the boolean true constant.
        if (eliminatedLowerBoundCheck && eliminatedUpperBoundCheck)
        {
            TRACE_TESTTRACE_PHASE_INSTR(Js::Phase::BoundCheckEliminationPhase, instr, _u("Eliminating IsIn\n"));

            globOpt->CaptureByteCodeSymUses(instr);

            instr->m_opcode = Js::OpCode::Ld_A;

            IR::AddrOpnd * addrOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetTrueAddr(), IR::AddrOpndKindDynamicVar, func, true);
            addrOpnd->SetValueType(ValueType::Boolean);
            instr->ReplaceSrc1(addrOpnd);
            instr->FreeSrc2();

            originalIndexOpnd->Free(func);
            originalIndexOpnd = nullptr;

            src1Val = globOpt->GetVarConstantValue(instr->GetSrc1()->AsAddrOpnd());
            src2Val = nullptr;
        }
        return;
    }

    if (!canBailOutOnArrayAccessHelperCall)
    {
        return;
    }

    // Bail out instead of generating a helper call. This helps to remove the array reference when the head segment and head
    // segment length are available, reduces code size, and allows bound checks to be separated.
    if (instr->HasBailOutInfo())
    {
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        Assert(
            !(bailOutKind & ~IR::BailOutKindBits) ||
            (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp ||
            (bailOutKind & ~IR::BailOutKindBits) == IR::LazyBailOut);
        // Note: & binds tighter than |, so this keeps the auxiliary bits and
        // replaces the main kind with BailOutOnArrayAccessHelperCall.
        instr->SetBailOutKind(bailOutKind & IR::BailOutKindBits | IR::BailOutOnArrayAccessHelperCall);
    }
    else
    {
        globOpt->GenerateBailAtOperation(&instr, IR::BailOutOnArrayAccessHelperCall);
    }
}