// GlobOptArrays.cpp (extracted listing; pagination and line-number residue removed)
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
#if ENABLE_DEBUG_CONFIG_OPTIONS
// Prints a "Testtrace: <phase> function <name> (<id>): ..." line for the given
// phase/instruction when PHASE_TESTTRACE is enabled for this->func, followed by
// the caller-supplied printf-style message, then flushes the output.
#define TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    if(PHASE_TESTTRACE(phase, this->func)) \
    { \
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
        Output::Print( \
            _u("Testtrace: %s function %s (%s): "), \
            Js::PhaseNames[phase], \
            instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
            instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }
#else
// No-op in builds without debug config options.
#define TESTTRACE_PHASE_INSTR(phase, instr, ...)
#endif

#if ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
// Emits both the dump trace (TRACE_PHASE_INSTR) and the test trace for the same
// phase/instruction/message.
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
    TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
#else
// Without DBG_DUMP only the test trace is available.
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
#endif
  29. GlobOpt::ArraySrcOpt::~ArraySrcOpt()
  30. {
  31. if (originalIndexOpnd != nullptr)
  32. {
  33. Assert(instr->m_opcode == Js::OpCode::IsIn);
  34. instr->ReplaceSrc1(originalIndexOpnd);
  35. }
  36. }
// Classifies the current instruction for the array-source optimization.
// On success, records which operand is the array base (baseOpnd, plus either
// baseOwnerIndir for indirect element accesses or baseOwnerInstr otherwise)
// and which pieces of array state the opcode requires (needsHeadSegment,
// needsHeadSegmentLength, needsLength, needsBoundChecks), along with whether
// the instruction acts as a load and/or a store.
// Returns false for opcodes this optimization does not handle, leaving all
// state untouched.
bool GlobOpt::ArraySrcOpt::CheckOpCode()
{
    switch (instr->m_opcode)
    {
        // SIMD_JS
        case Js::OpCode::Simd128_LdArr_F4:
        case Js::OpCode::Simd128_LdArr_I4:
            // no type-spec for Asm.js
            if (globOpt->GetIsAsmJSFunc())
            {
                return false;
            }
            // fall through
        case Js::OpCode::LdElemI_A:
        case Js::OpCode::LdMethodElem:
            // Element loads take the array via an indirect src1.
            if (!instr->GetSrc1()->IsIndirOpnd())
            {
                return false;
            }
            baseOwnerIndir = instr->GetSrc1()->AsIndirOpnd();
            baseOpnd = baseOwnerIndir->GetBaseOpnd();
            // LdMethodElem is currently not profiled
            isProfilableLdElem = instr->m_opcode != Js::OpCode::LdMethodElem;
            needsBoundChecks = true;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            isLoad = true;
            break;

        // SIMD_JS
        case Js::OpCode::Simd128_StArr_F4:
        case Js::OpCode::Simd128_StArr_I4:
            // no type-spec for Asm.js
            if (globOpt->GetIsAsmJSFunc())
            {
                return false;
            }
            // fall through
        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        case Js::OpCode::StElemC:
            // Element stores take the array via an indirect dst.
            if (!instr->GetDst()->IsIndirOpnd())
            {
                return false;
            }
            baseOwnerIndir = instr->GetDst()->AsIndirOpnd();
            baseOpnd = baseOwnerIndir->GetBaseOpnd();
            isProfilableStElem = instr->m_opcode != Js::OpCode::StElemC;
            // StElemC skips bound checks; only profilable stores get them.
            needsBoundChecks = isProfilableStElem;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            isStore = true;
            break;

        case Js::OpCode::InlineArrayPush:
        case Js::OpCode::InlineArrayPop:
        {
            IR::Opnd * thisOpnd = instr->GetSrc1();

            // Abort if it not a LikelyArray or Object with Array - No point in doing array check elimination.
            if (!thisOpnd->IsRegOpnd() || !thisOpnd->GetValueType().IsLikelyArrayOrObjectWithArray())
            {
                return false;
            }

            baseOwnerInstr = instr;
            baseOpnd = thisOpnd->AsRegOpnd();
            // Pop reads an element; push writes one.
            isLoad = instr->m_opcode == Js::OpCode::InlineArrayPop;
            isStore = instr->m_opcode == Js::OpCode::InlineArrayPush;
            needsLength = true;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            break;
        }

        case Js::OpCode::LdLen_A:
            if (!instr->GetSrc1()->IsRegOpnd())
            {
                return false;
            }
            baseOpnd = instr->GetSrc1()->AsRegOpnd();
            // ObjectWithArray bases are not handled for length loads.
            if (baseOpnd->GetValueType().IsLikelyObject() && baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray)
            {
                return false;
            }
            baseOwnerInstr = instr;
            needsLength = true;
            break;

        case Js::OpCode::IsIn:
            if (!globOpt->DoArrayMissingValueCheckHoist())
            {
                return false;
            }
            // The property operand (src1) must be a reg or int const; the object (src2) must be a reg.
            if (!instr->GetSrc1()->IsRegOpnd() && !instr->GetSrc1()->IsIntConstOpnd())
            {
                return false;
            }
            if (!instr->GetSrc2()->IsRegOpnd())
            {
                return false;
            }
            baseOpnd = instr->GetSrc2()->AsRegOpnd();
            if (baseOpnd->GetValueType().IsLikelyObject() && baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray)
            {
                return false;
            }
            // Only arrays are handled, and JS arrays must be known to have no missing
            // values (a hole would make IsIn's answer differ from a bounds comparison).
            if (!baseOpnd->GetValueType().IsLikelyAnyArray() || (baseOpnd->GetValueType().IsLikelyArrayOrObjectWithArray() && !baseOpnd->GetValueType().HasNoMissingValues()))
            {
                return false;
            }
            baseOwnerInstr = instr;
            needsBoundChecks = true;
            needsHeadSegmentLength = true;
            needsHeadSegment = true;
            break;

        default:
            return false;
    }

    return true;
}
// Ensures the index operand is in the right form for bound-check analysis:
// performs any needed var conversions, locates the index operand (from the
// indirect operand, or src1 of IsIn), and resolves the index's var-equivalent
// sym and its tracked value (indexVarSym / indexValue) for later use.
void GlobOpt::ArraySrcOpt::TypeSpecIndex()
{
    // Since this happens before type specialization, make sure that any necessary conversions are done, and that the index is int-specialized if possible such that the const flags are correct.
    if (!globOpt->IsLoopPrePass())
    {
        if (baseOwnerIndir)
        {
            globOpt->ToVarUses(instr, baseOwnerIndir, baseOwnerIndir == instr->GetDst(), nullptr);
        }
        else if (instr->m_opcode == Js::OpCode::IsIn && instr->GetSrc1()->IsRegOpnd())
        {
            // If the optimization is unable to eliminate the bounds checks, we need to restore the original var sym.
            // (The destructor performs the restore when originalIndexOpnd is still set.)
            Assert(originalIndexOpnd == nullptr);
            originalIndexOpnd = instr->GetSrc1()->Copy(func)->AsRegOpnd();
            globOpt->ToTypeSpecIndex(instr, instr->GetSrc1()->AsRegOpnd(), nullptr);
        }
    }

    // Locate the index operand for this instruction form.
    if (baseOwnerIndir != nullptr)
    {
        indexOpnd = baseOwnerIndir->GetIndexOpnd();
    }
    else if (instr->m_opcode == Js::OpCode::IsIn)
    {
        indexOpnd = instr->GetSrc1();
    }

    if (indexOpnd != nullptr && indexOpnd->IsRegOpnd())
    {
        IR::RegOpnd * regOpnd = indexOpnd->AsRegOpnd();
        if (regOpnd->m_sym->IsTypeSpec())
        {
            // Int-specialized index: track its var-equivalent sym so value lookup
            // is done on the var sym.
            Assert(regOpnd->m_sym->IsInt32());
            indexVarSym = regOpnd->m_sym->GetVarEquivSym(nullptr);
        }
        else
        {
            indexVarSym = regOpnd->m_sym;
        }
        indexValue = globOpt->CurrentBlockData()->FindValue(indexVarSym);
    }
}
  192. void GlobOpt::ArraySrcOpt::UpdateValue(StackSym * newHeadSegmentSym, StackSym * newHeadSegmentLengthSym, StackSym * newLengthSym)
  193. {
  194. Assert(baseValueType.GetObjectType() == newBaseValueType.GetObjectType());
  195. Assert(newBaseValueType.IsObject());
  196. Assert(baseValueType.IsLikelyArray() || !newLengthSym);
  197. if (!(newHeadSegmentSym || newHeadSegmentLengthSym || newLengthSym))
  198. {
  199. // We're not adding new information to the value other than changing the value type. Preserve any existing
  200. // information and just change the value type.
  201. globOpt->ChangeValueType(globOpt->currentBlock, baseValue, newBaseValueType, true);
  202. return;
  203. }
  204. // Merge the new syms into the value while preserving any existing information, and change the value type
  205. if (baseArrayValueInfo)
  206. {
  207. if (!newHeadSegmentSym)
  208. {
  209. newHeadSegmentSym = baseArrayValueInfo->HeadSegmentSym();
  210. }
  211. if (!newHeadSegmentLengthSym)
  212. {
  213. newHeadSegmentLengthSym = baseArrayValueInfo->HeadSegmentLengthSym();
  214. }
  215. if (!newLengthSym)
  216. {
  217. newLengthSym = baseArrayValueInfo->LengthSym();
  218. }
  219. Assert(!baseArrayValueInfo->HeadSegmentSym() || newHeadSegmentSym == baseArrayValueInfo->HeadSegmentSym());
  220. Assert(!baseArrayValueInfo->HeadSegmentLengthSym() || newHeadSegmentLengthSym == baseArrayValueInfo->HeadSegmentLengthSym());
  221. Assert(!baseArrayValueInfo->LengthSym() || newLengthSym == baseArrayValueInfo->LengthSym());
  222. }
  223. ArrayValueInfo *const newBaseArrayValueInfo =
  224. ArrayValueInfo::New(
  225. globOpt->alloc,
  226. newBaseValueType,
  227. newHeadSegmentSym,
  228. newHeadSegmentLengthSym,
  229. newLengthSym,
  230. baseValueInfo->GetSymStore());
  231. globOpt->ChangeValueInfo(globOpt->currentBlock, baseValue, newBaseArrayValueInfo);
  232. };
// For likely-optimized virtual typed arrays, decides whether both bound checks
// can be eliminated outright. Non-indir accesses (push/pop/LdLen/IsIn) and
// asm.js code eliminate both checks unconditionally; otherwise a constant-range
// index must be provably within the reserved virtual-buffer range.
// No-op unless ENABLE_FAST_ARRAYBUFFER is defined.
void GlobOpt::ArraySrcOpt::CheckVirtualArrayBounds()
{
#if ENABLE_FAST_ARRAYBUFFER
    if (baseValueType.IsLikelyOptimizedVirtualTypedArray() && !Js::IsSimd128LoadStore(instr->m_opcode) /*Always extract bounds for SIMD */)
    {
        // Only eliminate the checks when the dst is always converted to int32
        // (or, for float virtual arrays, always converted to number), or for
        // profilable StElems.
        if (isProfilableStElem ||
            !instr->IsDstNotAlwaysConvertedToInt32() ||
            ((baseValueType.GetObjectType() == ObjectType::Float32VirtualArray ||
              baseValueType.GetObjectType() == ObjectType::Float64VirtualArray) &&
             !instr->IsDstNotAlwaysConvertedToNumber()
            )
           )
        {
            // Unless we're in asm.js (where it is guaranteed that virtual typed array accesses cannot read/write beyond 4GB),
            // check the range of the index to make sure we won't access beyond the reserved memory beforing eliminating bounds
            // checks in jitted code.
            if (!globOpt->GetIsAsmJSFunc() && baseOwnerIndir)
            {
                if (indexOpnd)
                {
                    IntConstantBounds idxConstantBounds;
                    if (indexValue && indexValue->GetValueInfo()->TryGetIntConstantBounds(&idxConstantBounds))
                    {
                        BYTE indirScale = Lowerer::GetArrayIndirScale(baseValueType);
                        int32 upperBound = idxConstantBounds.UpperBound();
                        int32 lowerBound = idxConstantBounds.LowerBound();
                        // Non-negative index whose scaled byte offset stays below the
                        // reserved buffer length: both checks are unnecessary.
                        if (lowerBound >= 0 && ((static_cast<uint64>(upperBound) << indirScale) < MAX_ASMJS_ARRAYBUFFER_LENGTH))
                        {
                            eliminatedLowerBoundCheck = true;
                            eliminatedUpperBoundCheck = true;
                            canBailOutOnArrayAccessHelperCall = false;
                        }
                    }
                }
            }
            else
            {
                // asm.js functions, or instructions without an indirect base
                // (push/pop/LdLen/IsIn), eliminate both checks unconditionally.
                if (baseOwnerIndir == nullptr)
                {
                    Assert(instr->m_opcode == Js::OpCode::InlineArrayPush ||
                           instr->m_opcode == Js::OpCode::InlineArrayPop ||
                           instr->m_opcode == Js::OpCode::LdLen_A ||
                           instr->m_opcode == Js::OpCode::IsIn);
                }

                eliminatedLowerBoundCheck = true;
                eliminatedUpperBoundCheck = true;
                canBailOutOnArrayAccessHelperCall = false;
            }
        }
    }
#endif
}
// Attempts to prove the lower and/or upper bound check redundant using the
// tracked constant bounds of the index and of the head segment length, and
// decides whether bound checks should be extracted into separate bailout
// instructions (doExtractBoundChecks).
// Sets eliminatedLowerBoundCheck / eliminatedUpperBoundCheck as proven.
// (Name spelling is historical; kept for the existing callers.)
void GlobOpt::ArraySrcOpt::TryEliminiteBoundsCheck()
{
    AnalysisAssert(indexOpnd != nullptr || baseOwnerIndir != nullptr);
    Assert(needsHeadSegmentLength);

    // Bound checks can be separated from the instruction only if it can bail out instead of making a helper call when a
    // bound check fails. And only if it would bail out, can we use a bound check to eliminate redundant bound checks later
    // on that path.
    doExtractBoundChecks = (headSegmentLengthIsAvailable || doHeadSegmentLengthLoad) && canBailOutOnArrayAccessHelperCall;

    // Get the index value
    if (indexOpnd != nullptr && indexOpnd->IsRegOpnd())
    {
        if (indexOpnd->AsRegOpnd()->m_sym->IsTypeSpec())
        {
            Assert(indexVarSym);
            Assert(indexValue);
            AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
            Assert(indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
            // A TyUint32 index implies the value is known to be >= 0.
            Assert(
                (indexOpnd->GetType() == TyUint32) ==
                ValueInfo::IsGreaterThanOrEqualTo(
                    indexValue,
                    indexConstantBounds.LowerBound(),
                    indexConstantBounds.UpperBound(),
                    nullptr,
                    0,
                    0));
            if (indexOpnd->GetType() == TyUint32)
            {
                eliminatedLowerBoundCheck = true;
            }
        }
        else
        {
            doExtractBoundChecks = false; // Bound check instruction operates only on int-specialized operands
            if (!indexValue || !indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds))
            {
                return;
            }
            // Index provably >= 0: the lower bound check is redundant.
            if (ValueInfo::IsGreaterThanOrEqualTo(
                    indexValue,
                    indexConstantBounds.LowerBound(),
                    indexConstantBounds.UpperBound(),
                    nullptr,
                    0,
                    0))
            {
                eliminatedLowerBoundCheck = true;
            }
        }

        // Index provably < 0: it always fails the lower bound check, so the upper
        // bound check can never be reached and is unnecessary.
        if (!eliminatedLowerBoundCheck &&
            ValueInfo::IsLessThan(
                indexValue,
                indexConstantBounds.LowerBound(),
                indexConstantBounds.UpperBound(),
                nullptr,
                0,
                0))
        {
            eliminatedUpperBoundCheck = true;
            doExtractBoundChecks = false;
            return;
        }
    }
    else
    {
        // Constant index: either an int-const operand, or the indir's offset when
        // there is no index operand at all.
        const int32 indexConstantValue = indexOpnd ? indexOpnd->AsIntConstOpnd()->AsInt32() : baseOwnerIndir->GetOffset();
        if (indexConstantValue < 0)
        {
            // Always fails the lower bound check; the upper bound check is moot.
            eliminatedUpperBoundCheck = true;
            doExtractBoundChecks = false;
            return;
        }

        if (indexConstantValue == INT32_MAX)
        {
            // Can never be below an int32 length; only the lower bound is provable.
            eliminatedLowerBoundCheck = true;
            doExtractBoundChecks = false;
            return;
        }

        indexConstantBounds = IntConstantBounds(indexConstantValue, indexConstantValue);
        eliminatedLowerBoundCheck = true;
    }

    if (!headSegmentLengthIsAvailable)
    {
        return;
    }

    headSegmentLengthValue = globOpt->CurrentBlockData()->FindValue(baseArrayValueInfo->HeadSegmentLengthSym());
    if (!headSegmentLengthValue)
    {
        if (doExtractBoundChecks)
        {
            // No tracked value; assume the full legal segment-length range.
            headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
        }
        return;
    }

    AssertVerify(headSegmentLengthValue->GetValueInfo()->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));

    // index <= headSegmentLength - 1, i.e. index < headSegmentLength: the upper
    // bound check is redundant.
    if (ValueInfo::IsLessThanOrEqualTo(
            indexValue,
            indexConstantBounds.LowerBound(),
            indexConstantBounds.UpperBound(),
            headSegmentLengthValue,
            headSegmentLengthConstantBounds.LowerBound(),
            headSegmentLengthConstantBounds.UpperBound(),
            -1
        ))
    {
        eliminatedUpperBoundCheck = true;
        if (eliminatedLowerBoundCheck)
        {
            // Both checks proven redundant; nothing to extract.
            doExtractBoundChecks = false;
        }
    }
}
// Walks the enclosing loops from innermost to outermost to find the outermost
// loop out of which the array checks, head segment load, head segment length
// load, and length load can each be hoisted. A loop qualifies only if the base
// value is invariant in it and the loop does not kill the corresponding piece
// of array state (per its jsArrayKills summary). Stops at the first loop that
// kills something that is still needed.
void GlobOpt::ArraySrcOpt::CheckLoops()
{
    if (!doArrayChecks && !doHeadSegmentLoad && !doHeadSegmentLengthLoad && !doLengthLoad)
    {
        return;
    }

    // Find the loops out of which array checks and head segment loads need to be hoisted
    for (Loop *loop = globOpt->currentBlock->loop; loop; loop = loop->parent)
    {
        const JsArrayKills loopKills(loop->jsArrayKills);
        Value *baseValueInLoopLandingPad = nullptr;
        // Stop if the loop can kill the base's value type, if the base is not
        // invariant in the loop, or if (when not doing array checks) the landing-pad
        // value is not already known to be an object.
        if (((isLikelyJsArray || isLikelyVirtualTypedArray) && loopKills.KillsValueType(newBaseValueType)) ||
            !globOpt->OptIsInvariant(baseOpnd->m_sym, globOpt->currentBlock, loop, baseValue, true, true, &baseValueInLoopLandingPad) ||
            !(doArrayChecks || baseValueInLoopLandingPad->GetValueInfo()->IsObject()))
        {
            break;
        }

        // The value types should be the same, except:
        //     - The value type in the landing pad is a type that can merge to a specific object type. Typically, these
        //       cases will use BailOnNoProfile, but that can be disabled due to excessive bailouts. Those value types
        //       merge aggressively to the other side's object type, so the value type may have started off as
        //       Uninitialized, [Likely]Undefined|Null, [Likely]UninitializedObject, etc., and changed in the loop to an
        //       array type during a prepass.
        //     - StElems in the loop can kill the no-missing-values info.
        //     - The native array type may be made more conservative based on profile data by an instruction in the loop.
#if DBG
        if (!baseValueInLoopLandingPad->GetValueInfo()->CanMergeToSpecificObjectType())
        {
            ValueType landingPadValueType = baseValueInLoopLandingPad->GetValueInfo()->Type();
            Assert(landingPadValueType.IsSimilar(baseValueType)
                || (landingPadValueType.IsLikelyNativeArray() && landingPadValueType.Merge(baseValueType).IsSimilar(baseValueType))
                || (baseValueType.IsLikelyNativeArray() && baseValueType.Merge(landingPadValueType).IsSimilar(landingPadValueType))
            );
        }
#endif

        if (doArrayChecks)
        {
            // This loop still qualifies for hoisting the array checks.
            hoistChecksOutOfLoop = loop;
        }

        if (isLikelyJsArray && loopKills.KillsArrayHeadSegments())
        {
            Assert(loopKills.KillsArrayHeadSegmentLengths());
            // The head segment (and its length) die in this loop; continue only if
            // array checks or the length load can still be hoisted further out.
            if (!(doArrayChecks || doLengthLoad))
            {
                break;
            }
        }
        else
        {
            if (doHeadSegmentLoad || headSegmentIsAvailable)
            {
                // If the head segment is already available, we may need to rehoist the value including other
                // information. So, need to track the loop out of which the head segment length can be hoisted even if
                // the head segment length is not being loaded here.
                hoistHeadSegmentLoadOutOfLoop = loop;
            }

            if (isLikelyJsArray
                ? loopKills.KillsArrayHeadSegmentLengths()
                : loopKills.KillsTypedArrayHeadSegmentLengths())
            {
                if (!(doArrayChecks || doHeadSegmentLoad || doLengthLoad))
                {
                    break;
                }
            }
            else if (doHeadSegmentLengthLoad || headSegmentLengthIsAvailable)
            {
                // If the head segment length is already available, we may need to rehoist the value including other
                // information. So, need to track the loop out of which the head segment length can be hoisted even if
                // the head segment length is not being loaded here.
                hoistHeadSegmentLengthLoadOutOfLoop = loop;
            }
        }

        if (isLikelyJsArray && loopKills.KillsArrayLengths())
        {
            if (!(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad))
            {
                break;
            }
        }
        else if (doLengthLoad || lengthIsAvailable)
        {
            // If the length is already available, we may need to rehoist the value including other information. So,
            // need to track the loop out of which the head segment length can be hoisted even if the length is not
            // being loaded here.
            hoistLengthLoadOutOfLoop = loop;
        }
    }
}
// Separates the array type check into an explicit BailOnNotArray instruction
// (BailOutOnNotArray / BailOutOnNotNativeArray), hoisting it to the landing pad
// of hoistChecksOutOfLoop when one was found by CheckLoops. When emitted inline,
// its bailout info is recorded as shareable for subsequent separated checks.
// On success, the base operand's value type is upgraded to newBaseValueType.
void GlobOpt::ArraySrcOpt::DoArrayChecks()
{
    TRACE_TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Separating array checks with bailout\n"));

    IR::Instr *bailOnNotArray = IR::Instr::New(Js::OpCode::BailOnNotArray, instr->m_func);
    bailOnNotArray->SetSrc1(baseOpnd);
    bailOnNotArray->GetSrc1()->SetIsJITOptimizedReg(true);
    // Native arrays use the stricter bailout kind.
    const IR::BailOutKind bailOutKind = newBaseValueType.IsLikelyNativeArray() ? IR::BailOutOnNotNativeArray : IR::BailOutOnNotArray;

    if (hoistChecksOutOfLoop)
    {
        // CheckLoops guarantees the chosen loop does not kill this value type.
        Assert(!(isLikelyJsArray && hoistChecksOutOfLoop->jsArrayKills.KillsValueType(newBaseValueType)));

        TRACE_PHASE_INSTR(
            Js::ArrayCheckHoistPhase,
            instr,
            _u("Hoisting array checks with bailout out of loop %u to landing pad block %u\n"),
            hoistChecksOutOfLoop->GetLoopNumber(),
            hoistChecksOutOfLoop->landingPad->GetBlockNum());
        TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Hoisting array checks with bailout out of loop\n"));

        Assert(hoistChecksOutOfLoop->bailOutInfo);
        globOpt->EnsureBailTarget(hoistChecksOutOfLoop);
        InsertInstrInLandingPad(bailOnNotArray, hoistChecksOutOfLoop);
        bailOnNotArray = bailOnNotArray->ConvertToBailOutInstr(hoistChecksOutOfLoop->bailOutInfo, bailOutKind);
    }
    else
    {
        bailOnNotArray->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(bailOnNotArray);
        globOpt->GenerateBailAtOperation(&bailOnNotArray, bailOutKind);
        // Record this bailout info so later separated instructions (e.g. the
        // length load) can share it instead of creating their own.
        shareableBailOutInfo = bailOnNotArray->GetBailOutInfo();
        shareableBailOutInfoOriginalOwner = bailOnNotArray;
    }

    baseValueType = newBaseValueType;
    baseOpnd->SetValueType(newBaseValueType);
}
// Separates the array length load into an explicit LdIndir of
// [array + offsetOf(length)] into newLengthSym, guarded by a
// BailOnNegative (BailOutOnIrregularLength) check. The pair is hoisted to the
// landing pad of hoistLengthLoadOutOfLoop when one was found by CheckLoops,
// in which case the length value is also propagated back through the
// invariant blocks; otherwise they are emitted inline, sharing bailout info
// with a previously separated check when available.
void GlobOpt::ArraySrcOpt::DoLengthLoad()
{
    Assert(baseValueType.IsArray());
    Assert(newLengthSym);

    TRACE_TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Separating array length load\n"));

    // Create an initial value for the length
    globOpt->CurrentBlockData()->liveVarSyms->Set(newLengthSym->m_id);
    Value *const lengthValue = globOpt->NewIntRangeValue(0, INT32_MAX, false);
    globOpt->CurrentBlockData()->SetValue(lengthValue, newLengthSym);

    // SetValue above would have set the sym store to newLengthSym. This sym won't be used for copy-prop though, so
    // remove it as the sym store.
    globOpt->SetSymStoreDirect(lengthValue->GetValueInfo(), nullptr);

    // length = [array + offsetOf(length)]
    IR::Instr *const loadLength =
        IR::Instr::New(
            Js::OpCode::LdIndir,
            IR::RegOpnd::New(newLengthSym, newLengthSym->GetType(), instr->m_func),
            IR::IndirOpnd::New(
                baseOpnd,
                Js::JavascriptArray::GetOffsetOfLength(),
                newLengthSym->GetType(),
                instr->m_func),
            instr->m_func);
    loadLength->GetDst()->SetIsJITOptimizedReg(true);
    loadLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);

    // BailOnNegative length (BailOutOnIrregularLength)
    IR::Instr *bailOnIrregularLength = IR::Instr::New(Js::OpCode::BailOnNegative, instr->m_func);
    bailOnIrregularLength->SetSrc1(loadLength->GetDst());

    const IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
    if (hoistLengthLoadOutOfLoop)
    {
        // CheckLoops guarantees the chosen loop does not kill array lengths.
        Assert(!hoistLengthLoadOutOfLoop->jsArrayKills.KillsArrayLengths());

        TRACE_PHASE_INSTR(
            Js::Phase::ArrayLengthHoistPhase,
            instr,
            _u("Hoisting array length load out of loop %u to landing pad block %u\n"),
            hoistLengthLoadOutOfLoop->GetLoopNumber(),
            hoistLengthLoadOutOfLoop->landingPad->GetBlockNum());
        TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Hoisting array length load out of loop\n"));

        Assert(hoistLengthLoadOutOfLoop->bailOutInfo);
        globOpt->EnsureBailTarget(hoistLengthLoadOutOfLoop);
        InsertInstrInLandingPad(loadLength, hoistLengthLoadOutOfLoop);
        InsertInstrInLandingPad(bailOnIrregularLength, hoistLengthLoadOutOfLoop);
        bailOnIrregularLength = bailOnIrregularLength->ConvertToBailOutInstr(hoistLengthLoadOutOfLoop->bailOutInfo, bailOutKind);

        // Hoist the length value
        // Walk backwards through the blocks where the base is invariant, making the
        // length sym live with a copy of its value in each.
        for (InvariantBlockBackwardIterator it(
                globOpt,
                globOpt->currentBlock,
                hoistLengthLoadOutOfLoop->landingPad,
                baseOpnd->m_sym,
                baseValue->GetValueNumber());
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock *const block = it.Block();
            block->globOptData.liveVarSyms->Set(newLengthSym->m_id);
            Assert(!block->globOptData.FindValue(newLengthSym));
            Value *const lengthValueCopy = globOpt->CopyValue(lengthValue, lengthValue->GetValueNumber());
            block->globOptData.SetValue(lengthValueCopy, newLengthSym);
            globOpt->SetSymStoreDirect(lengthValueCopy->GetValueInfo(), nullptr);
        }
    }
    else
    {
        loadLength->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(loadLength);
        bailOnIrregularLength->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(bailOnIrregularLength);

        if (shareableBailOutInfo)
        {
            // Reuse the bailout info created by an earlier separated check.
            ShareBailOut();
            bailOnIrregularLength = bailOnIrregularLength->ConvertToBailOutInstr(shareableBailOutInfo, bailOutKind);
        }
        else
        {
            globOpt->GenerateBailAtOperation(&bailOnIrregularLength, bailOutKind);
            shareableBailOutInfo = bailOnIrregularLength->GetBailOutInfo();
            shareableBailOutInfoOriginalOwner = bailOnIrregularLength;
        }
    }
}
// Materializes an explicit load of the array's head-segment length into newHeadSegmentLengthSym,
// and registers an initial int-range value [0, SparseArraySegmentBase::MaxLength] for it in the
// current block's value table. If hoistHeadSegmentLengthLoadOutOfLoop is set, the load is placed
// in that loop's landing pad and the value is propagated back through the invariant blocks;
// otherwise the load is inserted inline before insertBeforeInstr.
void GlobOpt::ArraySrcOpt::DoHeadSegmentLengthLoad()
{
    // For JS arrays the segment base must come from either a freshly created head-segment sym or
    // one already tracked in the base array's value info. (Typed arrays load length off the base.)
    Assert(!isLikelyJsArray || newHeadSegmentSym || baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym());
    Assert(newHeadSegmentLengthSym);
    Assert(!headSegmentLengthValue);
    TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment length load\n"));
    // Create an initial value for the head segment length
    globOpt->CurrentBlockData()->liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
    headSegmentLengthValue = globOpt->NewIntRangeValue(0, Js::SparseArraySegmentBase::MaxLength, false);
    headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
    globOpt->CurrentBlockData()->SetValue(headSegmentLengthValue, newHeadSegmentLengthSym);
    // SetValue above would have set the sym store to newHeadSegmentLengthSym. This sym won't be used for copy-prop
    // though, so remove it as the sym store.
    globOpt->SetSymStoreDirect(headSegmentLengthValue->GetValueInfo(), nullptr);
    // JS arrays: load [headSegment + offsetof(length)]. Typed arrays: load the length directly off
    // the array object (baseOpnd) at the type-specific offset.
    StackSym *const headSegmentSym = isLikelyJsArray ? newHeadSegmentSym ? newHeadSegmentSym : baseArrayValueInfo->HeadSegmentSym() : nullptr;
    IR::Instr *const loadHeadSegmentLength =
        IR::Instr::New(
            Js::OpCode::LdIndir,
            IR::RegOpnd::New(newHeadSegmentLengthSym, newHeadSegmentLengthSym->GetType(), instr->m_func),
            IR::IndirOpnd::New(
                isLikelyJsArray ? IR::RegOpnd::New(headSegmentSym, headSegmentSym->GetType(), instr->m_func) : baseOpnd,
                isLikelyJsArray
                    ? Js::SparseArraySegmentBase::GetOffsetOfLength()
                    : Lowerer::GetArrayOffsetOfLength(baseValueType),
                newHeadSegmentLengthSym->GetType(),
                instr->m_func),
            instr->m_func);
    loadHeadSegmentLength->GetDst()->SetIsJITOptimizedReg(true);
    loadHeadSegmentLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
    // We don't check the head segment length for negative (very large uint32) values. For JS arrays, the bound checks
    // cover that. For typed arrays, we currently don't allocate array buffers with more than 1 GB elements.
    if (hoistHeadSegmentLengthLoadOutOfLoop)
    {
        // Hoisting is only legal if nothing in the loop can change the relevant length.
        Assert(
            !(
                isLikelyJsArray
                    ? hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegmentLengths()
                    : hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
            ));
        TRACE_PHASE_INSTR(
            Js::ArraySegmentHoistPhase,
            instr,
            _u("Hoisting array segment length load out of loop %u to landing pad block %u\n"),
            hoistHeadSegmentLengthLoadOutOfLoop->GetLoopNumber(),
            hoistHeadSegmentLengthLoadOutOfLoop->landingPad->GetBlockNum());
        TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment length load out of loop\n"));
        InsertInstrInLandingPad(loadHeadSegmentLength, hoistHeadSegmentLengthLoadOutOfLoop);
        // Hoist the head segment length value
        // Walk back from the current block to the landing pad through blocks where the base array
        // value is invariant, making the new length sym live with a copy of its value in each.
        for (InvariantBlockBackwardIterator it(
                globOpt,
                globOpt->currentBlock,
                hoistHeadSegmentLengthLoadOutOfLoop->landingPad,
                baseOpnd->m_sym,
                baseValue->GetValueNumber());
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock *const block = it.Block();
            block->globOptData.liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
            Assert(!block->globOptData.FindValue(newHeadSegmentLengthSym));
            Value *const headSegmentLengthValueCopy = globOpt->CopyValue(headSegmentLengthValue, headSegmentLengthValue->GetValueNumber());
            block->globOptData.SetValue(headSegmentLengthValueCopy, newHeadSegmentLengthSym);
            // As above, this sym must not become a copy-prop sym store.
            globOpt->SetSymStoreDirect(headSegmentLengthValueCopy->GetValueInfo(), nullptr);
        }
    }
    else
    {
        loadHeadSegmentLength->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(loadHeadSegmentLength);
        instr->loadedArrayHeadSegmentLength = true;
    }
}
// Drives extraction of the lower and upper bound checks for the current array access.
// Resolves the head-segment-length sym (either already available in the base array's value info
// or the freshly created one), optionally asks the bound-check-hoist analysis whether either
// check can be hoisted, then emits whichever of the two checks has not already been eliminated.
void GlobOpt::ArraySrcOpt::DoExtractBoundChecks()
{
    // At most one of the two checks may have been proven unnecessary at this point.
    Assert(!(eliminatedLowerBoundCheck && eliminatedUpperBoundCheck));
    Assert(baseOwnerIndir != nullptr || indexOpnd != nullptr);
    // The index is either a constant or an int-type-specialized sym.
    Assert(indexOpnd == nullptr || indexOpnd->IsIntConstOpnd() || indexOpnd->AsRegOpnd()->m_sym->IsTypeSpec());
    Assert(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable);
    Assert(canBailOutOnArrayAccessHelperCall);
    Assert(!isStore || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || Js::IsSimd128LoadStore(instr->m_opcode));
    headSegmentLengthSym = headSegmentLengthIsAvailable ? baseArrayValueInfo->HeadSegmentLengthSym() : newHeadSegmentLengthSym;
    Assert(headSegmentLengthSym);
    Assert(headSegmentLengthValue);
    if (globOpt->DoBoundCheckHoist())
    {
        if (indexVarSym)
        {
            TRACE_PHASE_INSTR_VERBOSE(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Determining array bound check hoistability for index s%u\n"),
                indexVarSym->m_id);
        }
        else
        {
            // Constant index: bounds are a single constant, trace the lower bound.
            TRACE_PHASE_INSTR_VERBOSE(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Determining array bound check hoistability for index %d\n"),
                indexConstantBounds.LowerBound());
        }
        // Populates lowerBoundCheckHoistInfo / upperBoundCheckHoistInfo, consumed by
        // DoLowerBoundCheck / DoUpperBoundCheck below.
        globOpt->DetermineArrayBoundCheckHoistability(
            !eliminatedLowerBoundCheck,
            !eliminatedUpperBoundCheck,
            lowerBoundCheckHoistInfo,
            upperBoundCheckHoistInfo,
            isLikelyJsArray,
            indexVarSym,
            indexValue,
            indexConstantBounds,
            headSegmentLengthSym,
            headSegmentLengthValue,
            headSegmentLengthConstantBounds,
            hoistHeadSegmentLengthLoadOutOfLoop,
            failedToUpdateCompatibleLowerBoundCheck,
            failedToUpdateCompatibleUpperBoundCheck);
    }
    if (!eliminatedLowerBoundCheck)
    {
        DoLowerBoundCheck();
    }
    if (!eliminatedUpperBoundCheck)
    {
        DoUpperBoundCheck();
    }
}
// Emits (or hoists) the lower bound check "0 <= index + offset" for the current array access and
// updates the tracked int bounds of the index value to reflect it. Three cases:
//   1. A compatible bound check instruction already exists in a dominating block — just merge into it.
//   2. The check is hoistable out of a loop — emit a hoisted BoundCheck in the landing pad with a
//      loop-level bailout, record it as available, and propagate the tightened bounds backward.
//   3. Neither — emit the check inline before the access, sharing bailout info with other checks
//      separated from this instruction when possible.
void GlobOpt::ArraySrcOpt::DoLowerBoundCheck()
{
    eliminatedLowerBoundCheck = true;
    Assert(indexVarSym);
    Assert(indexOpnd);
    Assert(indexValue);
    GlobOpt::ArrayLowerBoundCheckHoistInfo &hoistInfo = lowerBoundCheckHoistInfo;
    if (hoistInfo.HasAnyInfo())
    {
        BasicBlock *hoistBlock;
        if (hoistInfo.CompatibleBoundCheckBlock())
        {
            // Case 1: reuse an existing bound check instruction in a dominating block.
            hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array lower bound check into existing bound check instruction in block %u\n"),
                hoistBlock->GetBlockNum());
            TESTTRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array lower bound check into existing bound check instruction\n"));
        }
        else
        {
            // Case 2: hoist a new bound check into the loop's landing pad.
            Assert(hoistInfo.Loop());
            BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
            hoistBlock = landingPad;
            StackSym *indexIntSym;
            if (hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
            {
                if (!landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()))
                {
                    // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize
                    // it in this block if it is invariant, as the conversion will be hoisted along with value
                    // updates.
                    BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
                    IR::Instr *specializeBeforeInstr = nullptr;
                    if (!globOpt->CurrentBlockData()->IsInt32TypeSpecialized(hoistInfo.IndexSym()) &&
                        globOpt->OptIsInvariant(
                            hoistInfo.IndexSym(),
                            globOpt->currentBlock,
                            hoistInfo.Loop(),
                            globOpt->CurrentBlockData()->FindValue(hoistInfo.IndexSym()),
                            false,
                            true))
                    {
                        specializationBlock = globOpt->currentBlock;
                        specializeBeforeInstr = insertBeforeInstr;
                    }
                    // tempBv is a scratch bit-vector shared across the optimizer; it must be left empty.
                    Assert(globOpt->tempBv->IsEmpty());
                    globOpt->tempBv->Set(hoistInfo.IndexSym()->m_id);
                    globOpt->ToInt32(globOpt->tempBv, specializationBlock, false, specializeBeforeInstr);
                    globOpt->tempBv->ClearAll();
                    Assert(landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()));
                }
                indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
                Assert(indexIntSym);
            }
            else
            {
                // Index sym is already an int sym (or absent, meaning a constant index).
                indexIntSym = hoistInfo.IndexSym();
                Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
            }
            if (hoistInfo.IndexSym())
            {
                Assert(hoistInfo.Loop()->bailOutInfo);
                globOpt->EnsureBailTarget(hoistInfo.Loop());
                bool needsMagnitudeAdjustment = false;
                if (hoistInfo.LoopCount())
                {
                    // Generate the loop count and loop count based bound that will be used for the bound check
                    if (!hoistInfo.LoopCount()->HasBeenGenerated())
                    {
                        globOpt->GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
                    }
                    // Adjust when the per-iteration index offset is smaller in magnitude than the
                    // maximum change of the induction variable over the loop.
                    needsMagnitudeAdjustment = (hoistInfo.MaxMagnitudeChange() > 0)
                        ? (hoistInfo.IndexOffset() < hoistInfo.MaxMagnitudeChange())
                        : (hoistInfo.IndexOffset() > hoistInfo.MaxMagnitudeChange());
                    globOpt->GenerateSecondaryInductionVariableBound(
                        hoistInfo.Loop(),
                        indexVarSym->GetInt32EquivSym(nullptr),
                        hoistInfo.LoopCount(),
                        hoistInfo.MaxMagnitudeChange(),
                        needsMagnitudeAdjustment,
                        hoistInfo.IndexSym());
                }
                IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
                IR::Opnd* upperBound = IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func);
                int offset = needsMagnitudeAdjustment ? (hoistInfo.IndexOffset() - hoistInfo.Offset()) : hoistInfo.Offset();
                upperBound->SetIsJITOptimizedReg(true);
                // 0 <= indexSym + offset (src1 <= src2 + dst)
                IR::Instr *const boundCheck = globOpt->CreateBoundsCheckInstr(
                    lowerBound,
                    upperBound,
                    offset,
                    hoistInfo.IsLoopCountBasedBound()
                        ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
                        : IR::BailOutOnFailedHoistedBoundCheck,
                    hoistInfo.Loop()->bailOutInfo,
                    hoistInfo.Loop()->bailOutInfo->bailOutFunc);
                InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
                TRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array lower bound check out of loop %u to landing pad block %u, as (0 <= s%u + %d)\n"),
                    hoistInfo.Loop()->GetLoopNumber(),
                    landingPad->GetBlockNum(),
                    hoistInfo.IndexSym()->m_id,
                    hoistInfo.Offset());
                TESTTRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array lower bound check out of loop\n"));
                // Record the bound check instruction as available
                const IntBoundCheck boundCheckInfo(
                    ZeroValueNumber,
                    hoistInfo.IndexValueNumber(),
                    boundCheck,
                    landingPad);
                {
                    const bool added = globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                    Assert(added || failedToUpdateCompatibleLowerBoundCheck);
                }
                // Also record availability in every invariant block between here and the landing pad.
                for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock, landingPad, nullptr);
                    it.IsValid();
                    it.MoveNext())
                {
                    const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                    Assert(added || failedToUpdateCompatibleLowerBoundCheck);
                }
            }
        }
        // Update values of the syms involved in the bound check to reflect the bound check
        // (skip offset == INT32_MIN: it cannot be negated without overflow).
        if (hoistBlock != globOpt->currentBlock && hoistInfo.IndexSym() && hoistInfo.Offset() != INT32_MIN)
        {
            for (InvariantBlockBackwardIterator it(
                    globOpt,
                    globOpt->currentBlock->next,
                    hoistBlock,
                    hoistInfo.IndexSym(),
                    hoistInfo.IndexValueNumber(),
                    true);
                it.IsValid();
                it.MoveNext())
            {
                Value *const value = it.InvariantSymValue();
                IntConstantBounds constantBounds;
                AssertVerify(value->GetValueInfo()->TryGetIntConstantBounds(&constantBounds, true));
                // The check established index >= -offset; fold that into the value's bounds.
                ValueInfo *const newValueInfo =
                    globOpt->UpdateIntBoundsForGreaterThanOrEqual(
                        value,
                        constantBounds,
                        nullptr,
                        IntConstantBounds(-hoistInfo.Offset(), -hoistInfo.Offset()),
                        false);
                if (newValueInfo)
                {
                    globOpt->ChangeValueInfo(nullptr, value, newValueInfo);
                    if (it.Block() == globOpt->currentBlock && value == indexValue)
                    {
                        // Keep the cached bounds for the current access in sync.
                        AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
                    }
                }
            }
        }
    }
    else
    {
        // Case 3: emit the lower bound check inline, immediately before the array access.
        IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
        IR::Opnd* upperBound = indexOpnd;
        upperBound->SetIsJITOptimizedReg(true);
        const int offset = 0;
        IR::Instr *boundCheck;
        if (shareableBailOutInfo)
        {
            // Reuse bailout info already created for another check separated from this instruction.
            ShareBailOut();
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                IR::BailOutOnArrayAccessHelperCall,
                shareableBailOutInfo,
                shareableBailOutInfo->bailOutFunc);
        }
        else
        {
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                instr->m_func);
        }
        boundCheck->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(boundCheck);
        if (!shareableBailOutInfo)
        {
            // First separated check: attach bailout info here and make it shareable for later checks.
            globOpt->GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
            shareableBailOutInfo = boundCheck->GetBailOutInfo();
            shareableBailOutInfoOriginalOwner = boundCheck;
        }
        TRACE_PHASE_INSTR(
            Js::Phase::BoundCheckEliminationPhase,
            instr,
            _u("Separating array lower bound check, as (0 <= s%u)\n"),
            indexVarSym->m_id);
        TESTTRACE_PHASE_INSTR(
            Js::Phase::BoundCheckEliminationPhase,
            instr,
            _u("Separating array lower bound check\n"));
        if (globOpt->DoBoundCheckHoist())
        {
            // Record the bound check instruction as available
            const bool added =
                globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(
                    IntBoundCheck(ZeroValueNumber, indexValue->GetValueNumber(), boundCheck, globOpt->currentBlock)) >= 0;
            Assert(added || failedToUpdateCompatibleLowerBoundCheck);
        }
    }
    // Update the index value to reflect the bound check
    ValueInfo *const newValueInfo =
        globOpt->UpdateIntBoundsForGreaterThanOrEqual(
            indexValue,
            indexConstantBounds,
            nullptr,
            IntConstantBounds(0, 0),
            false);
    if (newValueInfo)
    {
        globOpt->ChangeValueInfo(nullptr, indexValue, newValueInfo);
        AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
    }
}
// Emits (or hoists) the upper bound check "index <= headSegmentLength + offset" for the current
// array access, then updates the tracked int bounds of both the index value and the head segment
// length value to reflect it. Mirrors DoLowerBoundCheck's three cases: merge into a compatible
// existing check, hoist a new check into a loop landing pad, or emit inline (offset -1, i.e.
// index < headSegmentLength) with shareable bailout info.
void GlobOpt::ArraySrcOpt::DoUpperBoundCheck()
{
    eliminatedUpperBoundCheck = true;
    GlobOpt::ArrayUpperBoundCheckHoistInfo &hoistInfo = upperBoundCheckHoistInfo;
    if (hoistInfo.HasAnyInfo())
    {
        BasicBlock *hoistBlock;
        if (hoistInfo.CompatibleBoundCheckBlock())
        {
            // Case 1: reuse an existing bound check instruction in a dominating block.
            hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array upper bound check into existing bound check instruction in block %u\n"),
                hoistBlock->GetBlockNum());
            TESTTRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array upper bound check into existing bound check instruction\n"));
        }
        else
        {
            // Case 2: hoist a new bound check into the loop's landing pad.
            Assert(hoistInfo.Loop());
            BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
            hoistBlock = landingPad;
            StackSym *indexIntSym;
            if (hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
            {
                if (!landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()))
                {
                    // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize it
                    // in this block if it is invariant, as the conversion will be hoisted along with value updates.
                    BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
                    IR::Instr *specializeBeforeInstr = nullptr;
                    if (!globOpt->CurrentBlockData()->IsInt32TypeSpecialized(hoistInfo.IndexSym()) &&
                        globOpt->OptIsInvariant(
                            hoistInfo.IndexSym(),
                            globOpt->currentBlock,
                            hoistInfo.Loop(),
                            globOpt->CurrentBlockData()->FindValue(hoistInfo.IndexSym()),
                            false,
                            true))
                    {
                        specializationBlock = globOpt->currentBlock;
                        specializeBeforeInstr = insertBeforeInstr;
                    }
                    // tempBv is a scratch bit-vector shared across the optimizer; it must be left empty.
                    Assert(globOpt->tempBv->IsEmpty());
                    globOpt->tempBv->Set(hoistInfo.IndexSym()->m_id);
                    globOpt->ToInt32(globOpt->tempBv, specializationBlock, false, specializeBeforeInstr);
                    globOpt->tempBv->ClearAll();
                    Assert(landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()));
                }
                indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
                Assert(indexIntSym);
            }
            else
            {
                // Index sym is already an int sym, or null when the index is a constant.
                indexIntSym = hoistInfo.IndexSym();
                Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
            }
            Assert(hoistInfo.Loop()->bailOutInfo);
            globOpt->EnsureBailTarget(hoistInfo.Loop());
            bool needsMagnitudeAdjustment = false;
            if (hoistInfo.LoopCount())
            {
                // Generate the loop count and loop count based bound that will be used for the bound check
                if (!hoistInfo.LoopCount()->HasBeenGenerated())
                {
                    globOpt->GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
                }
                // Adjust when the per-iteration index offset is smaller in magnitude than the
                // maximum change of the induction variable over the loop.
                needsMagnitudeAdjustment = (hoistInfo.MaxMagnitudeChange() > 0)
                    ? (hoistInfo.IndexOffset() < hoistInfo.MaxMagnitudeChange())
                    : (hoistInfo.IndexOffset() > hoistInfo.MaxMagnitudeChange());
                globOpt->GenerateSecondaryInductionVariableBound(
                    hoistInfo.Loop(),
                    indexVarSym->GetInt32EquivSym(nullptr),
                    hoistInfo.LoopCount(),
                    hoistInfo.MaxMagnitudeChange(),
                    needsMagnitudeAdjustment,
                    hoistInfo.IndexSym());
            }
            // Lower bound of the check is the index (sym or constant); upper bound is the head segment length.
            IR::Opnd* lowerBound = indexIntSym
                ? static_cast<IR::Opnd *>(IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func))
                : IR::IntConstOpnd::New(
                    hoistInfo.IndexConstantBounds().LowerBound(),
                    TyInt32,
                    instr->m_func);
            lowerBound->SetIsJITOptimizedReg(true);
            IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
            upperBound->SetIsJITOptimizedReg(true);
            int offset = needsMagnitudeAdjustment ? (hoistInfo.IndexOffset() + hoistInfo.Offset()) : hoistInfo.Offset();
            // indexSym <= headSegmentLength + offset (src1 <= src2 + dst)
            IR::Instr *const boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                hoistInfo.IsLoopCountBasedBound()
                    ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
                    : IR::BailOutOnFailedHoistedBoundCheck,
                hoistInfo.Loop()->bailOutInfo,
                hoistInfo.Loop()->bailOutInfo->bailOutFunc);
            InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
            if (indexIntSym)
            {
                TRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (s%u <= s%u + %d)\n"),
                    hoistInfo.Loop()->GetLoopNumber(),
                    landingPad->GetBlockNum(),
                    hoistInfo.IndexSym()->m_id,
                    headSegmentLengthSym->m_id,
                    offset);
            }
            else
            {
                TRACE_PHASE_INSTR(
                    Js::Phase::BoundCheckHoistPhase,
                    instr,
                    _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (%d <= s%u + %d)\n"),
                    hoistInfo.Loop()->GetLoopNumber(),
                    landingPad->GetBlockNum(),
                    hoistInfo.IndexConstantBounds().LowerBound(),
                    headSegmentLengthSym->m_id,
                    offset);
            }
            TESTTRACE_PHASE_INSTR(
                Js::Phase::BoundCheckHoistPhase,
                instr,
                _u("Hoisting array upper bound check out of loop\n"));
            // Record the bound check instruction as available
            const IntBoundCheck boundCheckInfo(
                hoistInfo.IndexValue() ? hoistInfo.IndexValueNumber() : ZeroValueNumber,
                hoistInfo.HeadSegmentLengthValue()->GetValueNumber(),
                boundCheck,
                landingPad);
            {
                const bool added = globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                Assert(added || failedToUpdateCompatibleUpperBoundCheck);
            }
            // Also record availability in every invariant block between here and the landing pad.
            for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock, landingPad, nullptr);
                it.IsValid();
                it.MoveNext())
            {
                const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
                Assert(added || failedToUpdateCompatibleUpperBoundCheck);
            }
        }
        // Update values of the syms involved in the bound check to reflect the bound check
        Assert(!hoistInfo.Loop() || hoistBlock != globOpt->currentBlock);
        if (hoistBlock != globOpt->currentBlock)
        {
            for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock->next, hoistBlock, nullptr, InvalidValueNumber, true);
                it.IsValid();
                it.MoveNext())
            {
                BasicBlock *const block = it.Block();
                Value *leftValue;
                IntConstantBounds leftConstantBounds;
                if (hoistInfo.IndexSym())
                {
                    // Only update blocks where the index sym still has the same value number.
                    leftValue = block->globOptData.FindValue(hoistInfo.IndexSym());
                    if (!leftValue || leftValue->GetValueNumber() != hoistInfo.IndexValueNumber())
                    {
                        continue;
                    }
                    AssertVerify(leftValue->GetValueInfo()->TryGetIntConstantBounds(&leftConstantBounds, true));
                }
                else
                {
                    // Constant index: no sym value to update on the left side.
                    leftValue = nullptr;
                    leftConstantBounds = hoistInfo.IndexConstantBounds();
                }
                Value *const rightValue = block->globOptData.FindValue(headSegmentLengthSym);
                if (!rightValue)
                {
                    continue;
                }
                Assert(rightValue->GetValueNumber() == headSegmentLengthValue->GetValueNumber());
                IntConstantBounds rightConstantBounds;
                AssertVerify(rightValue->GetValueInfo()->TryGetIntConstantBounds(&rightConstantBounds));
                // index <= headSegmentLength + offset: tighten the index value's upper bound.
                ValueInfo *const newValueInfoForLessThanOrEqual =
                    globOpt->UpdateIntBoundsForLessThanOrEqual(
                        leftValue,
                        leftConstantBounds,
                        rightValue,
                        rightConstantBounds,
                        hoistInfo.Offset(),
                        false);
                if (newValueInfoForLessThanOrEqual)
                {
                    globOpt->ChangeValueInfo(nullptr, leftValue, newValueInfoForLessThanOrEqual);
                    AssertVerify(newValueInfoForLessThanOrEqual->TryGetIntConstantBounds(&leftConstantBounds, true));
                    if (block == globOpt->currentBlock && leftValue == indexValue)
                    {
                        Assert(newValueInfoForLessThanOrEqual->IsInt());
                        // Keep the cached bounds for the current access in sync.
                        indexConstantBounds = leftConstantBounds;
                    }
                }
                // Symmetric update for the length: headSegmentLength >= index - offset.
                // Skip offset == INT32_MIN: it cannot be negated without overflow.
                if (hoistInfo.Offset() != INT32_MIN)
                {
                    ValueInfo *const newValueInfoForGreaterThanOrEqual =
                        globOpt->UpdateIntBoundsForGreaterThanOrEqual(
                            rightValue,
                            rightConstantBounds,
                            leftValue,
                            leftConstantBounds,
                            -hoistInfo.Offset(),
                            false);
                    if (newValueInfoForGreaterThanOrEqual)
                    {
                        globOpt->ChangeValueInfo(nullptr, rightValue, newValueInfoForGreaterThanOrEqual);
                        if (block == globOpt->currentBlock)
                        {
                            Assert(rightValue == headSegmentLengthValue);
                            AssertVerify(newValueInfoForGreaterThanOrEqual->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
                        }
                    }
                }
            }
        }
    }
    else
    {
        // Case 3: emit the upper bound check inline, immediately before the array access.
        // With no index operand, the access offset comes directly from the indir.
        IR::Opnd * lowerBound = indexOpnd ? indexOpnd : IR::IntConstOpnd::New(baseOwnerIndir->GetOffset(), TyInt32, instr->m_func);
        lowerBound->SetIsJITOptimizedReg(true);
        IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
        upperBound->SetIsJITOptimizedReg(true);
        const int offset = -1;
        IR::Instr *boundCheck;
        // index <= headSegmentLength - 1 (src1 <= src2 + dst)
        if (shareableBailOutInfo)
        {
            // Reuse bailout info already created for another check separated from this instruction.
            ShareBailOut();
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                IR::BailOutOnArrayAccessHelperCall,
                shareableBailOutInfo,
                shareableBailOutInfo->bailOutFunc);
        }
        else
        {
            boundCheck = globOpt->CreateBoundsCheckInstr(
                lowerBound,
                upperBound,
                offset,
                instr->m_func);
        }
        boundCheck->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(boundCheck);
        if (!shareableBailOutInfo)
        {
            // First separated check: attach bailout info here and make it shareable for later checks.
            globOpt->GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
            shareableBailOutInfo = boundCheck->GetBailOutInfo();
            shareableBailOutInfoOriginalOwner = boundCheck;
        }
        instr->extractedUpperBoundCheckWithoutHoisting = true;
        if (indexOpnd != nullptr && indexOpnd->IsRegOpnd())
        {
            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckEliminationPhase,
                instr,
                _u("Separating array upper bound check, as (s%u < s%u)\n"),
                indexVarSym->m_id,
                headSegmentLengthSym->m_id);
        }
        else
        {
            TRACE_PHASE_INSTR(
                Js::Phase::BoundCheckEliminationPhase,
                instr,
                _u("Separating array upper bound check, as (%d < s%u)\n"),
                indexOpnd ? indexOpnd->AsIntConstOpnd()->AsInt32() : baseOwnerIndir->GetOffset(),
                headSegmentLengthSym->m_id);
        }
        TESTTRACE_PHASE_INSTR(
            Js::Phase::BoundCheckEliminationPhase,
            instr,
            _u("Separating array upper bound check\n"));
        if (globOpt->DoBoundCheckHoist())
        {
            // Record the bound check instruction as available
            const bool added =
                globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(
                    IntBoundCheck(
                        indexValue ? indexValue->GetValueNumber() : ZeroValueNumber,
                        headSegmentLengthValue->GetValueNumber(),
                        boundCheck,
                        globOpt->currentBlock)) >= 0;
            Assert(added || failedToUpdateCompatibleUpperBoundCheck);
        }
    }
    // Update the index and head segment length values to reflect the bound check
    ValueInfo *newValueInfo =
        globOpt->UpdateIntBoundsForLessThan(
            indexValue,
            indexConstantBounds,
            headSegmentLengthValue,
            headSegmentLengthConstantBounds,
            false);
    if (newValueInfo)
    {
        globOpt->ChangeValueInfo(nullptr, indexValue, newValueInfo);
        AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
    }
    newValueInfo =
        globOpt->UpdateIntBoundsForGreaterThan(
            headSegmentLengthValue,
            headSegmentLengthConstantBounds,
            indexValue,
            indexConstantBounds,
            false);
    if (newValueInfo)
    {
        globOpt->ChangeValueInfo(nullptr, headSegmentLengthValue, newValueInfo);
    }
}
// After any of the array checks / head segment / head segment length / length loads have been
// hoisted, propagates the base array's (possibly trimmed) value info backward through the
// invariant blocks toward the root loop's landing pad. As the walk passes each hoist target's
// landing pad, the corresponding piece of information (head segment sym, length sym, ...) is
// removed from the value info hoisted further up, since it was not hoisted beyond that point.
void GlobOpt::ArraySrcOpt::UpdateHoistedValueInfo()
{
    // Iterate up to the root loop's landing pad until all necessary value info is updated
    uint hoistItemCount =
        static_cast<uint>(!!hoistChecksOutOfLoop) +
        !!hoistHeadSegmentLoadOutOfLoop +
        !!hoistHeadSegmentLengthLoadOutOfLoop +
        !!hoistLengthLoadOutOfLoop;
    if (hoistItemCount == 0)
    {
        return;
    }
    AnalysisAssert(globOpt->currentBlock->loop != nullptr);
    // Find the outermost loop containing the current block.
    Loop * rootLoop = nullptr;
    for (Loop *loop = globOpt->currentBlock->loop; loop; loop = loop->parent)
    {
        rootLoop = loop;
    }
    AnalysisAssert(rootLoop != nullptr);
    ValueInfo *valueInfoToHoist = baseValueInfo;
    bool removeHeadSegment, removeHeadSegmentLength, removeLength;
    if (baseArrayValueInfo)
    {
        // Syms tracked in the value info but not hoisted must not be propagated upward.
        removeHeadSegment = baseArrayValueInfo->HeadSegmentSym() && !hoistHeadSegmentLoadOutOfLoop;
        removeHeadSegmentLength = baseArrayValueInfo->HeadSegmentLengthSym() && !hoistHeadSegmentLengthLoadOutOfLoop;
        removeLength = baseArrayValueInfo->LengthSym() && !hoistLengthLoadOutOfLoop;
    }
    else
    {
        removeLength = removeHeadSegmentLength = removeHeadSegment = false;
    }
    for (InvariantBlockBackwardIterator it(
            globOpt,
            globOpt->currentBlock,
            rootLoop->landingPad,
            baseOpnd->m_sym,
            baseValue->GetValueNumber());
        it.IsValid();
        it.MoveNext())
    {
        if (removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            // Remove information that shouldn't be there anymore, from the value info
            valueInfoToHoist =
                valueInfoToHoist->AsArrayValueInfo()->Copy(
                    globOpt->alloc,
                    !removeHeadSegment,
                    !removeHeadSegmentLength,
                    !removeLength);
            removeLength = removeHeadSegmentLength = removeHeadSegment = false;
        }
        BasicBlock *const block = it.Block();
        Value *const blockBaseValue = it.InvariantSymValue();
        globOpt->HoistInvariantValueInfo(valueInfoToHoist, blockBaseValue, block);
        // See if we have completed hoisting value info for one of the items
        if (hoistChecksOutOfLoop && block == hoistChecksOutOfLoop->landingPad)
        {
            // All other items depend on array checks, so we can just stop here
            hoistChecksOutOfLoop = nullptr;
            break;
        }
        if (hoistHeadSegmentLoadOutOfLoop && block == hoistHeadSegmentLoadOutOfLoop->landingPad)
        {
            hoistHeadSegmentLoadOutOfLoop = nullptr;
            if (--hoistItemCount == 0)
            {
                break;
            }
            // Beyond this landing pad the head segment sym is not hoisted; drop it on the next iteration.
            if (valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentSym())
            {
                removeHeadSegment = true;
            }
        }
        if (hoistHeadSegmentLengthLoadOutOfLoop && block == hoistHeadSegmentLengthLoadOutOfLoop->landingPad)
        {
            hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
            if (--hoistItemCount == 0)
            {
                break;
            }
            if (valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentLengthSym())
            {
                removeHeadSegmentLength = true;
            }
        }
        if (hoistLengthLoadOutOfLoop && block == hoistLengthLoadOutOfLoop->landingPad)
        {
            hoistLengthLoadOutOfLoop = nullptr;
            if (--hoistItemCount == 0)
            {
                break;
            }
            if (valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->LengthSym())
            {
                removeLength = true;
            }
        }
    }
}
  1377. void GlobOpt::ArraySrcOpt::InsertInstrInLandingPad(IR::Instr *const instr, Loop *const hoistOutOfLoop)
  1378. {
  1379. if (hoistOutOfLoop->bailOutInfo->bailOutInstr)
  1380. {
  1381. instr->SetByteCodeOffset(hoistOutOfLoop->bailOutInfo->bailOutInstr);
  1382. hoistOutOfLoop->bailOutInfo->bailOutInstr->InsertBefore(instr);
  1383. }
  1384. else
  1385. {
  1386. instr->SetByteCodeOffset(hoistOutOfLoop->landingPad->GetLastInstr());
  1387. hoistOutOfLoop->landingPad->InsertAfter(instr);
  1388. }
  1389. };
  1390. void GlobOpt::ArraySrcOpt::ShareBailOut()
  1391. {
  1392. Assert(shareableBailOutInfo);
  1393. if (shareableBailOutInfo->bailOutInstr != shareableBailOutInfoOriginalOwner)
  1394. {
  1395. return;
  1396. }
  1397. Assert(shareableBailOutInfoOriginalOwner->GetBailOutInfo() == shareableBailOutInfo);
  1398. IR::Instr *const sharedBailOut = shareableBailOutInfoOriginalOwner->ShareBailOut();
  1399. Assert(sharedBailOut->GetBailOutInfo() == shareableBailOutInfo);
  1400. shareableBailOutInfoOriginalOwner = nullptr;
  1401. sharedBailOut->Unlink();
  1402. insertBeforeInstr->InsertBefore(sharedBailOut);
  1403. insertBeforeInstr = sharedBailOut;
  1404. }
  1405. void GlobOpt::ArraySrcOpt::InsertHeadSegmentLoad()
  1406. {
  1407. TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment load\n"));
  1408. Assert(newHeadSegmentSym);
  1409. IR::RegOpnd *const headSegmentOpnd = IR::RegOpnd::New(newHeadSegmentSym, newHeadSegmentSym->GetType(), instr->m_func);
  1410. headSegmentOpnd->SetIsJITOptimizedReg(true);
  1411. IR::RegOpnd *const jitOptimizedBaseOpnd = baseOpnd->Copy(instr->m_func)->AsRegOpnd();
  1412. jitOptimizedBaseOpnd->SetIsJITOptimizedReg(true);
  1413. IR::Instr *loadObjectArray;
  1414. if (baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
  1415. {
  1416. loadObjectArray =
  1417. IR::Instr::New(
  1418. Js::OpCode::LdIndir,
  1419. headSegmentOpnd,
  1420. IR::IndirOpnd::New(
  1421. jitOptimizedBaseOpnd,
  1422. Js::DynamicObject::GetOffsetOfObjectArray(),
  1423. jitOptimizedBaseOpnd->GetType(),
  1424. instr->m_func),
  1425. instr->m_func);
  1426. }
  1427. else
  1428. {
  1429. loadObjectArray = nullptr;
  1430. }
  1431. IR::Instr *const loadHeadSegment =
  1432. IR::Instr::New(
  1433. Js::OpCode::LdIndir,
  1434. headSegmentOpnd,
  1435. IR::IndirOpnd::New(
  1436. loadObjectArray ? headSegmentOpnd : jitOptimizedBaseOpnd,
  1437. Lowerer::GetArrayOffsetOfHeadSegment(baseValueType),
  1438. headSegmentOpnd->GetType(),
  1439. instr->m_func),
  1440. instr->m_func);
  1441. if (hoistHeadSegmentLoadOutOfLoop)
  1442. {
  1443. Assert(!(isLikelyJsArray && hoistHeadSegmentLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegments()));
  1444. TRACE_PHASE_INSTR(
  1445. Js::ArraySegmentHoistPhase,
  1446. instr,
  1447. _u("Hoisting array segment load out of loop %u to landing pad block %u\n"),
  1448. hoistHeadSegmentLoadOutOfLoop->GetLoopNumber(),
  1449. hoistHeadSegmentLoadOutOfLoop->landingPad->GetBlockNum());
  1450. TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment load out of loop\n"));
  1451. if (loadObjectArray)
  1452. {
  1453. InsertInstrInLandingPad(loadObjectArray, hoistHeadSegmentLoadOutOfLoop);
  1454. }
  1455. InsertInstrInLandingPad(loadHeadSegment, hoistHeadSegmentLoadOutOfLoop);
  1456. }
  1457. else
  1458. {
  1459. if (loadObjectArray)
  1460. {
  1461. loadObjectArray->SetByteCodeOffset(instr);
  1462. insertBeforeInstr->InsertBefore(loadObjectArray);
  1463. }
  1464. loadHeadSegment->SetByteCodeOffset(instr);
  1465. insertBeforeInstr->InsertBefore(loadHeadSegment);
  1466. instr->loadedArrayHeadSegment = true;
  1467. }
  1468. }
// Main driver for optimizing an array-accessing instruction: specializes the
// index, sets up pre-op implicit-call bailouts for profiled stores, hoists
// array checks / head segment / segment length / length loads, attempts
// bound check elimination, updates the tracked value info, and finally
// rewrites the base operand into an ArrayRegOpnd carrying the available syms.
void GlobOpt::ArraySrcOpt::Optimize()
{
    if (!CheckOpCode())
    {
        return;
    }

    // The base operand is owned either by an instruction or by an indir opnd,
    // never both; head segment length implies head segment.
    Assert(!(baseOwnerInstr && baseOwnerIndir));
    Assert(!needsHeadSegmentLength || needsHeadSegment);

    TypeSpecIndex();

    if (isProfilableStElem && !globOpt->IsLoopPrePass())
    {
        // If the dead-store pass decides to add the bailout kind IR::BailOutInvalidatedArrayHeadSegment, and the fast path is
        // generated, it may bail out before the operation is done, so this would need to be a pre-op bailout.
        if (instr->HasBailOutInfo())
        {
            Assert(
                instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
                instr->GetBailOutInfo()->bailOutOffset <= instr->GetByteCodeOffset());
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            // Only no main bailout kind, or BailOutOnImplicitCallsPreOp, is expected here.
            Assert(
                !(bailOutKind & ~IR::BailOutKindBits) ||
                (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
            if (!(bailOutKind & ~IR::BailOutKindBits))
            {
                // Only auxiliary bits are present; add the pre-op implicit-call kind.
                instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
            }
        }
        else
        {
            globOpt->GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
        }
    }

    // Look up the tracked value of the base; nothing to do without one.
    baseValue = globOpt->CurrentBlockData()->FindValue(baseOpnd->m_sym);
    if (baseValue == nullptr)
    {
        return;
    }

    baseValueInfo = baseValue->GetValueInfo();
    baseValueType = baseValueInfo->Type();
    baseOpnd->SetValueType(baseValueType);

    // Bail out of the optimization entirely if the base is not a likely
    // optimizable array, array check hoisting is disabled, or the index is
    // not expected to be a conventional array index.
    if (!baseValueType.IsLikelyAnyOptimizedArray() ||
        !globOpt->DoArrayCheckHoist(baseValueType, globOpt->currentBlock->loop, instr) ||
        (baseOwnerIndir && !globOpt->ShouldExpectConventionalArrayIndexValue(baseOwnerIndir)))
    {
        return;
    }

    isLikelyJsArray = !baseValueType.IsLikelyTypedArray();
    Assert(isLikelyJsArray == baseValueType.IsLikelyArrayOrObjectWithArray());
    Assert(!isLikelyJsArray == baseValueType.IsLikelyOptimizedTypedArray());

    if (!isLikelyJsArray && instr->m_opcode == Js::OpCode::LdMethodElem)
    {
        // Fast path is not generated in this case since the subsequent call will throw
        return;
    }

    isLikelyVirtualTypedArray = baseValueType.IsLikelyOptimizedVirtualTypedArray();
    Assert(!(isLikelyJsArray && isLikelyVirtualTypedArray));

    // The value type the base will have after the array checks succeed.
    newBaseValueType = baseValueType.ToDefiniteObject();
    if (isLikelyJsArray && newBaseValueType.HasNoMissingValues() && !globOpt->DoArrayMissingValueCheckHoist())
    {
        // Missing-value check hoisting is off, so don't track the
        // no-missing-values property.
        newBaseValueType = newBaseValueType.SetHasNoMissingValues(false);
    }
    Assert((newBaseValueType == baseValueType) == baseValueType.IsObject());

    if (globOpt->IsLoopPrePass())
    {
        if (newBaseValueType != baseValueType)
        {
            if (globOpt->IsSafeToTransferInPrePass(baseOpnd, baseValue))
            {
                UpdateValue(nullptr, nullptr, nullptr);
            }
            else if (isLikelyJsArray && globOpt->IsOperationThatLikelyKillsJsArraysWithNoMissingValues(instr) && baseValueInfo->HasNoMissingValues())
            {
                // This operation may introduce missing values; drop the
                // no-missing-values property from the tracked value.
                globOpt->ChangeValueType(nullptr, baseValue, baseValueInfo->Type().SetHasNoMissingValues(false), true);
            }
        }

        // For javascript arrays and objects with javascript arrays:
        //   - Implicit calls need to be disabled and calls cannot be allowed in the loop since the array vtable may be changed
        //     into an ES5 array.
        // For typed arrays:
        //   - A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the
        //     typed array's length is set to zero. Implicit calls need to be disabled if the typed array's head segment length
        //     is going to be loaded and used later.
        // Since we don't know if the loop has kills after this instruction, the kill information may not be complete. If a kill
        // is found later, this information will be updated to not require disabling implicit calls.
        const bool kills =
            isLikelyJsArray
                ? globOpt->rootLoopPrePass->jsArrayKills.KillsValueType(newBaseValueType)
                : globOpt->rootLoopPrePass->jsArrayKills.KillsTypedArrayHeadSegmentLengths();
        if (!kills)
        {
            globOpt->rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
        }
        return;
    }

    if (baseValueInfo->IsArrayValueInfo())
    {
        baseArrayValueInfo = baseValueInfo->AsArrayValueInfo();
    }

    // Decide which pieces of array state need to be loaded/hoisted, taking
    // into account what is already available from the tracked value info.
    doArrayChecks = !baseValueType.IsObject();
    doArraySegmentHoist =
        globOpt->DoArraySegmentHoist(baseValueType) &&
        instr->m_opcode != Js::OpCode::StElemC;
    headSegmentIsAvailable =
        baseArrayValueInfo &&
        baseArrayValueInfo->HeadSegmentSym();
    doHeadSegmentLoad =
        doArraySegmentHoist &&
        needsHeadSegment && !headSegmentIsAvailable;
    doArraySegmentLengthHoist =
        doArraySegmentHoist &&
        (isLikelyJsArray || globOpt->DoTypedArraySegmentLengthHoist(globOpt->currentBlock->loop));
    headSegmentLengthIsAvailable =
        baseArrayValueInfo &&
        baseArrayValueInfo->HeadSegmentLengthSym();
    doHeadSegmentLengthLoad =
        doArraySegmentLengthHoist &&
        (needsHeadSegmentLength || (!isLikelyJsArray && needsLength)) &&
        !headSegmentLengthIsAvailable;
    lengthIsAvailable =
        baseArrayValueInfo &&
        baseArrayValueInfo->LengthSym();
    doLengthLoad =
        globOpt->DoArrayLengthHoist() &&
        needsLength &&
        !lengthIsAvailable &&
        baseValueType.IsLikelyArray() &&
        globOpt->DoLdLenIntSpec(instr->m_opcode == Js::OpCode::LdLen_A ? instr : nullptr, baseValueType);

    // Fresh syms for whatever will be loaded below.
    newHeadSegmentSym = doHeadSegmentLoad ? StackSym::New(TyMachPtr, instr->m_func) : nullptr;
    newHeadSegmentLengthSym = doHeadSegmentLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
    newLengthSym = doLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;

    if (Js::IsSimd128LoadStore(instr->m_opcode) || instr->m_opcode == Js::OpCode::IsIn)
    {
        // SIMD_JS
        // simd load/store never call helper
        canBailOutOnArrayAccessHelperCall = true;
    }
    else
    {
        // Helper-call bailout is only worthwhile for profiled element
        // accesses whose profile data says a helper call is unlikely.
        canBailOutOnArrayAccessHelperCall =
            (isProfilableLdElem || isProfilableStElem) &&
            globOpt->DoEliminateArrayAccessHelperCall() &&
            !(
                instr->IsProfiledInstr() &&
                (
                    isProfilableLdElem
                        ? instr->AsProfiledInstr()->u.ldElemInfo->LikelyNeedsHelperCall()
                        : instr->AsProfiledInstr()->u.stElemInfo->LikelyNeedsHelperCall()
                )
            );
    }

    CheckVirtualArrayBounds();

    if (needsBoundChecks && globOpt->DoBoundCheckElimination())
    {
        // Attempt to eliminate the bound check(s) for this access.
        TryEliminiteBoundsCheck();
    }

    if (doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad || doExtractBoundChecks)
    {
        CheckLoops();
        insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();

        if (doArrayChecks)
        {
            DoArrayChecks();
        }
        if (doLengthLoad)
        {
            DoLengthLoad();
        }
        if (doHeadSegmentLoad && isLikelyJsArray)
        {
            // For javascript arrays, the head segment is required to load the head segment length
            InsertHeadSegmentLoad();
        }
        if (doHeadSegmentLengthLoad)
        {
            DoHeadSegmentLengthLoad();
        }
        if (doExtractBoundChecks)
        {
            DoExtractBoundChecks();
        }
        if (doHeadSegmentLoad && !isLikelyJsArray)
        {
            // For typed arrays, load the length first, followed by the bound checks, and then load the head segment. This
            // allows the length sym to become dead by the time of the head segment load, freeing up the register for use by the
            // head segment sym.
            InsertHeadSegmentLoad();
        }
        if (doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
        {
            // Record the newly available syms on the tracked value, then
            // re-read the (possibly replaced) value info.
            UpdateValue(newHeadSegmentSym, newHeadSegmentLengthSym, newLengthSym);
            baseValueInfo = baseValue->GetValueInfo();
            baseArrayValueInfo = baseValueInfo->IsArrayValueInfo() ? baseValueInfo->AsArrayValueInfo() : nullptr;
            UpdateHoistedValueInfo();
        }
    }

    IR::ArrayRegOpnd * baseArrayOpnd;
    if (baseArrayValueInfo != nullptr)
    {
        // Update the opnd to include the associated syms
        baseArrayOpnd =
            baseArrayValueInfo->CreateOpnd(
                baseOpnd,
                needsHeadSegment,
                needsHeadSegmentLength || (!isLikelyJsArray && needsLength),
                needsLength,
                eliminatedLowerBoundCheck,
                eliminatedUpperBoundCheck,
                instr->m_func);
        // Replace the base operand at its owner (src1/src2 of an instruction,
        // or the base of an indir) with the annotated ArrayRegOpnd.
        if (baseOwnerInstr != nullptr)
        {
            if (baseOwnerInstr->GetSrc1() == baseOpnd)
            {
                baseOwnerInstr->ReplaceSrc1(baseArrayOpnd);
            }
            else
            {
                Assert(baseOwnerInstr->GetSrc2() == baseOpnd);
                baseOwnerInstr->ReplaceSrc2(baseArrayOpnd);
            }
        }
        else
        {
            Assert(baseOwnerIndir);
            Assert(baseOwnerIndir->GetBaseOpnd() == baseOpnd);
            baseOwnerIndir->ReplaceBaseOpnd(baseArrayOpnd);
        }
        baseOpnd = baseArrayOpnd;
    }
    else
    {
        baseArrayOpnd = nullptr;
    }

    globOpt->ProcessNoImplicitCallArrayUses(baseOpnd, baseArrayOpnd, instr, isLikelyJsArray, isLoad || isStore || instr->m_opcode == Js::OpCode::IsIn);

    // Trace helper for everything that was eliminated on this instruction.
    const auto OnEliminated = [&](const Js::Phase phase, const char *const eliminatedLoad)
    {
        TRACE_TESTTRACE_PHASE_INSTR(phase, instr, _u("Eliminating array %S\n"), eliminatedLoad);
    };
    OnEliminated(Js::Phase::ArrayCheckHoistPhase, "checks");
    if (baseArrayOpnd)
    {
        if (baseArrayOpnd->HeadSegmentSym())
        {
            OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment load");
        }
        if (baseArrayOpnd->HeadSegmentLengthSym())
        {
            OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment length load");
        }
        if (baseArrayOpnd->LengthSym())
        {
            OnEliminated(Js::Phase::ArrayLengthHoistPhase, "length load");
        }
        if (baseArrayOpnd->EliminatedLowerBoundCheck())
        {
            OnEliminated(Js::Phase::BoundCheckEliminationPhase, "lower bound check");
        }
        if (baseArrayOpnd->EliminatedUpperBoundCheck())
        {
            OnEliminated(Js::Phase::BoundCheckEliminationPhase, "upper bound check");
        }
    }

    if (instr->m_opcode == Js::OpCode::IsIn)
    {
        // If both bound checks were eliminated, 'x in arr' is statically
        // known to be true: rewrite the instruction to load 'true'.
        if (eliminatedLowerBoundCheck && eliminatedUpperBoundCheck)
        {
            TRACE_TESTTRACE_PHASE_INSTR(Js::Phase::BoundCheckEliminationPhase, instr, _u("Eliminating IsIn\n"));

            globOpt->CaptureByteCodeSymUses(instr);

            instr->m_opcode = Js::OpCode::Ld_A;
            IR::AddrOpnd * addrOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetTrueAddr(), IR::AddrOpndKindDynamicVar, func, true);
            addrOpnd->SetValueType(ValueType::Boolean);
            instr->ReplaceSrc1(addrOpnd);
            instr->FreeSrc2();

            originalIndexOpnd->Free(func);
            originalIndexOpnd = nullptr;

            src1Val = globOpt->GetVarConstantValue(instr->GetSrc1()->AsAddrOpnd());
            src2Val = nullptr;
        }
        return;
    }

    if (!canBailOutOnArrayAccessHelperCall)
    {
        return;
    }

    // Bail out instead of generating a helper call. This helps to remove the array reference when the head segment and head
    // segment length are available, reduces code size, and allows bound checks to be separated.
    if (instr->HasBailOutInfo())
    {
        const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
        Assert(
            !(bailOutKind & ~IR::BailOutKindBits) ||
            (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
        // '&' binds tighter than '|': keep only the auxiliary bailout-kind
        // bits and replace the main kind with BailOutOnArrayAccessHelperCall.
        instr->SetBailOutKind(bailOutKind & IR::BailOutKindBits | IR::BailOutOnArrayAccessHelperCall);
    }
    else
    {
        globOpt->GenerateBailAtOperation(&instr, IR::BailOutOnArrayAccessHelperCall);
    }
}