FlowGraph.cpp 105 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "BackEnd.h"
  6. FlowGraph *
  7. FlowGraph::New(Func * func, JitArenaAllocator * alloc)
  8. {
  9. FlowGraph * graph;
  10. graph = JitAnew(alloc, FlowGraph, func, alloc);
  11. return graph;
  12. }
///----------------------------------------------------------------------------
///
/// FlowGraph::Build
///
/// Construct flow graph and loop structures for the current state of the function.
///
/// Walks the instruction stream backward, carving it into basic blocks at
/// every block-starting/-ending instruction, then numbers the blocks in
/// lexical order, removes unreachable blocks, builds loop structures, and
/// (when required) assigns EH regions before globopt runs.
///
///----------------------------------------------------------------------------
void
FlowGraph::Build(void)
{
    Func * func = this->func;

    // Run simple branch/label peepholes first so the graph isn't built over
    // trivially dead control flow.
    BEGIN_CODEGEN_PHASE(func, Js::FGPeepsPhase);
    this->RunPeeps();
    END_CODEGEN_PHASE(func, Js::FGPeepsPhase);

    // We don't optimize fully with SimpleJit. But, when JIT loop body is enabled, we do support
    // bailing out from a simple jitted function to do a full jit of a loop body in the function
    // (BailOnSimpleJitToFullJitLoopBody). For that purpose, we need the flow from try to catch.
    // NOTE: '&&' binds tighter than '||' here, which matches the intent:
    // HasTry && (DoOptimizeTryCatch || (IsSimpleJit && DoJITLoopBody)).
    if (this->func->HasTry() &&
        (this->func->DoOptimizeTryCatch() ||
        this->func->IsSimpleJit() && this->func->GetJnFunction()->DoJITLoopBody()
        )
       )
    {
        // Stack of enclosing catch labels, maintained during the backward walk.
        this->catchLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
    }

    IR::Instr * currLastInstr = nullptr;
    BasicBlock * block = nullptr;
    BasicBlock * nextBlock = nullptr;
    bool hasCall = false;
    // Backward walk so that successor blocks exist before their predecessors,
    // which keeps edge lists in the right order (see forward renumber below).
    FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)
    {
        if (currLastInstr == nullptr || instr->EndsBasicBlock())
        {
            // Start working on a new block.
            // If we're currently processing a block, then wrap it up before beginning a new one.
            if (currLastInstr != nullptr)
            {
                nextBlock = block;
                block = this->AddBlock(instr->m_next, currLastInstr, nextBlock);
                block->hasCall = hasCall;
                hasCall = false;
            }

            currLastInstr = instr;
        }

        if (instr->StartsBasicBlock())
        {
            // Insert a BrOnException after the loop top if we are in a try-catch. This is required to
            // model flow from the loop to the catch block for loops that don't have a break condition.
            if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop &&
                this->catchLabelStack && !this->catchLabelStack->Empty() &&
                instr->m_next->m_opcode != Js::OpCode::BrOnException)
            {
                IR::BranchInstr * brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->catchLabelStack->Top(), instr->m_func);
                instr->InsertAfter(brOnException);
                instrPrev = brOnException; // process BrOnException before adding a new block for loop top label.
                continue;
            }

            // Wrap up the current block and get ready to process a new one.
            nextBlock = block;
            block = this->AddBlock(instr, currLastInstr, nextBlock);
            block->hasCall = hasCall;
            hasCall = false;
            currLastInstr = nullptr;
        }

        switch (instr->m_opcode)
        {
        case Js::OpCode::Catch:
            // A Catch is always preceded by its handler label; push it so loops
            // inside the try can get BrOnException edges to it (see above).
            Assert(instr->m_prev->IsLabelInstr());
            if (this->catchLabelStack)
            {
                this->catchLabelStack->Push(instr->m_prev->AsLabelInstr());
            }
            break;

        case Js::OpCode::TryCatch:
            // Backward walk: leaving the try region pops the matching catch label.
            if (this->catchLabelStack)
            {
                this->catchLabelStack->Pop();
            }
            break;

        case Js::OpCode::CloneBlockScope:
        case Js::OpCode::CloneInnerScopeSlots:
            // It would be nice to do this in IRBuilder, but doing so gives us
            // trouble when doing the DoSlotArrayCheck since it assume single def
            // of the sym to do its check properly. So instead we assign the dst
            // here in FlowGraph.
            instr->SetDst(instr->GetSrc1());
            break;
        }

        if (OpCodeAttr::UseAllFields(instr->m_opcode))
        {
            // UseAllFields opcode are call instruction or opcode that would call.
            hasCall = true;
            if (OpCodeAttr::CallInstr(instr->m_opcode))
            {
                if (!instr->isCallInstrProtectedByNoProfileBailout)
                {
                    instr->m_func->SetHasCallsOnSelfAndParents();
                }

                // For ARM & X64 because of their register calling convention
                // the ArgOuts need to be moved next to the call.
#if defined(_M_ARM) || defined(_M_X64)
                IR::Instr* argInsertInstr = instr;
                instr->IterateArgInstrs([&](IR::Instr* argInstr)
                {
                    if (argInstr->m_opcode != Js::OpCode::LdSpreadIndices &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_Dynamic &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_FromStackArgs &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_SpreadArg)
                    {
                        // don't have bailout in asm.js so we don't need BytecodeArgOutCapture
                        if (!argInstr->m_func->GetJnFunction()->GetIsAsmjsMode())
                        {
                            // Need to always generate byte code arg out capture,
                            // because bailout can't restore from the arg out as it is
                            // replaced by new sym for register calling convention in lower
                            argInstr->GenerateBytecodeArgOutCapture();
                        }
                        // Check if the instruction is already next
                        if (argInstr != argInsertInstr->m_prev)
                        {
                            // It is not, move it.
                            argInstr->Move(argInsertInstr);
                        }
                        argInsertInstr = argInstr;
                    }
                    return false;
                });
#endif
            }
        }
    }
    NEXT_INSTR_IN_FUNC_BACKWARD_EDITING;

    this->func->isFlowGraphValid = true;
    // Every pushed catch label must have been popped by its TryCatch.
    Assert(!this->catchLabelStack || this->catchLabelStack->Empty());

    // We've been walking backward so that edge lists would be in the right order. Now walk the blocks
    // forward to number the blocks in lexical order.
    unsigned int blockNum = 0;
    FOREACH_BLOCK(block, this)
    {
        block->SetBlockNum(blockNum++);
    }NEXT_BLOCK;
    AssertMsg(blockNum == this->blockCount, "Block count is out of whack");

    this->RemoveUnreachableBlocks();

    this->FindLoops();

    bool breakBlocksRelocated = this->CanonicalizeLoops();

#if DBG
    this->VerifyLoopGraph();
#endif

    // Renumber the blocks. Break block remove code has likely inserted new basic blocks.
    blockNum = 0;

    // Regions need to be assigned before Globopt because:
    //
    // 1. FullJit: The Backward Pass will set the write-through symbols on the regions and the forward pass will
    //    use this information to insert ToVars for those symbols. Also, for a symbol determined as write-through
    //    in the try region to be restored correctly by the bailout, it should not be removed from the
    //    byteCodeUpwardExposedUsed upon a def in the try region (the def might be preempted by an exception).
    //
    // 2. SimpleJit: Same case of correct restoration as above applies in SimpleJit too. However, the only bailout
    //    we have in Simple Jitted code right now is BailOnSimpleJitToFullJitLoopBody, installed in IRBuilder. So,
    //    for now, we can just check if the func has a bailout to assign regions pre globopt while running SimpleJit.
    bool assignRegionsBeforeGlobopt = this->func->HasTry() &&
        (this->func->DoOptimizeTryCatch() || (this->func->IsSimpleJit() && this->func->hasBailout));
    Region ** blockToRegion = nullptr;
    if (assignRegionsBeforeGlobopt)
    {
        // Map from block number to its EH region; filled in by UpdateRegionForBlock.
        blockToRegion = JitAnewArrayZ(this->alloc, Region*, this->blockCount);
    }
    FOREACH_BLOCK_ALL(block, this)
    {
        block->SetBlockNum(blockNum++);
        if (assignRegionsBeforeGlobopt)
        {
            // Skip blocks that were deleted (but keep dead blocks: they still
            // need a region so bailout restoration stays consistent).
            if (block->isDeleted && !block->isDead)
            {
                continue;
            }
            this->UpdateRegionForBlock(block, blockToRegion);
        }
    } NEXT_BLOCK_ALL;

    AssertMsg (blockNum == this->blockCount, "Block count is out of whack");

    if (breakBlocksRelocated)
    {
        // Sort loop lists only if there is break block removal.
        SortLoopLists();
    }
#if DBG_DUMP
    this->Dump(false, nullptr);
#endif
}
// Restore lexical (block-number) order in every loop's block list.
// Needed after break-block relocation, which moves blocks around and can
// leave loop block lists out of order.
void
FlowGraph::SortLoopLists()
{
    // Sort the blocks in loopList
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        unsigned int lastBlockNumber = loop->GetHeadBlock()->GetBlockNum();
        // Insertion sort as the blockList is almost sorted in the loop.
        FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)
        {
            if (lastBlockNumber <= block->GetBlockNum())
            {
                // Still in order; just advance the running maximum.
                lastBlockNumber = block->GetBlockNum();
            }
            else
            {
                // Out of order: unlink this block and scan from the front for
                // its insertion point (first block with a larger number).
                iter.UnlinkCurrent();
                FOREACH_BLOCK_IN_LOOP_EDITING(insertBlock,loop,newIter)
                {
                    if (insertBlock->GetBlockNum() > block->GetBlockNum())
                    {
                        break;
                    }
                }NEXT_BLOCK_IN_LOOP_EDITING;
                // newIter is declared by the macro before the loop body, so it
                // remains in scope here, positioned at the insertion point.
                newIter.InsertBefore(block);
            }
        }NEXT_BLOCK_IN_LOOP_EDITING;
    }
}
// Pre-flow-graph peephole pass: simplifies branches, removes unreferenced
// labels, merges Cm + BrTrue/BrFalse pairs, and opportunistically converts
// signed compares to unsigned ones after a ShrU by 0 (which proves the value
// fits in an unsigned range). Skipped entirely for functions with try
// (control flow around EH is too delicate) or when the phase is disabled.
void
FlowGraph::RunPeeps()
{
    if (this->func->HasTry())
    {
        return;
    }

    if (PHASE_OFF(Js::FGPeepsPhase, this->func))
    {
        return;
    }

    // Most recently seen compare whose result feeds a later BrTrue/BrFalse.
    IR::Instr * instrCm = nullptr;
    // Set once we've seen "x = y >>> 0" with a single-def dst; enables the
    // signed->unsigned compare peep for subsequent compares/branches.
    bool tryUnsignedCmpPeep = false;

    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->func)
    {
        switch(instr->m_opcode)
        {
        case Js::OpCode::Br:
        case Js::OpCode::BrEq_I4:
        case Js::OpCode::BrGe_I4:
        case Js::OpCode::BrGt_I4:
        case Js::OpCode::BrLt_I4:
        case Js::OpCode::BrLe_I4:
        case Js::OpCode::BrUnGe_I4:
        case Js::OpCode::BrUnGt_I4:
        case Js::OpCode::BrUnLt_I4:
        case Js::OpCode::BrUnLe_I4:
        case Js::OpCode::BrNeq_I4:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrUnGe_A:
        case Js::OpCode::BrUnGt_A:
        case Js::OpCode::BrUnLt_A:
        case Js::OpCode::BrUnLe_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrNotNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrNotNull_A:
        case Js::OpCode::BrNotAddr_A:
        case Js::OpCode::BrAddr_A:
        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrOnHasProperty:
        case Js::OpCode::BrOnNoProperty:
        case Js::OpCode::BrHasSideEffects:
        case Js::OpCode::BrNotHasSideEffects:
        case Js::OpCode::BrFncEqApply:
        case Js::OpCode::BrFncNeqApply:
        case Js::OpCode::BrOnEmpty:
        case Js::OpCode::BrOnNotEmpty:
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        case Js::OpCode::BrOnObject_A:
        case Js::OpCode::BrOnClassConstructor:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            break;

        case Js::OpCode::MultiBr:
            // TODO: Run peeps on these as well...
            break;

        case Js::OpCode::BrTrue_I4:
        case Js::OpCode::BrFalse_I4:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instrCm)
            {
                // This branch consumes a pending compare: fuse them.
                if (instrCm->GetDst()->IsInt32())
                {
                    Assert(instr->m_opcode == Js::OpCode::BrTrue_I4 || instr->m_opcode == Js::OpCode::BrFalse_I4);
                    instrNext = this->PeepTypedCm(instrCm);
                }
                else
                {
                    instrNext = this->PeepCm(instrCm);
                }
                instrCm = nullptr;

                if (instrNext == nullptr)
                {
                    // Set instrNext back to the current instr.
                    instrNext = instr;
                }
            }
            else
            {
                instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            }
            break;

        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmSrNeq_A:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            // Fall through (intentional): signed compares also record instrCm,
            // just like the already-unsigned compares below.
        case Js::OpCode::CmUnGe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGe_A:
        case Js::OpCode::CmUnGt_A:
        case Js::OpCode::CmUnLt_A:
        case Js::OpCode::CmUnLe_A:
            // There may be useless branches between the Cm instr and the branch that uses the result.
            // So save the last Cm instr seen, and trigger the peep on the next BrTrue/BrFalse.
            instrCm = instr;
            break;

        case Js::OpCode::Label:
            if (instr->AsLabelInstr()->IsUnreferenced())
            {
                instrNext = Peeps::PeepUnreachableLabel(instr->AsLabelInstr(), false);
            }
            break;

        case Js::OpCode::StatementBoundary:
            // Re-anchor the boundary to the next real instruction's offset.
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(instr->GetNextRealInstrOrLabel());
            break;

        case Js::OpCode::ShrU_I4:
        case Js::OpCode::ShrU_A:
            if (tryUnsignedCmpPeep)
            {
                break;
            }
            // "x >>> 0" with a single-def dst marks x as effectively unsigned,
            // enabling the unsigned-compare peep for later instructions.
            if (instr->GetDst()->AsRegOpnd()->m_sym->IsSingleDef()
                && instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->IsTaggableIntConst()
                && instr->GetSrc2()->AsRegOpnd()->m_sym->GetIntConstValue() == 0)
            {
                tryUnsignedCmpPeep = true;
            }
            break;

        default:
            // Every branch opcode must be handled above so it gets peeped.
            Assert(!instr->IsBranchInstr());
        }
    } NEXT_INSTR_IN_FUNC_EDITING;
}
// Create a landing pad (preheader) block immediately before this loop's head
// block and reroute all non-back-edge predecessors through it, so the loop has
// a single non-loop entry block where globopt can hoist instructions.
void
Loop::InsertLandingPad(FlowGraph *fg)
{
    BasicBlock *headBlock = this->GetHeadBlock();

    // Always create a landing pad. This allows globopt to easily hoist instructions
    // and re-optimize the block if needed.
    BasicBlock *landingPad = BasicBlock::New(fg);
    this->landingPad = landingPad;

    // The landing pad consists of a single new label placed just before the
    // loop head's first instruction.
    IR::Instr * headInstr = headBlock->GetFirstInstr();
    IR::LabelInstr *landingPadLabel = IR::LabelInstr::New(Js::OpCode::Label, headInstr->m_func);
    landingPadLabel->SetByteCodeOffset(headInstr);
    headInstr->InsertBefore(landingPadLabel);

    landingPadLabel->SetBasicBlock(landingPad);

    landingPad->SetBlockNum(fg->blockCount++);
    landingPad->SetFirstInstr(landingPadLabel);
    landingPad->SetLastInstr(landingPadLabel);

    // Splice the landing pad into the doubly-linked block list right before
    // the head block.
    landingPad->prev = headBlock->prev;
    landingPad->prev->next = landingPad;
    landingPad->next = headBlock;
    headBlock->prev = landingPad;

    // The landing pad belongs to the enclosing loop, not to this loop.
    Loop *parentLoop = this->parent;
    landingPad->loop = parentLoop;

    // We need to add this block to the block list of the parent loops
    while (parentLoop)
    {
        // Find the head block in the block list of the parent loop
        FOREACH_BLOCK_IN_LOOP_EDITING(block, parentLoop, iter)
        {
            if (block == headBlock)
            {
                // Add the landing pad to the block list
                iter.InsertBefore(landingPad);
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        parentLoop = parentLoop->parent;
    }

    // Fix predecessor flow edges
    FOREACH_PREDECESSOR_EDGE_EDITING(edge, headBlock, iter)
    {
        // Make sure it isn't a back-edge
        if (edge->GetPred()->loop != this && !this->IsDescendentOrSelf(edge->GetPred()->loop))
        {
            // Retarget the predecessor's branch (if any) from the head label to
            // the landing pad label, then move the edge itself.
            if (edge->GetPred()->GetLastInstr()->IsBranchInstr() && headBlock->GetFirstInstr()->IsLabelInstr())
            {
                IR::BranchInstr *branch = edge->GetPred()->GetLastInstr()->AsBranchInstr();
                branch->ReplaceTarget(headBlock->GetFirstInstr()->AsLabelInstr(), landingPadLabel);
            }
            headBlock->UnlinkPred(edge->GetPred(), false);
            landingPad->AddPred(edge, fg);
            edge->SetSucc(landingPad);
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    // Finally, the landing pad falls through into the loop head.
    fg->AddEdge(landingPad, headBlock);
}
// Canonicalize this loop by relocating "break blocks" — lexical runs of blocks
// inside the loop's block range that don't actually belong to the loop (or a
// nested loop) — to the end of the loop. Returns true if any block was moved,
// so the caller knows loop block lists need re-sorting.
bool
Loop::RemoveBreakBlocks(FlowGraph *fg)
{
    bool breakBlockRelocated = false;
    if (PHASE_OFF(Js::RemoveBreakBlockPhase, fg->GetFunc()))
    {
        return false;
    }

    // The loop's last block in its block list is its lexical tail.
    BasicBlock *loopTailBlock = nullptr;
    FOREACH_BLOCK_IN_LOOP(block, this)
    {
        loopTailBlock = block;
    }NEXT_BLOCK_IN_LOOP;

    AnalysisAssert(loopTailBlock);

    // Walk backward from the tail to the head, looking for blocks whose loop
    // is not this loop or one of its descendants — those are break blocks.
    FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(breakBlockEnd, loopTailBlock, this->GetHeadBlock(), blockPrev)
    {
        while (!this->IsDescendentOrSelf(breakBlockEnd->loop))
        {
            // Found at least one break block;
            breakBlockRelocated = true;

#if DBG
            breakBlockEnd->isBreakBlock = true;
#endif
            // Find the first block in this break block sequence.
            BasicBlock *breakBlockStart = breakBlockEnd;
            BasicBlock *breakBlockStartPrev = breakBlockEnd->GetPrev();

            // Walk back the blocks until we find a block which belongs to that block.
            // Note: We don't really care if there are break blocks corresponding to different loops. We move the blocks conservatively to the end of the loop.
            // Algorithm works on one loop at a time.
            while((breakBlockStartPrev->loop == breakBlockEnd->loop) || !this->IsDescendentOrSelf(breakBlockStartPrev->loop))
            {
                breakBlockStart = breakBlockStartPrev;
                breakBlockStartPrev = breakBlockStartPrev->GetPrev();
            }

#if DBG
            breakBlockStart->isBreakBlock = true; // Mark the first block as well.
#endif

            BasicBlock *exitLoopTail = loopTailBlock;
            // Move these break blocks to the tail of the loop.
            fg->MoveBlocksBefore(breakBlockStart, breakBlockEnd, exitLoopTail->next);

#if DBG_DUMP
            fg->Dump(true /*needs verbose flag*/, L"\n After Each iteration of canonicalization \n");
#endif
            // Again be conservative, there are edits to the loop graph. Start fresh for this loop.
            breakBlockEnd = loopTailBlock;
            blockPrev = breakBlockEnd->prev;
        }
    } NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING;

    return breakBlockRelocated;
}
// Move the contiguous block range [blockStart, blockEnd] so that it sits
// immediately before insertBlock, fixing block linkage, instruction linkage,
// and fall-through control flow (inserting compensation branches/blocks where
// a fall-through edge would otherwise be broken at the source or the sink).
void
FlowGraph::MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock)
{
    BasicBlock *srcPredBlock = blockStart->prev;
    BasicBlock *srcNextBlock = blockEnd->next;
    BasicBlock *dstPredBlock = insertBlock->prev;
    // Capture instruction anchors before relinking mutates anything.
    IR::Instr* dstPredBlockLastInstr = dstPredBlock->GetLastInstr();
    IR::Instr* blockEndLastInstr = blockEnd->GetLastInstr();

    // Fix block linkage
    srcPredBlock->next = srcNextBlock;
    srcNextBlock->prev = srcPredBlock;

    dstPredBlock->next = blockStart;
    insertBlock->prev = blockEnd;

    blockStart->prev = dstPredBlock;
    blockEnd->next = insertBlock;

    // Fix instruction linkage
    IR::Instr::MoveRangeAfter(blockStart->GetFirstInstr(), blockEndLastInstr, dstPredBlockLastInstr);

    // Fix instruction flow
    IR::Instr *srcLastInstr = srcPredBlock->GetLastInstr();
    if (srcLastInstr->IsBranchInstr() && srcLastInstr->AsBranchInstr()->HasFallThrough())
    {
        // There was a fallthrough in the break blocks original position.
        IR::BranchInstr *srcBranch = srcLastInstr->AsBranchInstr();
        IR::Instr *srcBranchNextInstr = srcBranch->GetNextRealInstrOrLabel();

        // Save the target and invert the branch.
        IR::LabelInstr *srcBranchTarget = srcBranch->GetTarget();
        srcPredBlock->InvertBranch(srcBranch);
        IR::LabelInstr *srcLabel = blockStart->GetFirstInstr()->AsLabelInstr();

        // Point the inverted branch to break block.
        srcBranch->SetTarget(srcLabel);

        if (srcBranchNextInstr != srcBranchTarget)
        {
            // The inverted branch no longer falls through to its old target;
            // insert a compensation block on that edge.
            FlowEdge *srcEdge = this->FindEdge(srcPredBlock, srcBranchTarget->GetBasicBlock());
            Assert(srcEdge);

            BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(srcEdge, true /*insert compensation block to loop list*/, false /*At source*/);
            Assert(compensationBlock);
        }
    }

    IR::Instr *dstLastInstr = dstPredBlockLastInstr;
    if (dstLastInstr->IsBranchInstr() && dstLastInstr->AsBranchInstr()->HasFallThrough())
    {
        //There is a fallthrough in the block after which break block is inserted.
        FlowEdge *dstEdge = this->FindEdge(dstPredBlock, blockEnd->GetNext());
        Assert(dstEdge);

        BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert compensation block to loop list*/, true /*At sink*/);
        Assert(compensationBlock);
    }
}
  540. FlowEdge *
  541. FlowGraph::FindEdge(BasicBlock *predBlock, BasicBlock *succBlock)
  542. {
  543. FlowEdge *srcEdge = nullptr;
  544. FOREACH_SUCCESSOR_EDGE(edge, predBlock)
  545. {
  546. if (edge->GetSucc() == succBlock)
  547. {
  548. srcEdge = edge;
  549. break;
  550. }
  551. } NEXT_SUCCESSOR_EDGE;
  552. return srcEdge;
  553. }
  554. void
  555. BasicBlock::InvertBranch(IR::BranchInstr *branch)
  556. {
  557. Assert(this->GetLastInstr() == branch);
  558. Assert(this->GetSuccList()->HasTwo());
  559. branch->Invert();
  560. this->GetSuccList()->Reverse();
  561. }
  562. bool
  563. FlowGraph::CanonicalizeLoops()
  564. {
  565. if (this->func->HasProfileInfo())
  566. {
  567. this->implicitCallFlags = this->func->GetProfileInfo()->GetImplicitCallFlags();
  568. for (Loop *loop = this->loopList; loop; loop = loop->next)
  569. {
  570. this->implicitCallFlags = (Js::ImplicitCallFlags)(this->implicitCallFlags | loop->GetImplicitCallFlags());
  571. }
  572. }
  573. #if DBG_DUMP
  574. this->Dump(true, L"\n Before cannonicalizeLoops \n");
  575. #endif
  576. bool breakBlockRelocated = false;
  577. for (Loop *loop = this->loopList; loop; loop = loop->next)
  578. {
  579. loop->InsertLandingPad(this);
  580. if (!this->func->HasTry() || this->func->DoOptimizeTryCatch())
  581. {
  582. bool relocated = loop->RemoveBreakBlocks(this);
  583. if (!breakBlockRelocated && relocated)
  584. {
  585. breakBlockRelocated = true;
  586. }
  587. }
  588. }
  589. #if DBG_DUMP
  590. this->Dump(true, L"\n After cannonicalizeLoops \n");
  591. #endif
  592. return breakBlockRelocated;
  593. }
  594. // Find the loops in this function, build the loop structure, and build a linked
  595. // list of the basic blocks in this loop (including blocks of inner loops). The
  596. // list preserves the reverse post-order of the blocks in the flowgraph block list.
  597. void
  598. FlowGraph::FindLoops()
  599. {
  600. if (!this->hasLoop)
  601. {
  602. return;
  603. }
  604. Func * func = this->func;
  605. FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func)
  606. {
  607. if (block->loop != nullptr)
  608. {
  609. // Block already visited
  610. continue;
  611. }
  612. FOREACH_SUCCESSOR_BLOCK(succ, block)
  613. {
  614. if (succ->isLoopHeader && succ->loop == nullptr)
  615. {
  616. // Found a loop back-edge
  617. BuildLoop(succ, block);
  618. }
  619. } NEXT_SUCCESSOR_BLOCK;
  620. if (block->isLoopHeader && block->loop == nullptr)
  621. {
  622. // We would have built a loop for it if it was a loop...
  623. block->isLoopHeader = false;
  624. block->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
  625. }
  626. } NEXT_BLOCK_BACKWARD_IN_FUNC;
  627. }
// Build the Loop structure headed by headBlock, given tailBlock as the source
// of a back-edge. parentLoop, when non-null, is a candidate enclosing loop.
// Recurses into inner loops via WalkLoopBlocks.
void
FlowGraph::BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop)
{
    // This function is recursive, so when jitting in the foreground, probe the stack
    if(!func->IsBackgroundJIT())
    {
        PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
    }

    if (tailBlock->number < headBlock->number)
    {
        // Not a loop. We didn't see any back-edge.
        headBlock->isLoopHeader = false;
        headBlock->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        return;
    }

    Assert(headBlock->isLoopHeader);
    // Allocate the loop and push it onto the flow graph's loop list.
    Loop *loop = JitAnewZ(this->GetFunc()->m_alloc, Loop, this->GetFunc()->m_alloc, this->GetFunc());
    loop->next = this->loopList;
    this->loopList = loop;
    headBlock->loop = loop;
    loop->headBlock = headBlock;
    loop->int32SymsOnEntry = nullptr;
    loop->lossyInt32SymsOnEntry = nullptr;

    // If parentLoop is a parent of loop, it's headBlock better appear first.
    if (parentLoop && loop->headBlock->number > parentLoop->headBlock->number)
    {
        loop->parent = parentLoop;
        parentLoop->isLeaf = false;
    }
    loop->hasDeadStoreCollectionPass = false;
    loop->hasDeadStorePrepass = false;
    loop->memOpInfo = nullptr;

    NoRecoverMemoryJitArenaAllocator tempAlloc(L"BE-LoopBuilder", this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);

    // Collect the loop's blocks (and recurse into inner loops).
    WalkLoopBlocks(tailBlock, loop, &tempAlloc);

    Assert(loop->GetHeadBlock() == headBlock);

    IR::LabelInstr * firstInstr = loop->GetLoopTopInstr();

    firstInstr->SetLoop(loop);

    if (firstInstr->IsProfiledLabelInstr())
    {
        // Seed the loop with the flags the profiler collected for it.
        loop->SetImplicitCallFlags(firstInstr->AsProfiledLabelInstr()->loopImplicitCallFlags);
        loop->SetLoopFlags(firstInstr->AsProfiledLabelInstr()->loopFlags);
    }
    else
    {
        // Didn't collect profile information, don't do optimizations
        loop->SetImplicitCallFlags(Js::ImplicitCall_All);
    }
}
  676. Loop::MemCopyCandidate* Loop::MemOpCandidate::AsMemCopy()
  677. {
  678. Assert(this->IsMemCopy());
  679. return (Loop::MemCopyCandidate*)this;
  680. }
  681. Loop::MemSetCandidate* Loop::MemOpCandidate::AsMemSet()
  682. {
  683. Assert(this->IsMemSet());
  684. return (Loop::MemSetCandidate*)this;
  685. }
  686. bool Loop::EnsureMemOpVariablesInitialized()
  687. {
  688. if (this->memOpInfo == nullptr)
  689. {
  690. JitArenaAllocator *allocator = this->GetFunc()->GetTopFunc()->m_fg->alloc;
  691. this->memOpInfo = JitAnewStruct(allocator, Loop::MemOpInfo);
  692. this->memOpInfo->inductionVariablesUsedAfterLoop = nullptr;
  693. this->memOpInfo->startIndexOpndCache[0] = nullptr;
  694. this->memOpInfo->startIndexOpndCache[1] = nullptr;
  695. this->memOpInfo->startIndexOpndCache[2] = nullptr;
  696. this->memOpInfo->startIndexOpndCache[3] = nullptr;
  697. if (this->GetLoopFlags().isInterpreted && !this->GetLoopFlags().memopMinCountReached)
  698. {
  699. #if DBG_DUMP
  700. Func* func = this->GetFunc();
  701. if (Js::Configuration::Global.flags.Verbose && PHASE_TRACE(Js::MemOpPhase, func))
  702. {
  703. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  704. Output::Print(L"MemOp skipped: minimum loop count not reached: Function: %s %s, Loop: %d\n",
  705. func->GetJnFunction()->GetDisplayName(),
  706. func->GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  707. this->GetLoopNumber()
  708. );
  709. }
  710. #endif
  711. this->memOpInfo->doMemOp = false;
  712. this->memOpInfo->inductionVariableChangeInfoMap = nullptr;
  713. this->memOpInfo->inductionVariableOpndPerUnrollMap = nullptr;
  714. this->memOpInfo->candidates = nullptr;
  715. return false;
  716. }
  717. this->memOpInfo->doMemOp = true;
  718. this->memOpInfo->inductionVariableChangeInfoMap = JitAnew(allocator, Loop::InductionVariableChangeInfoMap, allocator);
  719. this->memOpInfo->inductionVariableOpndPerUnrollMap = JitAnew(allocator, Loop::InductionVariableOpndPerUnrollMap, allocator);
  720. this->memOpInfo->candidates = JitAnew(allocator, Loop::MemOpList, allocator);
  721. }
  722. return true;
  723. }
// Walk the basic blocks backwards until we find the loop header.
// Mark basic blocks in the loop by looking at the predecessors
// of blocks known to be in the loop.
// Recurse on inner loops.
void
FlowGraph::WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc)
{
    AnalysisAssert(loop);

    // Bit vector of block numbers known to belong to this loop.
    BVSparse<JitArenaAllocator> *loopBlocksBv = JitAnew(tempAlloc, BVSparse<JitArenaAllocator>, tempAlloc);
    BasicBlock *tailBlock = block;
    BasicBlock *lastBlock;
    loopBlocksBv->Set(block->GetBlockNum());

    this->AddBlockToLoop(block, loop);

    if (block == loop->headBlock)
    {
        // Single block loop, we're done
        return;
    }

    do
    {
        // A block is in the loop iff a (later-visited) loop block listed it
        // as a predecessor, i.e. its bit is already set.
        BOOL isInLoop = loopBlocksBv->Test(block->GetBlockNum());

        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader)
            {
                // Found a loop back-edge
                if (loop->headBlock == succ)
                {
                    isInLoop = true;
                }
                else if (succ->loop == nullptr || succ->loop->headBlock != succ)
                {
                    // Recurse on inner loop
                    BuildLoop(succ, block, isInLoop ? loop : nullptr);
                }
            }
        } NEXT_SUCCESSOR_BLOCK;

        if (isInLoop)
        {
            // This block is in the loop. All of its predecessors should be contained in the loop as well.
            FOREACH_PREDECESSOR_BLOCK(pred, block)
            {
                // Fix up loop parent if it isn't set already.
                // If pred->loop != loop, we're looking at an inner loop, which was already visited.
                // If pred->loop->parent == nullptr, this is the first time we see this loop from an outer
                // loop, so this must be an immediate child.
                if (pred->loop && pred->loop != loop && loop->headBlock->number < pred->loop->headBlock->number
                    && (pred->loop->parent == nullptr || pred->loop->parent->headBlock->number < loop->headBlock->number))
                {
                    pred->loop->parent = loop;
                    loop->isLeaf = false;
                    if (pred->loop->hasCall)
                    {
                        loop->SetHasCall();
                    }
                    loop->SetImplicitCallFlags(pred->loop->GetImplicitCallFlags());
                }
                // Add pred to loop bit vector
                loopBlocksBv->Set(pred->GetBlockNum());
            } NEXT_PREDECESSOR_BLOCK;

            // Claim the block for this loop unless it already belongs to a
            // loop that is not nested within this one.
            if (block->loop == nullptr || block->loop->IsDescendentOrSelf(loop))
            {
                block->loop = loop;
            }

            if (block != tailBlock)
            {
                // tailBlock was already added before the walk started.
                this->AddBlockToLoop(block, loop);
            }
        }

        lastBlock = block;
        block = block->GetPrev();
    } while (lastBlock != loop->headBlock);
}
  797. // Add block to this loop, and it's parent loops.
  798. void
  799. FlowGraph::AddBlockToLoop(BasicBlock *block, Loop *loop)
  800. {
  801. loop->blockList.Prepend(block);
  802. if (block->hasCall)
  803. {
  804. loop->SetHasCall();
  805. }
  806. }
///----------------------------------------------------------------------------
///
/// FlowGraph::AddBlock
///
/// Finish processing of a new block: hook up successor arcs, note loops, etc.
///
///----------------------------------------------------------------------------
BasicBlock *
FlowGraph::AddBlock(
    IR::Instr * firstInstr,
    IR::Instr * lastInstr,
    BasicBlock * nextBlock)
{
    BasicBlock * block;
    IR::LabelInstr * labelInstr;

    if (firstInstr->IsLabelInstr())
    {
        labelInstr = firstInstr->AsLabelInstr();
    }
    else
    {
        // Ensure every block starts with a label so it can be targeted.
        labelInstr = IR::LabelInstr::New(Js::OpCode::Label, firstInstr->m_func);
        labelInstr->SetByteCodeOffset(firstInstr);
        if (firstInstr->IsEntryInstr())
        {
            firstInstr->InsertAfter(labelInstr);
        }
        else
        {
            firstInstr->InsertBefore(labelInstr);
        }
        firstInstr = labelInstr;
    }

    // Reuse the block already attached to the label (created when the label
    // was seen as a branch target), or create a fresh one.
    block = labelInstr->GetBasicBlock();
    if (block == nullptr)
    {
        block = BasicBlock::New(this);
        labelInstr->SetBasicBlock(block);

        // Remember last block in function to target successor of RETs.
        if (!this->tailBlock)
        {
            this->tailBlock = block;
        }
    }

    // Hook up the successor edges
    if (lastInstr->EndsBasicBlock())
    {
        BasicBlock * blockTarget = nullptr;

        if (lastInstr->IsBranchInstr())
        {
            // Hook up a successor edge to the branch target.
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();

            if(branchInstr->IsMultiBranch())
            {
                BasicBlock * blockMultiBrTarget;

                IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();

                // One successor edge per unique multi-branch target label.
                multiBranchInstr->MapUniqueMultiBrLabels([&](IR::LabelInstr * labelInstr) -> void
                {
                    blockMultiBrTarget = SetBlockTargetAndLoopFlag(labelInstr);
                    this->AddEdge(block, blockMultiBrTarget);
                });
            }
            else
            {
                IR::LabelInstr * labelInstr = branchInstr->GetTarget();
                blockTarget = SetBlockTargetAndLoopFlag(labelInstr);
                if (branchInstr->IsConditional())
                {
                    IR::Instr *instrNext = branchInstr->GetNextRealInstrOrLabel();

                    if (instrNext->IsLabelInstr())
                    {
                        // Also flag the fall-through label of the conditional branch.
                        SetBlockTargetAndLoopFlag(instrNext->AsLabelInstr());
                    }
                }
            }
        }
        else if (lastInstr->m_opcode == Js::OpCode::Ret && block != this->tailBlock)
        {
            // RET flows to the function's tail block.
            blockTarget = this->tailBlock;
        }

        if (blockTarget)
        {
            this->AddEdge(block, blockTarget);
        }
    }

    if (lastInstr->HasFallThrough())
    {
        // Add a branch to next instruction so that we don't have to update the flow graph
        // when the glob opt tries to insert instructions.
        // We don't run the globopt with try/catch, don't need to insert branch to next for fall through blocks.
        if (!this->func->HasTry() && !lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * instr = IR::BranchInstr::New(Js::OpCode::Br,
                lastInstr->m_next->AsLabelInstr(), lastInstr->m_func);
            instr->SetByteCodeOffset(lastInstr->m_next);
            lastInstr->InsertAfter(instr);
            lastInstr = instr;
        }
        this->AddEdge(block, nextBlock);
    }

    block->SetBlockNum(this->blockCount++);
    block->SetFirstInstr(firstInstr);
    block->SetLastInstr(lastInstr);

    // Prepend the block to the graph's block list.
    if (this->blockList)
    {
        this->blockList->prev = block;
    }
    block->next = this->blockList;
    this->blockList = block;

    return block;
}
  918. BasicBlock *
  919. FlowGraph::SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr)
  920. {
  921. BasicBlock * blockTarget = nullptr;
  922. blockTarget = labelInstr->GetBasicBlock();
  923. if (blockTarget == nullptr)
  924. {
  925. blockTarget = BasicBlock::New(this);
  926. labelInstr->SetBasicBlock(blockTarget);
  927. }
  928. if (labelInstr->m_isLoopTop)
  929. {
  930. blockTarget->isLoopHeader = true;
  931. this->hasLoop = true;
  932. }
  933. return blockTarget;
  934. }
  935. ///----------------------------------------------------------------------------
  936. ///
  937. /// FlowGraph::AddEdge
  938. ///
  939. /// Add an edge connecting the two given blocks.
  940. ///
  941. ///----------------------------------------------------------------------------
  942. FlowEdge *
  943. FlowGraph::AddEdge(BasicBlock * blockPred, BasicBlock * blockSucc)
  944. {
  945. FlowEdge * edge = FlowEdge::New(this);
  946. edge->SetPred(blockPred);
  947. edge->SetSucc(blockSucc);
  948. blockPred->AddSucc(edge, this);
  949. blockSucc->AddPred(edge, this);
  950. return edge;
  951. }
///----------------------------------------------------------------------------
///
/// FlowGraph::Destroy
///
/// Remove all references to FG structures from the IR in preparation for freeing
/// the FG.
///
///----------------------------------------------------------------------------
void
FlowGraph::Destroy(void)
{
    BOOL fHasTry = this->func->HasTry();
    Region ** blockToRegion = nullptr;
    if (fHasTry)
    {
        // Map from block number to EH region, filled in by UpdateRegionForBlock below.
        blockToRegion = JitAnewArrayZ(this->alloc, Region*, this->blockCount);
        // Do unreachable code removal up front to avoid problems
        // with unreachable back edges, etc.
        this->RemoveUnreachableBlocks();
    }

    FOREACH_BLOCK_ALL(block, this)
    {
        IR::Instr * firstInstr = block->GetFirstInstr();
        if (block->isDeleted && !block->isDead)
        {
            if (firstInstr->IsLabelInstr())
            {
                IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
                labelInstr->UnlinkBasicBlock();
                // Removing the label for non try blocks as we have a deleted block which has the label instruction
                // still not removed; this prevents the assert for cases where the deleted blocks fall through to a helper block,
                // i.e. helper introduced by polymorphic inlining bailout.
                // Skipping Try blocks as we have dependency on blocks to get the last instr(see below in this function)
                if (!fHasTry)
                {
                    if (this->func->GetJnFunction()->IsGenerator())
                    {
                        // the label could be a yield resume label, in which case we also need to remove it from the YieldOffsetResumeLabels list
                        this->func->MapUntilYieldOffsetResumeLabels([this, &labelInstr](int i, const YieldOffsetResumeLabel& yorl)
                        {
                            if (labelInstr == yorl.Second())
                            {
                                labelInstr->m_hasNonBranchRef = false;
                                this->func->RemoveYieldOffsetResumeLabel(yorl);
                                return true;
                            }
                            return false;
                        });
                    }
                    Assert(labelInstr->IsUnreferenced());
                    labelInstr->Remove();
                }
            }
            continue;
        }

        if (block->isLoopHeader && !block->isDead)
        {
            // Mark the tail block of this loop (the last back-edge). The register allocator
            // uses this to lexically find loops.
            BasicBlock *loopTail = nullptr;
            AssertMsg(firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->m_isLoopTop,
                "Label not marked as loop top...");
            FOREACH_BLOCK_IN_LOOP(loopBlock, block->loop)
            {
                FOREACH_SUCCESSOR_BLOCK(succ, loopBlock)
                {
                    if (succ == block)
                    {
                        // loopBlock branches back to the header: it's a back-edge source.
                        loopTail = loopBlock;
                        break;
                    }
                } NEXT_SUCCESSOR_BLOCK;
            } NEXT_BLOCK_IN_LOOP;

            if (loopTail)
            {
                AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
                block->loop->SetLoopTopInstr(block->GetFirstInstr()->AsLabelInstr());
            }
            else
            {
                // This loop doesn't have a back-edge: that is, it is not a loop
                // anymore...
                firstInstr->AsLabelInstr()->m_isLoopTop = FALSE;
            }
        }

        if (fHasTry)
        {
            // Assign an EH region to this block (recorded in blockToRegion and
            // on the block's label).
            this->UpdateRegionForBlock(block, blockToRegion);
        }

        if (firstInstr->IsLabelInstr())
        {
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->UnlinkBasicBlock();
            if (labelInstr->IsUnreferenced() && !fHasTry)
            {
                // This is an unreferenced label, probably added by FG building.
                // Delete it now to make extended basic blocks visible.
                if (firstInstr == block->GetLastInstr())
                {
                    labelInstr->Remove();
                    continue;
                }
                else
                {
                    labelInstr->Remove();
                }
            }
        }

        // We don't run the globopt with try/catch, don't need to remove branch to next for fall through blocks
        IR::Instr * lastInstr = block->GetLastInstr();
        if (!fHasTry && lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if (!branchInstr->IsConditional() && branchInstr->GetTarget() == branchInstr->m_next)
            {
                // Remove branch to next
                branchInstr->Remove();
            }
        }
    }
    NEXT_BLOCK;

#if DBG
    if (fHasTry)
    {
        // Now that all blocks have regions, we should see consistently propagated regions at all
        // block boundaries.
        FOREACH_BLOCK(block, this)
        {
            Region * region = blockToRegion[block->GetBlockNum()];
            Region * predRegion = nullptr;
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = blockToRegion[predBlock->GetBlockNum()];
                if (predBlock->GetLastInstr() == nullptr)
                {
                    AssertMsg(region == predRegion, "Bad region propagation through empty block");
                }
                else
                {
                    // Validate the region transition implied by the predecessor's terminator.
                    switch (predBlock->GetLastInstr()->m_opcode)
                    {
                    case Js::OpCode::TryCatch:
                    case Js::OpCode::TryFinally:
                        AssertMsg(region->GetParent() == predRegion, "Bad region prop on entry to try-catch/finally");
                        if (block->GetFirstInstr() == predBlock->GetLastInstr()->AsBranchInstr()->GetTarget())
                        {
                            if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::TryCatch)
                            {
                                AssertMsg(region->GetType() == RegionTypeCatch, "Bad region type on entry to catch");
                            }
                            else
                            {
                                AssertMsg(region->GetType() == RegionTypeFinally, "Bad region type on entry to finally");
                            }
                        }
                        else
                        {
                            AssertMsg(region->GetType() == RegionTypeTry, "Bad region type on entry to try");
                        }
                        break;
                    case Js::OpCode::Leave:
                    case Js::OpCode::LeaveNull:
                        AssertMsg(region == predRegion->GetParent() || (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;

                    // If the try region has a branch out of the loop,
                    // - the branch is moved out of the loop as part of break block removal, and
                    // - BrOnException is inverted to BrOnNoException and a Br is inserted after it.
                    // Otherwise,
                    // - FullJit: BrOnException is removed in the forward pass.
                    case Js::OpCode::BrOnException:
                        Assert(!this->func->DoGlobOpt());
                        // Intentional fall-through: shares the BrOnNoException checks.
                    case Js::OpCode::BrOnNoException:
                        Assert(this->func->HasTry() &&
                            ((!this->func->HasFinally() && !this->func->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryCatchPhase, this->func)) ||
                            (this->func->IsSimpleJit() && this->func->GetJnFunction()->DoJITLoopBody()))); // should be relaxed as more bailouts are added in Simple Jit

                        Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch);
                        if (region->GetType() == RegionTypeCatch)
                        {
                            Assert((predRegion->GetType() == RegionTypeTry) || (predRegion->GetType() == RegionTypeCatch));
                        }
                        else if (region->GetType() == RegionTypeTry)
                        {
                            Assert(region == predRegion);
                        }
                        break;
                    case Js::OpCode::Br:
                        if (region->GetType() == RegionTypeCatch && region != predRegion)
                        {
                            AssertMsg(predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else
                        {
                            AssertMsg(region == predRegion, "Bad region propagation through interior block");
                        }
                        break;
                    default:
                        AssertMsg(region == predRegion, "Bad region propagation through interior block");
                        break;
                    }
                }
            }
            NEXT_PREDECESSOR_BLOCK;

            // Sanity-check the region's own matching-region links.
            switch (region->GetType())
            {
            case RegionTypeRoot:
                Assert(!region->GetMatchingTryRegion() && !region->GetMatchingCatchRegion() && !region->GetMatchingFinallyRegion());
                break;
            case RegionTypeTry:
                Assert(!(region->GetMatchingCatchRegion() && region->GetMatchingFinallyRegion()));
                break;
            case RegionTypeCatch:
            case RegionTypeFinally:
                Assert(region->GetMatchingTryRegion());
                break;
            }
        }
        NEXT_BLOCK;

        FOREACH_BLOCK_DEAD_OR_ALIVE(block, this)
        {
            if (block->GetFirstInstr()->IsLabelInstr())
            {
                IR::LabelInstr *labelInstr = block->GetFirstInstr()->AsLabelInstr();
                if (labelInstr->IsUnreferenced())
                {
                    // This is an unreferenced label, probably added by FG building.
                    // Delete it now to make extended basic blocks visible.
                    labelInstr->Remove();
                }
            }
        } NEXT_BLOCK_DEAD_OR_ALIVE;
    }
#endif

    this->func->isFlowGraphValid = false;
}
// Propagate the region forward from the block's predecessor(s), tracking the effect
// of the flow transition. Record the region in the block-to-region map provided
// and on the label at the entry to the block (if any).
void
FlowGraph::UpdateRegionForBlock(BasicBlock * block, Region ** blockToRegion)
{
    Region *region;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        // The label already carries a region (set by an earlier pass); record and reuse it.
        Assert(this->func->HasTry() && (this->func->DoOptimizeTryCatch() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        blockToRegion[block->GetBlockNum()] = firstInstr->AsLabelInstr()->GetRegion();
        return;
    }

    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        // We require that there be one, since we've already removed unreachable blocks.
        region = nullptr;
        FOREACH_PREDECESSOR_BLOCK(predBlock, block)
        {
            AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
            predRegion = blockToRegion[predBlock->GetBlockNum()];
            if (predRegion != nullptr)
            {
                region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                break;
            }
        }
        NEXT_PREDECESSOR_BLOCK;
    }

    AnalysisAssertMsg(region != nullptr, "Failed to find region for block");
    if (!region->ehBailoutData)
    {
        // First time this region is seen: allocate its EH bailout data.
        region->AllocateEHBailoutData(this->func, tryInstr);
    }

    // Record the region in the block-to-region map.
    blockToRegion[block->GetBlockNum()] = region;
    if (firstInstr->IsLabelInstr())
    {
        // Record the region on the label and make sure it stays around as a region
        // marker if we're entering a region at this point.
        IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
        labelInstr->SetRegion(region);
        if (region != predRegion)
        {
            labelInstr->m_hasNonBranchRef = true;
        }
    }
}
// Compute the EH region for block given one predecessor (predBlock) whose
// region (predRegion) is already known, by examining the opcode that ends the
// predecessor. When the transition enters a try, the out-parameter tryInstr
// is set to the TryCatch/TryFinally instruction.
Region *
FlowGraph::PropagateRegionFromPred(BasicBlock * block, BasicBlock * predBlock, Region * predRegion, IR::Instr * &tryInstr)
{
    // Propagate predRegion to region, looking at the flow transition for an opcode
    // that affects the region.
    Region * region = nullptr;
    IR::Instr * predLastInstr = predBlock->GetLastInstr();
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (predLastInstr == nullptr)
    {
        // Empty block: trivially propagate the region.
        region = predRegion;
    }
    else
    {
        Region * tryRegion = nullptr;
        IR::LabelInstr * tryInstrNext = nullptr;
        switch (predLastInstr->m_opcode)
        {
        case Js::OpCode::TryCatch:
            // Entry to a try-catch. See whether we're entering the try or the catch
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target is the handler: we're entering the catch.
                region = Region::New(RegionTypeCatch, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingCatchRegion(region);
            }
            else
            {
                // Fall-through: we're entering the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::TryFinally:
            // Entry to a try-finally. See whether we're entering the try or the finally
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target is the handler: we're entering the finally.
                region = Region::New(RegionTypeFinally, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region);
            }
            else
            {
                // Fall-through: we're entering the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::Leave:
        case Js::OpCode::LeaveNull:
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;

        default:
            // Normal (non-EH) transition: just propagate the region.
            region = predRegion;
            break;
        }
    }
    return region;
}
// Insert compBlock into loop's block list adjacent to targetBlock — after it
// when postTarget is true, otherwise before it — then recurse so every
// enclosing parent loop's block list stays consistent.
void
FlowGraph::InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget)
{
    if (loop)
    {
        bool found = false;
        FOREACH_BLOCK_IN_LOOP_EDITING(loopBlock, loop, iter)
        {
            if (loopBlock == targetBlock)
            {
                found = true;
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        if (found)
        {
            // NOTE: 'iter' (declared by the editing macro) is still positioned
            // at targetBlock after the break; advance past it to insert after.
            if (postTarget)
            {
                iter.Next();
            }
            iter.InsertBefore(compBlock);
        }
        // Propagate the insertion to the enclosing loop, if any.
        InsertCompBlockToLoopList(loop->parent, compBlock, targetBlock, postTarget);
    }
}
// Insert a block on the given edge
BasicBlock *
FlowGraph::InsertAirlockBlock(FlowEdge * edge)
{
    // Splits the edge (sourceBlock -> sinkBlock) by introducing an "airlock"
    // block so flow becomes: sourceBlock -> airlockBlock -> sinkBlock.
    // The airlock block is placed physically right before sinkBlock and ends
    // with an unconditional Br to sinkBlock's label.
    BasicBlock * airlockBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * sinkPrevBlock = sinkBlock->prev;
    IR::Instr * sinkPrevBlockLastInstr = sinkPrevBlock->GetLastInstr();
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    airlockBlock->loop = sinkBlock->loop;
    airlockBlock->SetBlockNum(this->blockCount++);
#ifdef DBG
    airlockBlock->isAirLockBlock = true;
#endif
    //
    // Fixup block linkage
    //

    // airlock block is inserted right before sinkBlock
    airlockBlock->prev = sinkBlock->prev;
    sinkBlock->prev = airlockBlock;

    airlockBlock->next = sinkBlock;
    airlockBlock->prev->next = airlockBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> airlockBlock
    this->AddEdge(sourceBlock, airlockBlock);

    // Add airlockBlock -> sinkBlock, reusing the original edge object.
    edge->SetPred(airlockBlock);
    airlockBlock->AddSucc(edge, this);

    // Fixup data use count
    airlockBlock->SetDataUseCount(1);
    sourceBlock->DecrementDataUseCount();

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining: the airlock label is
    // created in the same func as sinkBlock's label.
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);

    sinkPrevBlockLastInstr->InsertAfter(airlockLabel);

    airlockBlock->SetFirstInstr(airlockLabel);
    airlockLabel->SetBasicBlock(airlockBlock);

    // Add br to sinkBlock from airlock block
    IR::BranchInstr *airlockBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    airlockBr->SetByteCodeOffset(sinkLabel);
    airlockLabel->InsertAfter(airlockBr);
    airlockBlock->SetLastInstr(airlockBr);

    airlockLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock: retarget its branch from sinkLabel to
    // the new airlock label.
    IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
    if (sourceBr->IsMultiBranch())
    {
        const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, airlockLabel);
        Assert(replaced);
    }
    else if (sourceBr->GetTarget() == sinkLabel)
    {
        sourceBr->SetTarget(airlockLabel);
    }

    // If sinkPrevBlock used to fall through into sinkBlock, that fall-through
    // now lands in the airlock block instead. Insert a compensation block on
    // the (sinkPrevBlock -> sinkBlock) edge to restore the original flow.
    if (!sinkPrevBlockLastInstr->IsBranchInstr() || sinkPrevBlockLastInstr->AsBranchInstr()->HasFallThrough())
    {
        if (!sinkPrevBlock->isDeleted)
        {
            FlowEdge *dstEdge = this->FindEdge(sinkPrevBlock, sinkBlock);
            if (dstEdge) // Possibility that sourceblock may be same as sinkPrevBlock
            {
                BasicBlock* compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert comp block to loop list*/, true);
                compensationBlock->IncrementDataUseCount();
                // We need to skip airlock compensation block in globopt as its inserted while globopt is iteration over the blocks.
                compensationBlock->isAirLockCompensationBlock = true;
            }
        }
    }

#if DBG_DUMP
    this->Dump(true, L"\n After insertion of airlock block \n");
#endif

    return airlockBlock;
}
// Insert a block on the given edge
BasicBlock *
FlowGraph::InsertCompensationCodeForBlockMove(FlowEdge * edge, bool insertToLoopList, bool sinkBlockLoop)
{
    // Splits the edge (sourceBlock -> sinkBlock) with a compensation block
    // placed physically right after sourceBlock, ending in a Br to sinkBlock.
    // 'sinkBlockLoop' selects whether the new block joins the sink's loop
    // nest or the source's when registering it in the loop block lists.
    BasicBlock * compBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * fallthroughBlock = sourceBlock->next;
    IR::Instr *  sourceLastInstr = sourceBlock->GetLastInstr();

    compBlock->SetBlockNum(this->blockCount++);

    if (insertToLoopList)
    {
        // For flow graph edits in
        if (sinkBlockLoop)
        {
            if (sinkBlock->loop && sinkBlock->loop->GetHeadBlock() == sinkBlock)
            {
                // BLUE 531255: sinkblock may be the head block of new loop, we shouldn't insert compensation block to that loop
                // Insert it to all the parent loop lists.
                compBlock->loop = sinkBlock->loop->parent;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false);
            }
            else
            {
                compBlock->loop = sinkBlock->loop;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false); // sinkBlock or fallthroughBlock?
            }
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSink = true;
#endif
        }
        else
        {
            compBlock->loop = sourceBlock->loop;
            InsertCompBlockToLoopList(compBlock->loop, compBlock, sourceBlock, true);
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSource = true;
#endif
        }
    }

    //
    // Fixup block linkage
    //

    // compensation block is inserted right after sourceBlock
    compBlock->next = fallthroughBlock;
    fallthroughBlock->prev = compBlock;

    compBlock->prev = sourceBlock;
    sourceBlock->next = compBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> compBlock
    this->AddEdge(sourceBlock, compBlock);

    // Add compBlock -> sinkBlock, reusing the original edge object.
    edge->SetPred(compBlock);
    compBlock->AddSucc(edge, this);

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining: the new label lives in
    // the same func as sinkBlock's label.
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *compLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);
    sourceLastInstr->InsertAfter(compLabel);
    compBlock->SetFirstInstr(compLabel);
    compLabel->SetBasicBlock(compBlock);

    // Add br to sinkBlock from compensation block
    IR::BranchInstr *compBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    compBr->SetByteCodeOffset(sinkLabel);
    compLabel->InsertAfter(compBr);
    compBlock->SetLastInstr(compBr);

    compLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock: only a multi-branch needs retargeting
    // here; a conditional's fall-through naturally enters the new block.
    if (sourceLastInstr->IsBranchInstr())
    {
        IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
        Assert(sourceBr->IsMultiBranch() || sourceBr->IsConditional());
        if (sourceBr->IsMultiBranch())
        {
            const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, compLabel);
            Assert(replaced);
        }
    }

    return compBlock;
}
void
FlowGraph::RemoveUnreachableBlocks()
{
    // Forward reachability sweep: clear all visited flags, seed the entry
    // block, then walk the blocks once, propagating the flag to successors of
    // reached blocks and deleting blocks that were never reached.
    // NOTE(review): a single forward pass is sufficient only if every
    // reachable block's flag is set before the editing walk gets to it
    // (i.e. reachability propagates forward in layout order) — this matches
    // how the walk is written, but callers should not reorder blocks first.
    AnalysisAssert(this->blockList);

    FOREACH_BLOCK(block, this)
    {
        block->isVisited = false;
    }
    NEXT_BLOCK;

    // The entry block is trivially reachable.
    this->blockList->isVisited = true;

    FOREACH_BLOCK_EDITING(block, this)
    {
        if (block->isVisited)
        {
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                succ->isVisited = true;
            } NEXT_SUCCESSOR_BLOCK;
        }
        else
        {
            this->RemoveBlock(block);
        }
    }
    NEXT_BLOCK_EDITING;
}
  1539. // If block has no predecessor, remove it.
  1540. bool
  1541. FlowGraph::RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt)
  1542. {
  1543. bool isDead = false;
  1544. if ((block->GetPredList() == nullptr || block->GetPredList()->Empty()) && block != this->func->m_fg->blockList)
  1545. {
  1546. isDead = true;
  1547. }
  1548. else if (block->isLoopHeader)
  1549. {
  1550. // A dead loop still has back-edges pointing to it...
  1551. isDead = true;
  1552. FOREACH_PREDECESSOR_BLOCK(pred, block)
  1553. {
  1554. if (!block->loop->IsDescendentOrSelf(pred->loop))
  1555. {
  1556. isDead = false;
  1557. }
  1558. } NEXT_PREDECESSOR_BLOCK;
  1559. }
  1560. if (isDead)
  1561. {
  1562. this->RemoveBlock(block, globOpt);
  1563. return true;
  1564. }
  1565. return false;
  1566. }
IR::Instr *
FlowGraph::PeepTypedCm(IR::Instr *instr)
{
    // Basic pattern, peep:
    //      t1 = CmEq a, b
    //      BrTrue_I4 $L1, t1
    // Into:
    //      t1 = 1
    //      BrEq $L1, a, b
    //      t1 = 0
    //
    // Returns the rewritten branch on success; nullptr (with the IR left
    // untouched) when the pattern does not match.
    IR::Instr * instrNext = instr->GetNextRealInstrOrLabel();

    // find intermediate Lds
    IR::Instr * instrLd = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    IR::Instr * instrLd2 = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd2 = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    // Find BrTrue/BrFalse
    IR::Instr *instrBr;
    bool brIsTrue;
    if (instrNext->m_opcode == Js::OpCode::BrTrue_I4)
    {
        instrBr = instrNext;
        brIsTrue = true;
    }
    else if (instrNext->m_opcode == Js::OpCode::BrFalse_I4)
    {
        instrBr = instrNext;
        brIsTrue = false;
    }
    else
    {
        return nullptr;
    }

    // if we have intermediate Lds, then make sure pattern is:
    //      t1 = CmEq a, b
    //      t2 = Ld_A t1
    //      BrTrue $L1, t2
    if (instrLd && !instrLd->GetSrc1()->IsEqual(instr->GetDst()))
    {
        return nullptr;
    }

    if (instrLd2 && !instrLd2->GetSrc1()->IsEqual(instrLd->GetDst()))
    {
        return nullptr;
    }

    // Make sure we have:
    //      t1 = CmEq a, b
    //      BrTrue/BrFalse t1
    if (!(instr->GetDst()->IsEqual(instrBr->GetSrc1()) || (instrLd && instrLd->GetDst()->IsEqual(instrBr->GetSrc1())) || (instrLd2 && instrLd2->GetDst()->IsEqual(instrBr->GetSrc1()))))
    {
        return nullptr;
    }

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();

    IR::Instr * instrNew;
    IR::Opnd * tmpOpnd;
    // If a compare source is also one of the dsts being rewritten below, the
    // constant stores would clobber it before the branch reads it — copy it
    // into a fresh temp first.
    if (instr->GetDst()->IsEqual(src1) || (instrLd && instrLd->GetDst()->IsEqual(src1)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src1)))
    {
        Assert(src1->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src1, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src1 = tmpOpnd;
    }

    if (instr->GetDst()->IsEqual(src2) || (instrLd && instrLd->GetDst()->IsEqual(src2)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src2)))
    {
        Assert(src2->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src2, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src2 = tmpOpnd;
    }

    // The branch now compares the original operands directly.
    instrBr->ReplaceSrc1(src1);
    instrBr->SetSrc2(src2);

    // Map the compare opcode to the corresponding conditional branch.
    Js::OpCode newOpcode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
        newOpcode = Js::OpCode::BrEq_I4;
        break;
    case Js::OpCode::CmGe_I4:
        newOpcode = Js::OpCode::BrGe_I4;
        break;
    case Js::OpCode::CmGt_I4:
        newOpcode = Js::OpCode::BrGt_I4;
        break;
    case Js::OpCode::CmLt_I4:
        newOpcode = Js::OpCode::BrLt_I4;
        break;
    case Js::OpCode::CmLe_I4:
        newOpcode = Js::OpCode::BrLe_I4;
        break;
    case Js::OpCode::CmUnGe_I4:
        newOpcode = Js::OpCode::BrUnGe_I4;
        break;
    case Js::OpCode::CmUnGt_I4:
        newOpcode = Js::OpCode::BrUnGt_I4;
        break;
    case Js::OpCode::CmUnLt_I4:
        newOpcode = Js::OpCode::BrUnLt_I4;
        break;
    case Js::OpCode::CmUnLe_I4:
        newOpcode = Js::OpCode::BrUnLe_I4;
        break;
    case Js::OpCode::CmNeq_I4:
        newOpcode = Js::OpCode::BrNeq_I4;
        break;
    case Js::OpCode::CmEq_A:
        newOpcode = Js::OpCode::BrEq_A;
        break;
    case Js::OpCode::CmGe_A:
        newOpcode = Js::OpCode::BrGe_A;
        break;
    case Js::OpCode::CmGt_A:
        newOpcode = Js::OpCode::BrGt_A;
        break;
    case Js::OpCode::CmLt_A:
        newOpcode = Js::OpCode::BrLt_A;
        break;
    case Js::OpCode::CmLe_A:
        newOpcode = Js::OpCode::BrLe_A;
        break;
    case Js::OpCode::CmUnGe_A:
        newOpcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::CmUnGt_A:
        newOpcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmUnLt_A:
        newOpcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::CmUnLe_A:
        newOpcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::CmNeq_A:
        newOpcode = Js::OpCode::BrNeq_A;
        break;
    case Js::OpCode::CmSrEq_A:
        newOpcode = Js::OpCode::BrSrEq_A;
        break;
    case Js::OpCode::CmSrNeq_A:
        newOpcode = Js::OpCode::BrSrNeq_A;
        break;
    default:
        newOpcode = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    instrBr->m_opcode = newOpcode;

    if (brIsTrue)
    {
        // Taken path sees 1 (stored before the branch); the loads inserted
        // after the branch patch the fall-through (condition false) to 0.
        instr->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }
    else
    {
        // BrFalse: invert the branch, so the taken path is condition false
        // (value 0 stored before the branch) and fall-through is 1.
        instrBr->AsBranchInstr()->Invert();

        instr->SetSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }

    return instrBr;
}
  1772. IR::Instr *
  1773. FlowGraph::PeepCm(IR::Instr *instr)
  1774. {
  1775. // Basic pattern, peep:
  1776. // t1 = CmEq a, b
  1777. // t2 = Ld_A t1
  1778. // BrTrue $L1, t2
  1779. // Into:
  1780. // t1 = True
  1781. // t2 = True
  1782. // BrEq $L1, a, b
  1783. // t1 = False
  1784. // t2 = False
  1785. //
  1786. // The true/false Ld_A's will most likely end up being dead-stores...
  1787. // Alternate Pattern
  1788. // t1= CmEq a, b
  1789. // BrTrue $L1, t1
  1790. // Into:
  1791. // BrEq $L1, a, b
  1792. Func *func = instr->m_func;
  1793. // Find Ld_A
  1794. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  1795. IR::Instr *inlineeEndInstr = nullptr;
  1796. IR::Instr *instrNew;
  1797. IR::Instr *instrLd = nullptr, *instrLd2 = nullptr;
  1798. IR::Instr *instrByteCode = instrNext;
  1799. bool ldFound = false;
  1800. IR::Opnd *brSrc = instr->GetDst();
  1801. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(instr->GetDst()))
  1802. {
  1803. ldFound = true;
  1804. instrLd = instrNext;
  1805. brSrc = instrNext->GetDst();
  1806. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  1807. {
  1808. return nullptr;
  1809. }
  1810. instrNext = instrLd->GetNextRealInstrOrLabel();
  1811. // Is there a second Ld_A?
  1812. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(brSrc))
  1813. {
  1814. // We have:
  1815. // t1 = Cm
  1816. // t2 = t1 // ldSrc = t1
  1817. // t3 = t2 // ldDst = t3
  1818. // BrTrue/BrFalse t3
  1819. instrLd2 = instrNext;
  1820. brSrc = instrLd2->GetDst();
  1821. instrNext = instrLd2->GetNextRealInstrOrLabel();
  1822. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  1823. {
  1824. return nullptr;
  1825. }
  1826. }
  1827. }
  1828. // Skip over InlineeEnd
  1829. if (instrNext->m_opcode == Js::OpCode::InlineeEnd)
  1830. {
  1831. inlineeEndInstr = instrNext;
  1832. instrNext = inlineeEndInstr->GetNextRealInstrOrLabel();
  1833. }
  1834. // Find BrTrue/BrFalse
  1835. bool brIsTrue;
  1836. if (instrNext->m_opcode == Js::OpCode::BrTrue_A)
  1837. {
  1838. brIsTrue = true;
  1839. }
  1840. else if (instrNext->m_opcode == Js::OpCode::BrFalse_A)
  1841. {
  1842. brIsTrue = false;
  1843. }
  1844. else
  1845. {
  1846. return nullptr;
  1847. }
  1848. IR::Instr *instrBr = instrNext;
  1849. // Make sure we have:
  1850. // t1 = Ld_A
  1851. // BrTrue/BrFalse t1
  1852. if (!instr->GetDst()->IsEqual(instrBr->GetSrc1()) && !brSrc->IsEqual(instrBr->GetSrc1()))
  1853. {
  1854. return nullptr;
  1855. }
  1856. //
  1857. // We have a match. Generate the new branch
  1858. //
  1859. // BrTrue/BrFalse t1
  1860. // Keep a copy of the inliner func and the bytecode offset of the original BrTrue/BrFalse if we end up inserting a new branch out of the inlinee,
  1861. // and sym id of t1 for proper restoration on a bailout before the branch.
  1862. Func* origBrFunc = instrBr->m_func;
  1863. uint32 origBrByteCodeOffset = instrBr->GetByteCodeOffset();
  1864. uint32 origBranchSrcSymId = instrBr->GetSrc1()->GetStackSym()->m_id;
  1865. instrBr->Unlink();
  1866. instr->InsertBefore(instrBr);
  1867. instrBr->ClearByteCodeOffset();
  1868. instrBr->SetByteCodeOffset(instr);
  1869. instrBr->FreeSrc1();
  1870. instrBr->SetSrc1(instr->UnlinkSrc1());
  1871. instrBr->SetSrc2(instr->UnlinkSrc2());
  1872. instrBr->m_func = instr->m_func;
  1873. Js::OpCode newOpcode;
  1874. switch(instr->m_opcode)
  1875. {
  1876. case Js::OpCode::CmEq_A:
  1877. newOpcode = Js::OpCode::BrEq_A;
  1878. break;
  1879. case Js::OpCode::CmGe_A:
  1880. newOpcode = Js::OpCode::BrGe_A;
  1881. break;
  1882. case Js::OpCode::CmGt_A:
  1883. newOpcode = Js::OpCode::BrGt_A;
  1884. break;
  1885. case Js::OpCode::CmLt_A:
  1886. newOpcode = Js::OpCode::BrLt_A;
  1887. break;
  1888. case Js::OpCode::CmLe_A:
  1889. newOpcode = Js::OpCode::BrLe_A;
  1890. break;
  1891. case Js::OpCode::CmUnGe_A:
  1892. newOpcode = Js::OpCode::BrUnGe_A;
  1893. break;
  1894. case Js::OpCode::CmUnGt_A:
  1895. newOpcode = Js::OpCode::BrUnGt_A;
  1896. break;
  1897. case Js::OpCode::CmUnLt_A:
  1898. newOpcode = Js::OpCode::BrUnLt_A;
  1899. break;
  1900. case Js::OpCode::CmUnLe_A:
  1901. newOpcode = Js::OpCode::BrUnLe_A;
  1902. break;
  1903. case Js::OpCode::CmNeq_A:
  1904. newOpcode = Js::OpCode::BrNeq_A;
  1905. break;
  1906. case Js::OpCode::CmSrEq_A:
  1907. newOpcode = Js::OpCode::BrSrEq_A;
  1908. break;
  1909. case Js::OpCode::CmSrNeq_A:
  1910. newOpcode = Js::OpCode::BrSrNeq_A;
  1911. break;
  1912. default:
  1913. Assert(UNREACHED);
  1914. __assume(UNREACHED);
  1915. }
  1916. instrBr->m_opcode = newOpcode;
  1917. IR::AddrOpnd* trueOpnd = IR::AddrOpnd::New(func->GetScriptContext()->GetLibrary()->GetTrue(), IR::AddrOpndKindDynamicVar, func, true);
  1918. IR::AddrOpnd* falseOpnd = IR::AddrOpnd::New(func->GetScriptContext()->GetLibrary()->GetFalse(), IR::AddrOpndKindDynamicVar, func, true);
  1919. trueOpnd->SetValueType(ValueType::Boolean);
  1920. falseOpnd->SetValueType(ValueType::Boolean);
  1921. if (ldFound)
  1922. {
  1923. // Split Ld_A into "Ld_A TRUE"/"Ld_A FALSE"
  1924. if (brIsTrue)
  1925. {
  1926. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  1927. instrNew->SetByteCodeOffset(instrBr);
  1928. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1929. instrBr->InsertBefore(instrNew);
  1930. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), trueOpnd, instrBr->m_func);
  1931. instrNew->SetByteCodeOffset(instrBr);
  1932. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1933. instrBr->InsertBefore(instrNew);
  1934. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrLd->m_func);
  1935. instrLd->InsertBefore(instrNew);
  1936. instrNew->SetByteCodeOffset(instrLd);
  1937. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1938. instrLd->ReplaceSrc1(falseOpnd);
  1939. if (instrLd2)
  1940. {
  1941. instrLd2->ReplaceSrc1(falseOpnd);
  1942. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd2->GetDst(), trueOpnd, instrBr->m_func);
  1943. instrBr->InsertBefore(instrNew);
  1944. instrNew->SetByteCodeOffset(instrBr);
  1945. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1946. }
  1947. }
  1948. else
  1949. {
  1950. instrBr->AsBranchInstr()->Invert();
  1951. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrBr->m_func);
  1952. instrBr->InsertBefore(instrNew);
  1953. instrNew->SetByteCodeOffset(instrBr);
  1954. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1955. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), falseOpnd, instrBr->m_func);
  1956. instrBr->InsertBefore(instrNew);
  1957. instrNew->SetByteCodeOffset(instrBr);
  1958. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1959. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrLd->m_func);
  1960. instrLd->InsertBefore(instrNew);
  1961. instrNew->SetByteCodeOffset(instrLd);
  1962. instrLd->ReplaceSrc1(trueOpnd);
  1963. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1964. if (instrLd2)
  1965. {
  1966. instrLd2->ReplaceSrc1(trueOpnd);
  1967. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  1968. instrBr->InsertBefore(instrNew);
  1969. instrNew->SetByteCodeOffset(instrBr);
  1970. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1971. }
  1972. }
  1973. }
  1974. // Fix InlineeEnd
  1975. if (inlineeEndInstr)
  1976. {
  1977. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr, instrByteCode , origBrFunc, origBrByteCodeOffset, origBranchSrcSymId);
  1978. }
  1979. if (instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
  1980. {
  1981. Assert(!instrBr->AsBranchInstr()->HasByteCodeReg());
  1982. StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;
  1983. instrBr->AsBranchInstr()->SetByteCodeReg(dstSym->GetByteCodeRegSlot());
  1984. }
  1985. instr->Remove();
  1986. //
  1987. // Try optimizing through a second branch.
  1988. // Peep:
  1989. //
  1990. // t2 = True;
  1991. // BrTrue $L1
  1992. // ...
  1993. // L1:
  1994. // t1 = Ld_A t2
  1995. // BrTrue $L2
  1996. //
  1997. // Into:
  1998. // t2 = True;
  1999. // t1 = True;
  2000. // BrTrue $L2 <---
  2001. // ...
  2002. // L1:
  2003. // t1 = Ld_A t2
  2004. // BrTrue $L2
  2005. //
  2006. // This cleanup helps expose second level Cm peeps.
  2007. IR::Instr *instrLd3 = instrBr->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2008. // Skip over branch to branch
  2009. while (instrLd3->m_opcode == Js::OpCode::Br)
  2010. {
  2011. instrLd3 = instrLd3->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2012. }
  2013. // Find Ld_A
  2014. if (instrLd3->m_opcode != Js::OpCode::Ld_A)
  2015. {
  2016. return instrBr;
  2017. }
  2018. IR::Instr *instrBr2 = instrLd3->GetNextRealInstrOrLabel();
  2019. IR::Instr *inlineeEndInstr2 = nullptr;
  2020. // InlineeEnd?
  2021. // REVIEW: Can we handle 2 inlineeEnds?
  2022. if (instrBr2->m_opcode == Js::OpCode::InlineeEnd && !inlineeEndInstr)
  2023. {
  2024. inlineeEndInstr2 = instrBr2;
  2025. instrBr2 = instrBr2->GetNextRealInstrOrLabel();
  2026. }
  2027. // Find branch
  2028. bool brIsTrue2;
  2029. if (instrBr2->m_opcode == Js::OpCode::BrTrue_A)
  2030. {
  2031. brIsTrue2 = true;
  2032. }
  2033. else if (instrBr2->m_opcode == Js::OpCode::BrFalse_A)
  2034. {
  2035. brIsTrue2 = false;
  2036. }
  2037. else
  2038. {
  2039. return nullptr;
  2040. }
  2041. // Make sure Ld_A operates on the right tmps.
  2042. if (!instrLd3->GetDst()->IsEqual(instrBr2->GetSrc1()) || !brSrc->IsEqual(instrLd3->GetSrc1()))
  2043. {
  2044. return nullptr;
  2045. }
  2046. if (instrLd3->GetDst()->IsEqual(instrBr->GetSrc1()) || instrLd3->GetDst()->IsEqual(instrBr->GetSrc2()))
  2047. {
  2048. return nullptr;
  2049. }
  2050. // Make sure that the reg we're assigning to is not live in the intervening instructions (if this is a forward branch).
  2051. if (instrLd3->GetByteCodeOffset() > instrBr->GetByteCodeOffset())
  2052. {
  2053. StackSym *symLd3 = instrLd3->GetDst()->AsRegOpnd()->m_sym;
  2054. if (IR::Instr::FindRegUseInRange(symLd3, instrBr->m_next, instrLd3))
  2055. {
  2056. return nullptr;
  2057. }
  2058. }
  2059. //
  2060. // We have a match!
  2061. //
  2062. if(inlineeEndInstr2)
  2063. {
  2064. origBrFunc = instrBr2->m_func;
  2065. origBrByteCodeOffset = instrBr2->GetByteCodeOffset();
  2066. origBranchSrcSymId = instrBr2->GetSrc1()->GetStackSym()->m_id;
  2067. }
  2068. // Fix Ld_A
  2069. if (brIsTrue)
  2070. {
  2071. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), trueOpnd, instrBr->m_func);
  2072. instrBr->InsertBefore(instrNew);
  2073. instrNew->SetByteCodeOffset(instrBr);
  2074. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2075. }
  2076. else
  2077. {
  2078. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), falseOpnd, instrBr->m_func);
  2079. instrBr->InsertBefore(instrNew);
  2080. instrNew->SetByteCodeOffset(instrBr);
  2081. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2082. }
  2083. IR::LabelInstr *brTarget2;
  2084. // Retarget branch
  2085. if (brIsTrue2 == brIsTrue)
  2086. {
  2087. brTarget2 = instrBr2->AsBranchInstr()->GetTarget();
  2088. }
  2089. else
  2090. {
  2091. brTarget2 = IR::LabelInstr::New(Js::OpCode::Label, instrBr2->m_func);
  2092. brTarget2->SetByteCodeOffset(instrBr2->m_next);
  2093. instrBr2->InsertAfter(brTarget2);
  2094. }
  2095. instrBr->AsBranchInstr()->SetTarget(brTarget2);
  2096. // InlineeEnd?
  2097. if (inlineeEndInstr2)
  2098. {
  2099. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr2, instrByteCode, origBrFunc, origBrByteCodeOffset, origBranchSrcSymId);
  2100. }
  2101. return instrBr;
  2102. }
void
FlowGraph::InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, uint32 origBranchSrcSymId)
{
    // Helper for PeepsCm code.
    //
    // We've skipped some InlineeEnd. Globopt expects to see these
    // on all flow paths out of the inlinee. Insert an InlineeEnd
    // on the new path:
    //      BrEq $L1, a, b
    // Becomes:
    //      BrNeq $L2, a, b
    //      InlineeEnd
    //      Br $L1
    //  L2:
    instrBr->AsBranchInstr()->Invert();

    IR::BranchInstr *newBr = IR::BranchInstr::New(Js::OpCode::Br, instrBr->AsBranchInstr()->GetTarget(), origBrFunc);
    newBr->SetByteCodeOffset(origByteCodeOffset);
    instrBr->InsertAfter(newBr);

    IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, instrBr->m_func);
    newLabel->SetByteCodeOffset(instrBytecode);
    newBr->InsertAfter(newLabel);
    instrBr->AsBranchInstr()->SetTarget(newLabel);

    // Clone the InlineeEnd onto the new path; it shares the original's srcs.
    IR::Instr *newInlineeEnd = IR::Instr::New(Js::OpCode::InlineeEnd, inlineeEndInstr->m_func);
    newInlineeEnd->SetSrc1(inlineeEndInstr->GetSrc1());
    newInlineeEnd->SetSrc2(inlineeEndInstr->GetSrc2());
    newInlineeEnd->SetByteCodeOffset(instrBytecode);
    newInlineeEnd->SetIsCloned(true);  // Mark it as cloned - this is used later by the inlinee args optimization
    newBr->InsertBefore(newInlineeEnd);

    // Keep the original branch source sym alive up to the new branch so a
    // bailout before it can restore the value.
    IR::ByteCodeUsesInstr * useOrigBranchSrcInstr = IR::ByteCodeUsesInstr::New(origBrFunc);
    useOrigBranchSrcInstr->SetByteCodeOffset(origByteCodeOffset);
    useOrigBranchSrcInstr->byteCodeUpwardExposedUsed = JitAnew(origBrFunc->m_alloc, BVSparse<JitArenaAllocator>,origBrFunc->m_alloc);
    useOrigBranchSrcInstr->byteCodeUpwardExposedUsed->Set(origBranchSrcSymId);
    newBr->InsertBefore(useOrigBranchSrcInstr);

    uint newBrFnNumber = newBr->m_func->m_workItem->GetFunctionNumber();
    Assert(newBrFnNumber == origBrFunc->m_workItem->GetFunctionNumber());

    // The function numbers of the new branch and the inlineeEnd instruction should be different (ensuring that the new branch is not added in the inlinee but in the inliner).
    // Only case when they can be same is recursive calls - inlinee and inliner are the same function
    Assert(newBrFnNumber != inlineeEndInstr->m_func->m_workItem->GetFunctionNumber() ||
        newBrFnNumber == inlineeEndInstr->m_func->GetParentFunc()->m_workItem->GetFunctionNumber());
}
  2143. BasicBlock *
  2144. BasicBlock::New(FlowGraph * graph)
  2145. {
  2146. BasicBlock * block;
  2147. block = JitAnew(graph->alloc, BasicBlock, graph->alloc, graph->GetFunc());
  2148. return block;
  2149. }
void
BasicBlock::AddPred(FlowEdge * edge, FlowGraph * graph)
{
    // Record 'edge' as an incoming edge of this block.
    this->predList.Prepend(graph->alloc, edge);
}
void
BasicBlock::AddSucc(FlowEdge * edge, FlowGraph * graph)
{
    // Record 'edge' as an outgoing edge of this block.
    this->succList.Prepend(graph->alloc, edge);
}
  2160. void
  2161. BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph)
  2162. {
  2163. this->RemovePred(block, graph, true, false);
  2164. }
  2165. void
  2166. BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph)
  2167. {
  2168. this->RemoveSucc(block, graph, true, false);
  2169. }
  2170. void
  2171. BasicBlock::RemoveDeadPred(BasicBlock *block, FlowGraph * graph)
  2172. {
  2173. this->RemovePred(block, graph, true, true);
  2174. }
  2175. void
  2176. BasicBlock::RemoveDeadSucc(BasicBlock *block, FlowGraph * graph)
  2177. {
  2178. this->RemoveSucc(block, graph, true, true);
  2179. }
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead)
{
    // Remove the edge coming from 'block' out of this block's pred list.
    // moveToDead parks the edge on the dead-pred list instead of freeing it;
    // doCleanSucc also removes the matching entry from block's succ list.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            BasicBlock *blockSucc = edge->GetSucc();
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadPredList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanSucc)
            {
                block->RemoveSucc(this, graph, false, moveToDead);
            }
            // A loop header left with a single predecessor has lost its
            // back-edge: mark the loop dead.
            if (blockSucc->isLoopHeader && blockSucc->loop && blockSucc->GetPredList()->HasOne())
            {
                Loop *loop = blockSucc->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead)
{
    // Remove the edge going to 'block' out of this block's succ list.
    // moveToDead parks the edge on the dead-succ list instead of freeing it;
    // doCleanPred also removes the matching entry from block's pred list.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadSuccList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanPred)
            {
                block->RemovePred(this, graph, false, moveToDead);
            }
            // A loop header left with a single predecessor has lost its
            // back-edge: mark the loop dead.
            if (block->isLoopHeader && block->loop && block->GetPredList()->HasOne())
            {
                Loop *loop = block->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
  2239. void
  2240. BasicBlock::UnlinkPred(BasicBlock *block)
  2241. {
  2242. this->UnlinkPred(block, true);
  2243. }
  2244. void
  2245. BasicBlock::UnlinkSucc(BasicBlock *block)
  2246. {
  2247. this->UnlinkSucc(block, true);
  2248. }
// Unlink the predecessor edge from 'block' without freeing it.
// doCleanSucc: also unlink the mirror edge on 'block''s successor list.
// Asserts if no such edge exists.
void
BasicBlock::UnlinkPred(BasicBlock *block, bool doCleanSucc)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            // Unlink only — the edge memory is not released here.
            iter.UnlinkCurrent();
            if (doCleanSucc)
            {
                // false prevents the mirror call from recursing back here.
                block->UnlinkSucc(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Unlink the successor edge to 'block' without freeing it.
// doCleanPred: also unlink the mirror edge on 'block''s predecessor list.
// Asserts if no such edge exists.
void
BasicBlock::UnlinkSucc(BasicBlock *block, bool doCleanPred)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            // Unlink only — the edge memory is not released here.
            iter.UnlinkCurrent();
            if (doCleanPred)
            {
                // false prevents the mirror call from recursing back here.
                block->UnlinkPred(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
  2283. bool
  2284. BasicBlock::IsLandingPad()
  2285. {
  2286. BasicBlock * nextBlock = this->GetNext();
  2287. return nextBlock && nextBlock->loop && nextBlock->isLoopHeader && nextBlock->loop->landingPad == this;
  2288. }
// Remove a single instruction from a block being deleted.
// Returns the instruction the caller should treat as the last one processed:
// the removed instruction's predecessor, the instruction itself when it is
// kept (labels, ByteCodeUses during globopt), or the ByteCodeUsesInstr that
// replaces it during globopt.
IR::Instr *
FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
{
    IR::Instr *instrPrev = instr->m_prev;
    if (globOpt)
    {
        // Removing block during glob opt. Need to maintain the graph so that
        // bailout will record the byte code use in case the dead code is exposed
        // by dyno-pogo optimization (where bailout need the byte code uses from
        // the dead blocks where it may not be dead after bailing out)
        if (instr->IsLabelInstr())
        {
            // Keep the label (it may still be referenced), but it no longer
            // tops a live loop.
            instr->AsLabelInstr()->m_isLoopTop = false;
            return instr;
        }
        else if (instr->IsByteCodeUsesInstr())
        {
            // Already in the form bailout needs; keep as-is.
            return instr;
        }
        // Capture the opcode before conversion destroys the original instr.
        Js::OpCode opcode = instr->m_opcode;
        IR::ByteCodeUsesInstr * newByteCodeUseInstr = globOpt->ConvertToByteCodeUses(instr);
        if (newByteCodeUseInstr != nullptr)
        {
            // We don't care about property used in these instruction
            // It is only necessary for field copy prop so that we will keep the implicit call
            // up to the copy prop location.
            newByteCodeUseInstr->propertySymUse = nullptr;
            if (opcode == Js::OpCode::Yield)
            {
                // A removed Yield leaves behind its resume label; find the next
                // Label and retire it as a yield-resume point.
                // NOTE(review): assumes a Label always follows a Yield — confirm.
                IR::Instr *instrLabel = newByteCodeUseInstr->m_next;
                while (instrLabel->m_opcode != Js::OpCode::Label)
                {
                    instrLabel = instrLabel->m_next;
                }
                func->RemoveDeadYieldOffsetResumeLabel(instrLabel->AsLabelInstr());
                instrLabel->AsLabelInstr()->m_hasNonBranchRef = false;
            }
            // Save the last instruction to update the block with
            return newByteCodeUseInstr;
        }
        else
        {
            // Nothing to preserve; the instruction was removed outright.
            return instrPrev;
        }
    }
    else
    {
        // Outside globopt a plain removal suffices.
        instr->Remove();
        return instrPrev;
    }
}
// Remove 'block' from the flow graph: strip its instructions (keeping the
// leading label and FunctionExit), detach the mirror edges on its neighbors,
// and unlink its loop from the loop list if it is a loop header.
// globOpt != nullptr: removal happens during globopt, so edges and the block
// itself are moved to dead lists (not freed) to keep byte-code-use info
// reachable for bailout.
// tailDuping: the block is being duplicated away; mark it dead as well.
void
FlowGraph::RemoveBlock(BasicBlock *block, GlobOpt * globOpt, bool tailDuping)
{
    Assert(!block->isDead && !block->isDeleted);
    IR::Instr * lastInstr = nullptr;
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        if (instr->m_opcode == Js::OpCode::FunctionExit)
        {
            // Removing FunctionExit causes problems downstream...
            // We could change the opcode, or have FunctionEpilog/FunctionExit to get
            // rid of the epilog.
            break;
        }
        if (instr == block->GetFirstInstr())
        {
            // Keep the block label, but it no longer tops a live loop.
            Assert(instr->IsLabelInstr());
            instr->AsLabelInstr()->m_isLoopTop = false;
        }
        else
        {
            // Track the last surviving instruction to fix up the block tail.
            lastInstr = this->RemoveInstr(instr, globOpt);
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;
    if (lastInstr)
    {
        block->SetLastInstr(lastInstr);
    }
    // Detach this block from each neighbor's edge list; this block's own
    // lists are handled below (moved to the dead lists during globopt).
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetPredList())
    {
        edge->GetPred()->RemoveSucc(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetSuccList())
    {
        edge->GetSucc()->RemovePred(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    if (block->isLoopHeader && this->loopList)
    {
        // If loop graph is built, remove loop from loopList
        Loop **pPrevLoop = &this->loopList;
        while (*pPrevLoop != block->loop)
        {
            pPrevLoop = &((*pPrevLoop)->next);
        }
        *pPrevLoop = (*pPrevLoop)->next;
        this->hasLoop = (this->loopList != nullptr);
    }
    if (globOpt != nullptr)
    {
        // Keep the edge objects around on the dead lists for bailout info.
        block->isDead = true;
        block->GetPredList()->MoveTo(block->GetDeadPredList());
        block->GetSuccList()->MoveTo(block->GetDeadSuccList());
    }
    if (tailDuping)
    {
        block->isDead = true;
    }
    block->isDeleted = true;
    block->SetDataUseCount(0);
}
  2400. void
  2401. BasicBlock::UnlinkInstr(IR::Instr * instr)
  2402. {
  2403. Assert(this->Contains(instr));
  2404. Assert(this->GetFirstInstr() != this->GetLastInstr());
  2405. if (instr == this->GetFirstInstr())
  2406. {
  2407. Assert(!this->GetFirstInstr()->IsLabelInstr());
  2408. this->SetFirstInstr(instr->m_next);
  2409. }
  2410. else if (instr == this->GetLastInstr())
  2411. {
  2412. this->SetLastInstr(instr->m_prev);
  2413. }
  2414. instr->Unlink();
  2415. }
  2416. void
  2417. BasicBlock::RemoveInstr(IR::Instr * instr)
  2418. {
  2419. Assert(this->Contains(instr));
  2420. if (instr == this->GetFirstInstr())
  2421. {
  2422. this->SetFirstInstr(instr->m_next);
  2423. }
  2424. else if (instr == this->GetLastInstr())
  2425. {
  2426. this->SetLastInstr(instr->m_prev);
  2427. }
  2428. instr->Remove();
  2429. }
  2430. void
  2431. BasicBlock::InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr)
  2432. {
  2433. Assert(this->Contains(beforeThisInstr));
  2434. beforeThisInstr->InsertBefore(newInstr);
  2435. if(this->GetFirstInstr() == beforeThisInstr)
  2436. {
  2437. Assert(!beforeThisInstr->IsLabelInstr());
  2438. this->SetFirstInstr(newInstr);
  2439. }
  2440. }
  2441. void
  2442. BasicBlock::InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr)
  2443. {
  2444. Assert(this->Contains(afterThisInstr));
  2445. afterThisInstr->InsertAfter(newInstr);
  2446. if (this->GetLastInstr() == afterThisInstr)
  2447. {
  2448. Assert(afterThisInstr->HasFallThrough());
  2449. this->SetLastInstr(newInstr);
  2450. }
  2451. }
  2452. void
  2453. BasicBlock::InsertAfter(IR::Instr *newInstr)
  2454. {
  2455. Assert(this->GetLastInstr()->HasFallThrough());
  2456. this->GetLastInstr()->InsertAfter(newInstr);
  2457. this->SetLastInstr(newInstr);
  2458. }
  2459. void
  2460. Loop::SetHasCall()
  2461. {
  2462. Loop * current = this;
  2463. do
  2464. {
  2465. if (current->hasCall)
  2466. {
  2467. #if DBG
  2468. current = current->parent;
  2469. while (current)
  2470. {
  2471. Assert(current->hasCall);
  2472. current = current->parent;
  2473. }
  2474. #endif
  2475. break;
  2476. }
  2477. current->hasCall = true;
  2478. current = current->parent;
  2479. }
  2480. while (current != nullptr);
  2481. }
  2482. void
  2483. Loop::SetImplicitCallFlags(Js::ImplicitCallFlags newFlags)
  2484. {
  2485. Loop * current = this;
  2486. do
  2487. {
  2488. if ((current->implicitCallFlags & newFlags) == newFlags)
  2489. {
  2490. #if DBG
  2491. current = current->parent;
  2492. while (current)
  2493. {
  2494. Assert((current->implicitCallFlags & newFlags) == newFlags);
  2495. current = current->parent;
  2496. }
  2497. #endif
  2498. break;
  2499. }
  2500. newFlags = (Js::ImplicitCallFlags)(implicitCallFlags | newFlags);
  2501. current->implicitCallFlags = newFlags;
  2502. current = current->parent;
  2503. }
  2504. while (current != nullptr);
  2505. }
  2506. Js::ImplicitCallFlags
  2507. Loop::GetImplicitCallFlags()
  2508. {
  2509. if (this->implicitCallFlags == Js::ImplicitCall_HasNoInfo)
  2510. {
  2511. if (this->parent == nullptr)
  2512. {
  2513. // We don't have any information, and we don't have any parent, so just assume that there aren't any implicit calls
  2514. this->implicitCallFlags = Js::ImplicitCall_None;
  2515. }
  2516. else
  2517. {
  2518. // We don't have any information, get it from the parent and hope for the best
  2519. this->implicitCallFlags = this->parent->GetImplicitCallFlags();
  2520. }
  2521. }
  2522. return this->implicitCallFlags;
  2523. }
// Whether field copy prop is allowed in this loop, based on its
// implicit-call flags (delegates to GlobOpt::ImplicitCallFlagsAllowOpts).
bool
Loop::CanDoFieldCopyProp()
{
#if DBG_DUMP
    // Trace the case where only external implicit-call bits are set —
    // presumably the host/external calls alone are what blocks the opt
    // (NOTE(review): confirm ImplicitCallFlagsAllowOpts rejects External).
    if (((this->implicitCallFlags & ~(Js::ImplicitCall_External)) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(L"fieldcopyprop disabled because external: loop count: %d", GetLoopNumber());
        GetFunc()->GetJnFunction()->DumpFullFunctionName();
        Output::Print(L"\n");
        Output::Flush();
    }
#endif
    return GlobOpt::ImplicitCallFlagsAllowOpts(this);
}
  2539. bool
  2540. Loop::CanDoFieldHoist()
  2541. {
  2542. // We can do field hoist wherever we can do copy prop
  2543. return CanDoFieldCopyProp();
  2544. }
  2545. bool
  2546. Loop::CanHoistInvariants()
  2547. {
  2548. Func * func = this->GetHeadBlock()->firstInstr->m_func->GetTopFunc();
  2549. if (PHASE_OFF(Js::InvariantsPhase, func))
  2550. {
  2551. return false;
  2552. }
  2553. return true;
  2554. }
  2555. IR::LabelInstr *
  2556. Loop::GetLoopTopInstr() const
  2557. {
  2558. IR::LabelInstr * instr = nullptr;
  2559. if (this->topFunc->isFlowGraphValid)
  2560. {
  2561. instr = this->GetHeadBlock()->GetFirstInstr()->AsLabelInstr();
  2562. }
  2563. else
  2564. {
  2565. // Flowgraph gets torn down after the globopt, so can't get the loopTop from the head block.
  2566. instr = this->loopTopLabel;
  2567. }
  2568. if (instr)
  2569. {
  2570. Assert(instr->m_isLoopTop);
  2571. }
  2572. return instr;
  2573. }
  2574. void
  2575. Loop::SetLoopTopInstr(IR::LabelInstr * loopTop)
  2576. {
  2577. this->loopTopLabel = loopTop;
  2578. }
  2579. #if DBG_DUMP
  2580. uint
  2581. Loop::GetLoopNumber() const
  2582. {
  2583. IR::LabelInstr * loopTopInstr = this->GetLoopTopInstr();
  2584. if (loopTopInstr->IsProfiledLabelInstr())
  2585. {
  2586. return loopTopInstr->AsProfiledLabelInstr()->loopNum;
  2587. }
  2588. return Js::LoopHeader::NoLoop;
  2589. }
  2590. bool
  2591. BasicBlock::Contains(IR::Instr * instr)
  2592. {
  2593. FOREACH_INSTR_IN_BLOCK(blockInstr, this)
  2594. {
  2595. if (instr == blockInstr)
  2596. {
  2597. return true;
  2598. }
  2599. }
  2600. NEXT_INSTR_IN_BLOCK;
  2601. return false;
  2602. }
  2603. #endif
  2604. FlowEdge *
  2605. FlowEdge::New(FlowGraph * graph)
  2606. {
  2607. FlowEdge * edge;
  2608. edge = JitAnew(graph->alloc, FlowEdge);
  2609. return edge;
  2610. }
  2611. bool
  2612. Loop::IsDescendentOrSelf(Loop const * loop) const
  2613. {
  2614. Loop const * currentLoop = loop;
  2615. while (currentLoop != nullptr)
  2616. {
  2617. if (currentLoop == this)
  2618. {
  2619. return true;
  2620. }
  2621. currentLoop = currentLoop->parent;
  2622. }
  2623. return false;
  2624. }
// Remove 'instr' while keeping the owning block's first/last bookkeeping
// consistent, routing through BasicBlock::RemoveInstr whenever the
// instruction sits on a block boundary.
void FlowGraph::SafeRemoveInstr(IR::Instr *instr)
{
    BasicBlock *block;
    if (instr->m_next->IsLabelInstr())
    {
        // 'instr' is the last instruction of the block that precedes the
        // next label's block; let that block fix up its tail pointer.
        block = instr->m_next->AsLabelInstr()->GetBasicBlock()->GetPrev();
        block->RemoveInstr(instr);
    }
    else if (instr->IsLabelInstr())
    {
        // 'instr' heads a block; let the block fix up its head pointer.
        block = instr->AsLabelInstr()->GetBasicBlock();
        block->RemoveInstr(instr);
    }
    else
    {
        // Mid-block instruction: block boundaries are unaffected, so a plain
        // removal is safe.
        Assert(!instr->EndsBasicBlock() && !instr->StartsBasicBlock());
        instr->Remove();
    }
}
// Decide whether 'src' is known to hold an unsigned (non-negative) value:
// either a non-negative integer constant, or the result of an unsigned
// shift (ShrU) by zero. On success, *pShrSrc1 receives the operand to use
// in its place (src itself, or the ShrU's first source).
bool FlowGraph::IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1)
{
    // Look for an unsigned constant, or the result of an unsigned shift by zero
    if (!src->IsRegOpnd())
    {
        return false;
    }
    // Single-def is required so the def-site inspection below is sound.
    if (!src->AsRegOpnd()->m_sym->IsSingleDef())
    {
        return false;
    }
    if (src->AsRegOpnd()->m_sym->IsIntConst())
    {
        // A non-negative constant is the same value signed or unsigned.
        int32 intConst = src->AsRegOpnd()->m_sym->GetIntConstValue();
        if (intConst >= 0)
        {
            *pShrSrc1 = src;
            return true;
        }
        else
        {
            return false;
        }
    }
    IR::Instr * shrUInstr = src->AsRegOpnd()->m_sym->GetInstrDef();
    if (shrUInstr->m_opcode != Js::OpCode::ShrU_A)
    {
        return false;
    }
    // Only a shift count of exactly zero preserves the original value.
    IR::Opnd *shrCnt = shrUInstr->GetSrc2();
    if (!shrCnt->IsRegOpnd() || !shrCnt->AsRegOpnd()->m_sym->IsTaggableIntConst() || shrCnt->AsRegOpnd()->m_sym->GetIntConstValue() != 0)
    {
        return false;
    }
    // ShrU x, 0 produces x reinterpreted as unsigned; use x directly.
    *pShrSrc1 = shrUInstr->GetSrc1();
    return true;
}
// Peephole: when both sources of a comparison are provably unsigned
// (non-negative constant or ShrU-by-zero), rewrite the compare/branch to its
// unsigned form and feed it the pre-shift operands, recording the bypassed
// syms in a ByteCodeUsesInstr for bailout. Returns true if rewritten.
bool FlowGraph::UnsignedCmpPeep(IR::Instr *cmpInstr)
{
    IR::Opnd *cmpSrc1 = cmpInstr->GetSrc1();
    IR::Opnd *cmpSrc2 = cmpInstr->GetSrc2();
    IR::Opnd *newSrc1;
    IR::Opnd *newSrc2;
    // Look for something like:
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  BrGt t1, t2, L
    //
    // Peep to:
    //
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  ByteCodeUse t1
    //  BrUnGt x, t2, L
    //
    // Hopefully dead-store can get rid of the ShrU
    if (!this->func->DoGlobOpt() || !GlobOpt::DoAggressiveIntTypeSpec(this->func) || !GlobOpt::DoLossyIntTypeSpec(this->func))
    {
        return false;
    }
    // Unconditional branches have no comparison to rewrite.
    if (cmpInstr->IsBranchInstr() && !cmpInstr->AsBranchInstr()->IsConditional())
    {
        return false;
    }
    if (!cmpInstr->GetSrc2())
    {
        return false;
    }
    // Both operands must be provably unsigned for the rewrite to be sound.
    if (!this->IsUnsignedOpnd(cmpSrc1, &newSrc1))
    {
        return false;
    }
    if (!this->IsUnsignedOpnd(cmpSrc2, &newSrc2))
    {
        return false;
    }
    // Map the signed opcode to its unsigned counterpart. (Strict) equality
    // is sign-agnostic, so those opcodes pass through unchanged.
    switch(cmpInstr->m_opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
        break;
    case Js::OpCode::BrLe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::BrLt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::BrGe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::BrGt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmLe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLe_A;
        break;
    case Js::OpCode::CmLt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLt_A;
        break;
    case Js::OpCode::CmGe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGe_A;
        break;
    case Js::OpCode::CmGt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGt_A;
        break;
    default:
        // Any other opcode is not a rewritable comparison.
        return false;
    }
    // Record the replaced syms so bailout still sees their byte-code uses.
    IR::ByteCodeUsesInstr * bytecodeInstr = IR::ByteCodeUsesInstr::New(cmpInstr->m_func);
    bytecodeInstr->SetByteCodeOffset(cmpInstr);
    bytecodeInstr->byteCodeUpwardExposedUsed = Anew(cmpInstr->m_func->m_alloc, BVSparse<JitArenaAllocator>,cmpInstr->m_func->m_alloc);
    cmpInstr->InsertBefore(bytecodeInstr);
    if (cmpSrc1 != newSrc1)
    {
        if (cmpSrc1->IsRegOpnd())
        {
            bytecodeInstr->byteCodeUpwardExposedUsed->Set(cmpSrc1->AsRegOpnd()->m_sym->m_id);
        }
        cmpInstr->ReplaceSrc1(newSrc1);
        if (newSrc1->IsRegOpnd())
        {
            // The substituted reg did not come from byte code.
            cmpInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
        }
    }
    if (cmpSrc2 != newSrc2)
    {
        if (cmpSrc2->IsRegOpnd())
        {
            bytecodeInstr->byteCodeUpwardExposedUsed->Set(cmpSrc2->AsRegOpnd()->m_sym->m_id);
        }
        cmpInstr->ReplaceSrc2(newSrc2);
        if (newSrc2->IsRegOpnd())
        {
            // The substituted reg did not come from byte code.
            cmpInstr->GetSrc2()->AsRegOpnd()->SetIsJITOptimizedReg(true);
        }
    }
    return true;
}
  2784. #if DBG
// Debug-only: validate loop-nesting invariants across all flow edges —
// edges within a loop, edges into loop headers, and edges leaving loops —
// and (unless the phase is off) that break blocks have been removed.
void
FlowGraph::VerifyLoopGraph()
{
    FOREACH_BLOCK(block, this)
    {
        Loop *loop = block->loop;
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (loop == succ->loop)
            {
                // Same loop: a same-loop edge may only enter the header of
                // that loop (i.e. a back edge), never another header.
                Assert(succ->isLoopHeader == false || loop->GetHeadBlock() == succ);
                continue;
            }
            if (succ->isLoopHeader)
            {
                // Entering a different loop: either a child of the current
                // loop, or a loop outside the current nest entirely.
                Assert(succ->loop->parent == loop
                    || (!loop->IsDescendentOrSelf(succ->loop)));
                continue;
            }
            // Leaving the loop: the target's loop must enclose the current one.
            Assert(succ->loop == nullptr || succ->loop->IsDescendentOrSelf(loop));
        } NEXT_SUCCESSOR_BLOCK;
        if (!PHASE_OFF(Js::RemoveBreakBlockPhase, this->GetFunc()))
        {
            // Make sure all break blocks have been removed.
            if (loop && !block->isLoopHeader && !(this->func->HasTry() && !this->func->DoOptimizeTryCatch()))
            {
                Assert(loop->IsDescendentOrSelf(block->GetPrev()->loop));
            }
        }
    } NEXT_BLOCK;
}
  2816. #endif
  2817. #if DBG_DUMP
  2818. void
  2819. FlowGraph::Dump(bool onlyOnVerboseMode, const wchar_t *form)
  2820. {
  2821. if(PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  2822. {
  2823. if (!onlyOnVerboseMode || Js::Configuration::Global.flags.Verbose)
  2824. {
  2825. if (form)
  2826. {
  2827. Output::Print(form);
  2828. }
  2829. this->Dump();
  2830. }
  2831. }
  2832. }
// Dump every block header (indented by loop-nesting depth), followed by the
// loop graph with the blocks of each loop.
void
FlowGraph::Dump()
{
    Output::Print(L"\nFlowGraph\n");
    FOREACH_BLOCK(block, this)
    {
        // Indent one step per level of loop nesting.
        Loop * loop = block->loop;
        while (loop)
        {
            Output::Print(L"    ");
            loop = loop->parent;
        }
        block->DumpHeader(false);
    } NEXT_BLOCK;
    Output::Print(L"\nLoopGraph\n");
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        Output::Print(L"\nLoop\n");
        FOREACH_BLOCK_IN_LOOP(block, loop)
        {
            block->DumpHeader(false);
        }NEXT_BLOCK_IN_LOOP;
        Output::Print(L"Loop Ends\n");
    }
}
// Print a one-line summary of this block: number, status markers, live and
// dead pred/succ lists, and loop membership (with call/implicit-call info
// on the loop header's line).
void
BasicBlock::DumpHeader(bool insertCR)
{
    if (insertCR)
    {
        Output::Print(L"\n");
    }
    Output::Print(L"BLOCK %d:", this->number);
    if (this->isDead)
    {
        Output::Print(L" **** DEAD ****");
    }
    // Break-block-related markers are mutually exclusive.
    if (this->isBreakBlock)
    {
        Output::Print(L" **** Break Block ****");
    }
    else if (this->isAirLockBlock)
    {
        Output::Print(L" **** Air lock Block ****");
    }
    else if (this->isBreakCompensationBlockAtSource)
    {
        Output::Print(L" **** Break Source Compensation Code ****");
    }
    else if (this->isBreakCompensationBlockAtSink)
    {
        Output::Print(L" **** Break Sink Compensation Code ****");
    }
    else if (this->isAirLockCompensationBlock)
    {
        Output::Print(L" **** Airlock block Compensation Code ****");
    }
    // Live predecessor list, e.g. " In(1, 2)".
    if (!this->predList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" In(");
        FOREACH_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_PREDECESSOR_BLOCK;
        Output::Print(L")");
    }
    // Live successor list, e.g. " Out(3)".
    if (!this->succList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" Out(");
        FOREACH_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_SUCCESSOR_BLOCK;
        Output::Print(L")");
    }
    // Edges parked on the dead lists during globopt.
    if (!this->deadPredList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" DeadIn(");
        FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_PREDECESSOR_BLOCK;
        Output::Print(L")");
    }
    if (!this->deadSuccList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(L" DeadOut(");
        FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(L", ");
            }
            Output::Print(L"%d", blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
        Output::Print(L")");
    }
    if (this->loop)
    {
        Output::Print(L" Loop(%d) header: %d", this->loop->loopNumber, this->loop->GetHeadBlock()->GetBlockNum());
        if (this->loop->parent)
        {
            Output::Print(L" parent(%d): %d", this->loop->parent->loopNumber, this->loop->parent->GetHeadBlock()->GetBlockNum());
        }
        // On the header's line, also show call and implicit-call info.
        if (this->loop->GetHeadBlock() == this)
        {
            Output::SkipToColumn(50);
            Output::Print(L"Call Exp/Imp: ");
            if (this->loop->GetHasCall())
            {
                Output::Print(L"yes/");
            }
            else
            {
                Output::Print(L" no/");
            }
            Output::Print(Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->loop->GetImplicitCallFlags()));
        }
    }
    Output::Print(L"\n");
    if (insertCR)
    {
        Output::Print(L"\n");
    }
}
// Dump all instructions in this block.
void
BasicBlock::Dump()
{
    // Dumping the first instruction (label) will dump the block header as well.
    FOREACH_INSTR_IN_BLOCK(instr, this)
    {
        instr->Dump();
    }
    NEXT_INSTR_IN_BLOCK;
}
// Dump the tracked type transition; both endpoints must be known.
void
AddPropertyCacheBucket::Dump() const
{
    Assert(this->initialType != nullptr);
    Assert(this->finalType != nullptr);
    Output::Print(L" initial type: 0x%x, final type: 0x%x ", this->initialType, this->finalType);
}
// Dump the set of guarded property operations; must be non-null.
void
ObjTypeGuardBucket::Dump() const
{
    Assert(this->guardedPropertyOps != nullptr);
    this->guardedPropertyOps->Dump();
}
// Dump the set of write guards; must be non-null.
void
ObjWriteGuardBucket::Dump() const
{
    Assert(this->writeGuards != nullptr);
    this->writeGuards->Dump();
}
  3011. #endif