// FlowGraph.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. FlowGraph *
  7. FlowGraph::New(Func * func, JitArenaAllocator * alloc)
  8. {
  9. FlowGraph * graph;
  10. graph = JitAnew(alloc, FlowGraph, func, alloc);
  11. return graph;
  12. }
  13. ///----------------------------------------------------------------------------
  14. ///
  15. /// FlowGraph::Build
  16. ///
  17. /// Construct flow graph and loop structures for the current state of the function.
  18. ///
  19. ///----------------------------------------------------------------------------
void
FlowGraph::Build(void)
{
    Func * func = this->func;

    // Run flow-graph peeps first so the blocks we build below reflect the
    // simplified branch structure.
    BEGIN_CODEGEN_PHASE(func, Js::FGPeepsPhase);
    this->RunPeeps();
    END_CODEGEN_PHASE(func, Js::FGPeepsPhase);

    // We don't optimize fully with SimpleJit. But, when JIT loop body is enabled, we do support
    // bailing out from a simple jitted function to do a full jit of a loop body in the function
    // (BailOnSimpleJitToFullJitLoopBody). For that purpose, we need the flow from try to catch.
    if (this->func->HasTry() &&
        (this->func->DoOptimizeTryCatch() ||
        (this->func->IsSimpleJit() && this->func->GetJITFunctionBody()->DoJITLoopBody())
        )
       )
    {
        // Stack of the labels of enclosing catch blocks; maintained by the
        // Catch/TryCatch cases in the backward walk below.
        this->catchLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
    }

    IR::Instr * currLastInstr = nullptr;
    BasicBlock * currBlock = nullptr;
    BasicBlock * nextBlock = nullptr;
    bool hasCall = false;

    // Walk instructions BACKWARD so that, when blocks are created, their edge
    // lists naturally come out in forward order (see comment after the loop).
    FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)
    {
        if (currLastInstr == nullptr || instr->EndsBasicBlock())
        {
            // Start working on a new block.
            // If we're currently processing a block, then wrap it up before beginning a new one.
            if (currLastInstr != nullptr)
            {
                nextBlock = currBlock;
                // Block spans from the instr after this one through currLastInstr.
                currBlock = this->AddBlock(instr->m_next, currLastInstr, nextBlock);
                currBlock->hasCall = hasCall;
                hasCall = false;
            }
            currLastInstr = instr;
        }

        if (instr->StartsBasicBlock())
        {
            // Insert a BrOnException after the loop top if we are in a try-catch. This is required to
            // model flow from the loop to the catch block for loops that don't have a break condition.
            if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop &&
                this->catchLabelStack && !this->catchLabelStack->Empty() &&
                instr->m_next->m_opcode != Js::OpCode::BrOnException)
            {
                IR::BranchInstr * brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->catchLabelStack->Top(), instr->m_func);
                instr->InsertAfter(brOnException);
                instrPrev = brOnException; // process BrOnException before adding a new block for loop top label.
                continue;
            }

            // Wrap up the current block and get ready to process a new one.
            nextBlock = currBlock;
            currBlock = this->AddBlock(instr, currLastInstr, nextBlock);
            currBlock->hasCall = hasCall;
            hasCall = false;
            currLastInstr = nullptr;
        }

        switch (instr->m_opcode)
        {
        case Js::OpCode::Catch:
            // Backward walk: entering a catch body; remember its label so loops
            // inside the try can be given explicit flow to it (see above).
            Assert(instr->m_prev->IsLabelInstr());
            if (this->catchLabelStack)
            {
                this->catchLabelStack->Push(instr->m_prev->AsLabelInstr());
            }
            break;
        case Js::OpCode::TryCatch:
            // Backward walk: leaving the corresponding try region.
            if (this->catchLabelStack)
            {
                this->catchLabelStack->Pop();
            }
            break;
        case Js::OpCode::CloneBlockScope:
        case Js::OpCode::CloneInnerScopeSlots:
            // It would be nice to do this in IRBuilder, but doing so gives us
            // trouble when doing the DoSlotArrayCheck since it assume single def
            // of the sym to do its check properly. So instead we assign the dst
            // here in FlowGraph.
            instr->SetDst(instr->GetSrc1());
            break;
        }

        if (OpCodeAttr::UseAllFields(instr->m_opcode))
        {
            // UseAllFields opcode are call instruction or opcode that would call.
            hasCall = true;
            if (OpCodeAttr::CallInstr(instr->m_opcode))
            {
                if (!instr->isCallInstrProtectedByNoProfileBailout)
                {
                    instr->m_func->SetHasCallsOnSelfAndParents();
                }

                // For ARM & X64 because of their register calling convention
                // the ArgOuts need to be moved next to the call.
#if defined(_M_ARM) || defined(_M_X64)
                IR::Instr* argInsertInstr = instr;
                instr->IterateArgInstrs([&](IR::Instr* argInstr)
                {
                    if (argInstr->m_opcode != Js::OpCode::LdSpreadIndices &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_Dynamic &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_FromStackArgs &&
                        argInstr->m_opcode != Js::OpCode::ArgOut_A_SpreadArg)
                    {
                        // don't have bailout in asm.js so we don't need BytecodeArgOutCapture
                        if (!argInstr->m_func->GetJITFunctionBody()->IsAsmJsMode())
                        {
                            // Need to always generate byte code arg out capture,
                            // because bailout can't restore from the arg out as it is
                            // replaced by new sym for register calling convention in lower
                            argInstr->GenerateBytecodeArgOutCapture();
                        }
                        // Check if the instruction is already next
                        if (argInstr != argInsertInstr->m_prev)
                        {
                            // It is not, move it.
                            argInstr->Move(argInsertInstr);
                        }
                        argInsertInstr = argInstr;
                    }
                    return false;
                });
#endif
            }
        }
    }
    NEXT_INSTR_IN_FUNC_BACKWARD_EDITING;

    this->func->isFlowGraphValid = true;
    Assert(!this->catchLabelStack || this->catchLabelStack->Empty());

    // We've been walking backward so that edge lists would be in the right order. Now walk the blocks
    // forward to number the blocks in lexical order.
    unsigned int blockNum = 0;
    FOREACH_BLOCK(block, this)
    {
        block->SetBlockNum(blockNum++);
    } NEXT_BLOCK;
    AssertMsg(blockNum == this->blockCount, "Block count is out of whack");

    this->RemoveUnreachableBlocks();
    this->FindLoops();

    bool breakBlocksRelocated = this->CanonicalizeLoops();

#if DBG
    this->VerifyLoopGraph();
#endif

    // Renumber the blocks. Break block remove code has likely inserted new basic blocks.
    blockNum = 0;

    // Regions need to be assigned before Globopt because:
    //     1. FullJit: The Backward Pass will set the write-through symbols on the regions and the forward pass will
    //        use this information to insert ToVars for those symbols. Also, for a symbol determined as write-through
    //        in the try region to be restored correctly by the bailout, it should not be removed from the
    //        byteCodeUpwardExposedUsed upon a def in the try region (the def might be preempted by an exception).
    //
    //     2. SimpleJit: Same case of correct restoration as above applies in SimpleJit too. However, the only bailout
    //        we have in Simple Jitted code right now is BailOnSimpleJitToFullJitLoopBody, installed in IRBuilder. So,
    //        for now, we can just check if the func has a bailout to assign regions pre globopt while running SimpleJit.
    bool assignRegionsBeforeGlobopt = this->func->HasTry() &&
        (this->func->DoOptimizeTryCatch() || (this->func->IsSimpleJit() && this->func->hasBailout));
    Region ** blockToRegion = nullptr;
    if (assignRegionsBeforeGlobopt)
    {
        blockToRegion = JitAnewArrayZ(this->alloc, Region*, this->blockCount);
    }
    FOREACH_BLOCK_ALL(block, this)
    {
        block->SetBlockNum(blockNum++);
        if (assignRegionsBeforeGlobopt)
        {
            // Deleted-but-not-dead blocks get no region.
            if (block->isDeleted && !block->isDead)
            {
                continue;
            }
            this->UpdateRegionForBlock(block, blockToRegion);
        }
    } NEXT_BLOCK_ALL;
    AssertMsg (blockNum == this->blockCount, "Block count is out of whack");

    if (breakBlocksRelocated)
    {
        // Sort loop lists only if there is break block removal.
        SortLoopLists();
    }
#if DBG_DUMP
    this->Dump(false, nullptr);
#endif
}
void
FlowGraph::SortLoopLists()
{
    // Restore lexical (block-number) order in each loop's block list after
    // break-block relocation has moved blocks around.
    // Sort the blocks in loopList
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        unsigned int lastBlockNumber = loop->GetHeadBlock()->GetBlockNum();
        // Insertion sort as the blockList is almost sorted in the loop.
        FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)
        {
            if (lastBlockNumber <= block->GetBlockNum())
            {
                // Still in order; advance the high-water mark.
                lastBlockNumber = block->GetBlockNum();
            }
            else
            {
                // Out of order: unlink and re-insert before the first block
                // with a larger number.
                iter.UnlinkCurrent();
                FOREACH_BLOCK_IN_LOOP_EDITING(insertBlock,loop,newIter)
                {
                    if (insertBlock->GetBlockNum() > block->GetBlockNum())
                    {
                        break;
                    }
                } NEXT_BLOCK_IN_LOOP_EDITING;
                // NOTE(review): relies on the FOREACH_BLOCK_IN_LOOP_EDITING
                // macro leaving newIter in scope (at the insertion point)
                // after NEXT_BLOCK_IN_LOOP_EDITING — confirm macro definition.
                newIter.InsertBefore(block);
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
    }
}
void
FlowGraph::RunPeeps()
{
    // Pre-flow-graph peephole pass: simplifies branches, fuses Cm+BrTrue/BrFalse
    // pairs, removes unreferenced labels, and opportunistically converts signed
    // compares to unsigned ones after a ShrU-by-0.
    // Skipped entirely for functions with try (EH flow is too fragile here).
    if (this->func->HasTry())
    {
        return;
    }

    if (PHASE_OFF(Js::FGPeepsPhase, this->func))
    {
        return;
    }

    IR::Instr * instrCm = nullptr;
    bool tryUnsignedCmpPeep = false;

    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->func)
    {
        switch(instr->m_opcode)
        {
        case Js::OpCode::Br:
        case Js::OpCode::BrEq_I4:
        case Js::OpCode::BrGe_I4:
        case Js::OpCode::BrGt_I4:
        case Js::OpCode::BrLt_I4:
        case Js::OpCode::BrLe_I4:
        case Js::OpCode::BrUnGe_I4:
        case Js::OpCode::BrUnGt_I4:
        case Js::OpCode::BrUnLt_I4:
        case Js::OpCode::BrUnLe_I4:
        case Js::OpCode::BrNeq_I4:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrUnGe_A:
        case Js::OpCode::BrUnGt_A:
        case Js::OpCode::BrUnLt_A:
        case Js::OpCode::BrUnLe_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrNotNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrNotNull_A:
        case Js::OpCode::BrNotAddr_A:
        case Js::OpCode::BrAddr_A:
        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrOnHasProperty:
        case Js::OpCode::BrOnNoProperty:
        case Js::OpCode::BrHasSideEffects:
        case Js::OpCode::BrNotHasSideEffects:
        case Js::OpCode::BrFncEqApply:
        case Js::OpCode::BrFncNeqApply:
        case Js::OpCode::BrOnEmpty:
        case Js::OpCode::BrOnNotEmpty:
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        case Js::OpCode::BrOnObject_A:
        case Js::OpCode::BrOnClassConstructor:
        case Js::OpCode::BrOnBaseConstructorKind:
            // Generic conditional/unconditional branches: try the unsigned-compare
            // peep if armed, then run the standard branch peep. PeepBranch may
            // delete instructions, so it returns the next instr to process.
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            break;

        case Js::OpCode::MultiBr:
            // TODO: Run peeps on these as well...
            break;

        case Js::OpCode::BrTrue_I4:
        case Js::OpCode::BrFalse_I4:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instrCm)
            {
                // A Cm result feeds this branch (possibly through useless
                // branches in between): fuse the Cm with the branch.
                if (instrCm->GetDst()->IsInt32())
                {
                    Assert(instr->m_opcode == Js::OpCode::BrTrue_I4 || instr->m_opcode == Js::OpCode::BrFalse_I4);
                    instrNext = this->PeepTypedCm(instrCm);
                }
                else
                {
                    instrNext = this->PeepCm(instrCm);
                }
                instrCm = nullptr;

                if (instrNext == nullptr)
                {
                    // Set instrNext back to the current instr.
                    instrNext = instr;
                }
            }
            else
            {
                instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            }
            break;

        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmSrNeq_A:
            // Signed compares may be convertible to unsigned if operands went
            // through ShrU-by-0.
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            // Intentional fallthrough: all Cm opcodes record the instr below.
        case Js::OpCode::CmUnGe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGe_A:
        case Js::OpCode::CmUnGt_A:
        case Js::OpCode::CmUnLt_A:
        case Js::OpCode::CmUnLe_A:
            // There may be useless branches between the Cm instr and the branch that uses the result.
            // So save the last Cm instr seen, and trigger the peep on the next BrTrue/BrFalse.
            instrCm = instr;
            break;

        case Js::OpCode::Label:
            // Drop labels no branch references; PeepUnreachableLabel may also
            // remove the now-unreachable code following it.
            if (instr->AsLabelInstr()->IsUnreferenced())
            {
                instrNext = Peeps::PeepUnreachableLabel(instr->AsLabelInstr(), false);
            }
            break;

        case Js::OpCode::StatementBoundary:
            // Re-derive the boundary's offset from the next real instruction,
            // since peeps may have removed what it originally pointed at.
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(instr->GetNextRealInstrOrLabel());
            break;

        case Js::OpCode::ShrU_I4:
        case Js::OpCode::ShrU_A:
            // A single-def unsigned-shift-by-0 marks a value as "known to be
            // used unsigned"; arm the unsigned compare peep for the rest of
            // the walk (once armed, stays armed).
            if (tryUnsignedCmpPeep)
            {
                break;
            }
            if (instr->GetDst()->AsRegOpnd()->m_sym->IsSingleDef()
                && instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->IsTaggableIntConst()
                && instr->GetSrc2()->AsRegOpnd()->m_sym->GetIntConstValue() == 0)
            {
                tryUnsignedCmpPeep = true;
            }
            break;

        default:
            // Every branch opcode must be handled explicitly above.
            Assert(!instr->IsBranchInstr());
        }
    } NEXT_INSTR_IN_FUNC_EDITING;
}
void
Loop::InsertLandingPad(FlowGraph *fg)
{
    BasicBlock *headBlock = this->GetHeadBlock();

    // Always create a landing pad. This allows globopt to easily hoist instructions
    // and re-optimize the block if needed.
    BasicBlock *landingPad = BasicBlock::New(fg);
    this->landingPad = landingPad;

    // The landing pad is a single new label placed immediately before the
    // loop head's first instruction.
    IR::Instr * headInstr = headBlock->GetFirstInstr();
    IR::LabelInstr *landingPadLabel = IR::LabelInstr::New(Js::OpCode::Label, headInstr->m_func);
    landingPadLabel->SetByteCodeOffset(headInstr);
    headInstr->InsertBefore(landingPadLabel);

    landingPadLabel->SetBasicBlock(landingPad);

    landingPad->SetBlockNum(fg->blockCount++);
    landingPad->SetFirstInstr(landingPadLabel);
    landingPad->SetLastInstr(landingPadLabel);

    // Link the landing pad into the lexical block chain just before the head.
    landingPad->prev = headBlock->prev;
    landingPad->prev->next = landingPad;
    landingPad->next = headBlock;
    headBlock->prev = landingPad;

    // The pad belongs to the parent loop, not this loop.
    Loop *parentLoop = this->parent;
    landingPad->loop = parentLoop;

    // We need to add this block to the block list of the parent loops
    while (parentLoop)
    {
        // Find the head block in the block list of the parent loop
        FOREACH_BLOCK_IN_LOOP_EDITING(block, parentLoop, iter)
        {
            if (block == headBlock)
            {
                // Add the landing pad to the block list
                iter.InsertBefore(landingPad);
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        parentLoop = parentLoop->parent;
    }

    // Fix predecessor flow edges: every non-back-edge predecessor of the head
    // is redirected to enter through the landing pad instead.
    FOREACH_PREDECESSOR_EDGE_EDITING(edge, headBlock, iter)
    {
        // Make sure it isn't a back-edge
        if (edge->GetPred()->loop != this && !this->IsDescendentOrSelf(edge->GetPred()->loop))
        {
            // Retarget the predecessor's branch (if any) at the pad's label.
            if (edge->GetPred()->GetLastInstr()->IsBranchInstr() && headBlock->GetFirstInstr()->IsLabelInstr())
            {
                IR::BranchInstr *branch = edge->GetPred()->GetLastInstr()->AsBranchInstr();
                branch->ReplaceTarget(headBlock->GetFirstInstr()->AsLabelInstr(), landingPadLabel);
            }
            // Reuse the existing edge object: detach from head, attach to pad.
            headBlock->UnlinkPred(edge->GetPred(), false);
            landingPad->AddPred(edge, fg);
            edge->SetSucc(landingPad);
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    // Finally, the pad itself falls through into the loop head.
    fg->AddEdge(landingPad, headBlock);
}
bool
Loop::RemoveBreakBlocks(FlowGraph *fg)
{
    // Canonicalize this loop by moving "break blocks" — blocks lexically inside
    // the loop's range that do NOT belong to the loop — to after the loop tail.
    // Returns true if any block was relocated (caller re-sorts loop lists).
    bool breakBlockRelocated = false;
    if (PHASE_OFF(Js::RemoveBreakBlockPhase, fg->GetFunc()))
    {
        return false;
    }

    // Find the lexically last block of this loop.
    BasicBlock *loopTailBlock = nullptr;
    FOREACH_BLOCK_IN_LOOP(block, this)
    {
        loopTailBlock = block;
    } NEXT_BLOCK_IN_LOOP;
    AnalysisAssert(loopTailBlock);

    // Scan backward from the tail to the head looking for blocks that are not
    // part of this loop (or a nested loop).
    FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(breakBlockEnd, loopTailBlock, this->GetHeadBlock(), blockPrev)
    {
        while (!this->IsDescendentOrSelf(breakBlockEnd->loop))
        {
            // Found at least one break block;
            breakBlockRelocated = true;

#if DBG
            breakBlockEnd->isBreakBlock = true;
#endif
            // Find the first block in this break block sequence.
            BasicBlock *breakBlockStart = breakBlockEnd;
            BasicBlock *breakBlockStartPrev = breakBlockEnd->GetPrev();

            // Walk back the blocks until we find a block which belongs to that block.
            // Note: We don't really care if there are break blocks corresponding to different loops. We move the blocks conservatively to the end of the loop.
            // Algorithm works on one loop at a time.
            while((breakBlockStartPrev->loop == breakBlockEnd->loop) || !this->IsDescendentOrSelf(breakBlockStartPrev->loop))
            {
                breakBlockStart = breakBlockStartPrev;
                breakBlockStartPrev = breakBlockStartPrev->GetPrev();
            }

#if DBG
            breakBlockStart->isBreakBlock = true; // Mark the first block as well.
#endif

            BasicBlock *exitLoopTail = loopTailBlock;
            // Move these break blocks to the tail of the loop.
            fg->MoveBlocksBefore(breakBlockStart, breakBlockEnd, exitLoopTail->next);

#if DBG_DUMP
            fg->Dump(true /*needs verbose flag*/, _u("\n After Each iteration of canonicalization \n"));
#endif
            // Again be conservative, there are edits to the loop graph. Start fresh for this loop.
            // (Restart scanning from the loop tail after each relocation.)
            breakBlockEnd = loopTailBlock;
            blockPrev = breakBlockEnd->prev;
        }
    } NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING;

    return breakBlockRelocated;
}
// Unlink the block range [blockStart, blockEnd] from its current position in
// the block list and relink it immediately before insertBlock. Both the basic
// block linkage and the underlying instruction list are moved. Where the move
// breaks an existing fall-through (at the old position or at the new one), a
// compensation block is inserted so control flow is preserved.
void
FlowGraph::MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock)
{
    // Neighbors at the source position and at the destination position.
    BasicBlock *srcPredBlock = blockStart->prev;
    BasicBlock *srcNextBlock = blockEnd->next;
    BasicBlock *dstPredBlock = insertBlock->prev;
    // Capture these before relinking; they anchor the instruction-range move below.
    IR::Instr* dstPredBlockLastInstr = dstPredBlock->GetLastInstr();
    IR::Instr* blockEndLastInstr = blockEnd->GetLastInstr();
    // Fix block linkage
    // Close the gap at the source...
    srcPredBlock->next = srcNextBlock;
    srcNextBlock->prev = srcPredBlock;
    // ...and splice the range in before insertBlock.
    dstPredBlock->next = blockStart;
    insertBlock->prev = blockEnd;
    blockStart->prev = dstPredBlock;
    blockEnd->next = insertBlock;
    // Fix instruction linkage
    // Move the moved blocks' instructions so they follow the destination
    // predecessor's last instruction, keeping IR order consistent with block order.
    IR::Instr::MoveRangeAfter(blockStart->GetFirstInstr(), blockEndLastInstr, dstPredBlockLastInstr);
    // Fix instruction flow
    IR::Instr *srcLastInstr = srcPredBlock->GetLastInstr();
    if (srcLastInstr->IsBranchInstr() && srcLastInstr->AsBranchInstr()->HasFallThrough())
    {
        // There was a fallthrough in the break blocks' original position.
        IR::BranchInstr *srcBranch = srcLastInstr->AsBranchInstr();
        IR::Instr *srcBranchNextInstr = srcBranch->GetNextRealInstrOrLabel();
        // Save the target and invert the branch.
        IR::LabelInstr *srcBranchTarget = srcBranch->GetTarget();
        srcPredBlock->InvertBranch(srcBranch);
        IR::LabelInstr *srcLabel = blockStart->GetFirstInstr()->AsLabelInstr();
        // Point the inverted branch to the (moved) break block.
        srcBranch->SetTarget(srcLabel);
        if (srcBranchNextInstr != srcBranchTarget)
        {
            // The original taken-target is no longer reachable by fall-through
            // after the inversion; route it through a compensation block.
            FlowEdge *srcEdge = this->FindEdge(srcPredBlock, srcBranchTarget->GetBasicBlock());
            Assert(srcEdge);
            BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(srcEdge, true /*insert compensation block to loop list*/, false /*At source*/);
            Assert(compensationBlock);
        }
    }
    IR::Instr *dstLastInstr = dstPredBlockLastInstr;
    if (dstLastInstr->IsBranchInstr() && dstLastInstr->AsBranchInstr()->HasFallThrough())
    {
        // There is a fallthrough in the block after which the break block is
        // inserted; compensate so it still reaches its old fall-through successor.
        FlowEdge *dstEdge = this->FindEdge(dstPredBlock, blockEnd->GetNext());
        Assert(dstEdge);
        BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert compensation block to loop list*/, true /*At sink*/);
        Assert(compensationBlock);
    }
}
  541. FlowEdge *
  542. FlowGraph::FindEdge(BasicBlock *predBlock, BasicBlock *succBlock)
  543. {
  544. FlowEdge *srcEdge = nullptr;
  545. FOREACH_SUCCESSOR_EDGE(edge, predBlock)
  546. {
  547. if (edge->GetSucc() == succBlock)
  548. {
  549. srcEdge = edge;
  550. break;
  551. }
  552. } NEXT_SUCCESSOR_EDGE;
  553. return srcEdge;
  554. }
  555. void
  556. BasicBlock::InvertBranch(IR::BranchInstr *branch)
  557. {
  558. Assert(this->GetLastInstr() == branch);
  559. Assert(this->GetSuccList()->HasTwo());
  560. branch->Invert();
  561. this->GetSuccList()->Reverse();
  562. }
  563. bool
  564. FlowGraph::CanonicalizeLoops()
  565. {
  566. if (this->func->HasProfileInfo())
  567. {
  568. this->implicitCallFlags = this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags();
  569. for (Loop *loop = this->loopList; loop; loop = loop->next)
  570. {
  571. this->implicitCallFlags = (Js::ImplicitCallFlags)(this->implicitCallFlags | loop->GetImplicitCallFlags());
  572. }
  573. }
  574. #if DBG_DUMP
  575. this->Dump(true, _u("\n Before canonicalizeLoops \n"));
  576. #endif
  577. bool breakBlockRelocated = false;
  578. for (Loop *loop = this->loopList; loop; loop = loop->next)
  579. {
  580. loop->InsertLandingPad(this);
  581. if (!this->func->HasTry() || this->func->DoOptimizeTryCatch())
  582. {
  583. bool relocated = loop->RemoveBreakBlocks(this);
  584. if (!breakBlockRelocated && relocated)
  585. {
  586. breakBlockRelocated = true;
  587. }
  588. }
  589. }
  590. #if DBG_DUMP
  591. this->Dump(true, _u("\n After canonicalizeLoops \n"));
  592. #endif
  593. return breakBlockRelocated;
  594. }
  595. // Find the loops in this function, build the loop structure, and build a linked
  596. // list of the basic blocks in this loop (including blocks of inner loops). The
  597. // list preserves the reverse post-order of the blocks in the flowgraph block list.
  598. void
  599. FlowGraph::FindLoops()
  600. {
  601. if (!this->hasLoop)
  602. {
  603. return;
  604. }
  605. Func * func = this->func;
  606. FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func)
  607. {
  608. if (block->loop != nullptr)
  609. {
  610. // Block already visited
  611. continue;
  612. }
  613. FOREACH_SUCCESSOR_BLOCK(succ, block)
  614. {
  615. if (succ->isLoopHeader && succ->loop == nullptr)
  616. {
  617. // Found a loop back-edge
  618. BuildLoop(succ, block);
  619. }
  620. } NEXT_SUCCESSOR_BLOCK;
  621. if (block->isLoopHeader && block->loop == nullptr)
  622. {
  623. // We would have built a loop for it if it was a loop...
  624. block->isLoopHeader = false;
  625. block->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
  626. }
  627. } NEXT_BLOCK_BACKWARD_IN_FUNC;
  628. }
// Build the Loop structure for the loop headed at headBlock whose back-edge
// source is tailBlock, then walk its member blocks (recursing into inner
// loops via WalkLoopBlocks). parentLoop, when non-null, is a candidate
// enclosing loop. Implicit-call flags are seeded from profile data on the
// loop-top label when available, otherwise pessimistically set to
// ImplicitCall_All.
void
FlowGraph::BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop)
{
    // This function is recursive, so when jitting in the foreground, probe the stack
    if(!func->IsBackgroundJIT())
    {
        PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
    }
    if (tailBlock->number < headBlock->number)
    {
        // Not a loop. We didn't see any back-edge: the "tail" precedes the
        // "head" in block order, so clear the stale loop-top markings.
        headBlock->isLoopHeader = false;
        headBlock->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        return;
    }
    Assert(headBlock->isLoopHeader);
    // Allocate zero-initialized and prepend to the flow graph's loop list.
    Loop *loop = JitAnewZ(this->GetFunc()->m_alloc, Loop, this->GetFunc()->m_alloc, this->GetFunc());
    loop->next = this->loopList;
    this->loopList = loop;
    headBlock->loop = loop;
    loop->headBlock = headBlock;
    loop->int32SymsOnEntry = nullptr;
    loop->lossyInt32SymsOnEntry = nullptr;
    // If parentLoop is a parent of loop, it's headBlock better appear first.
    if (parentLoop && loop->headBlock->number > parentLoop->headBlock->number)
    {
        loop->parent = parentLoop;
        parentLoop->isLeaf = false;
    }
    loop->hasDeadStoreCollectionPass = false;
    loop->hasDeadStorePrepass = false;
    loop->memOpInfo = nullptr;
    loop->doMemOp = true;
    // Temporary arena for the block walk's bit vector; freed on scope exit.
    NoRecoverMemoryJitArenaAllocator tempAlloc(_u("BE-LoopBuilder"), this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    WalkLoopBlocks(tailBlock, loop, &tempAlloc);
    Assert(loop->GetHeadBlock() == headBlock);
    IR::LabelInstr * firstInstr = loop->GetLoopTopInstr();
    firstInstr->SetLoop(loop);
    if (firstInstr->IsProfiledLabelInstr())
    {
        loop->SetImplicitCallFlags(firstInstr->AsProfiledLabelInstr()->loopImplicitCallFlags);
        if (this->func->HasProfileInfo() && this->func->GetReadOnlyProfileInfo()->IsLoopImplicitCallInfoDisabled())
        {
            // Per-loop implicit-call info is disabled; fall back to the
            // function-level flags.
            loop->SetImplicitCallFlags(this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags());
        }
        loop->SetLoopFlags(firstInstr->AsProfiledLabelInstr()->loopFlags);
    }
    else
    {
        // Didn't collect profile information, don't do optimizations
        loop->SetImplicitCallFlags(Js::ImplicitCall_All);
    }
}
  682. Loop::MemCopyCandidate* Loop::MemOpCandidate::AsMemCopy()
  683. {
  684. Assert(this->IsMemCopy());
  685. return (Loop::MemCopyCandidate*)this;
  686. }
  687. Loop::MemSetCandidate* Loop::MemOpCandidate::AsMemSet()
  688. {
  689. Assert(this->IsMemSet());
  690. return (Loop::MemSetCandidate*)this;
  691. }
  692. void
  693. Loop::EnsureMemOpVariablesInitialized()
  694. {
  695. Assert(this->doMemOp);
  696. if (this->memOpInfo == nullptr)
  697. {
  698. JitArenaAllocator *allocator = this->GetFunc()->GetTopFunc()->m_fg->alloc;
  699. this->memOpInfo = JitAnewStruct(allocator, Loop::MemOpInfo);
  700. this->memOpInfo->inductionVariablesUsedAfterLoop = nullptr;
  701. this->memOpInfo->startIndexOpndCache[0] = nullptr;
  702. this->memOpInfo->startIndexOpndCache[1] = nullptr;
  703. this->memOpInfo->startIndexOpndCache[2] = nullptr;
  704. this->memOpInfo->startIndexOpndCache[3] = nullptr;
  705. this->memOpInfo->inductionVariableChangeInfoMap = JitAnew(allocator, Loop::InductionVariableChangeInfoMap, allocator);
  706. this->memOpInfo->inductionVariableOpndPerUnrollMap = JitAnew(allocator, Loop::InductionVariableOpndPerUnrollMap, allocator);
  707. this->memOpInfo->candidates = JitAnew(allocator, Loop::MemOpList, allocator);
  708. }
  709. }
// Walk the basic blocks backwards until we find the loop header.
// Mark basic blocks in the loop by looking at the predecessors
// of blocks known to be in the loop.
// Recurse on inner loops.
void
FlowGraph::WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc)
{
    AnalysisAssert(loop);
    // Bit vector of block numbers known to belong to this loop; seeded with
    // the back-edge source and grown from predecessors of member blocks.
    BVSparse<JitArenaAllocator> *loopBlocksBv = JitAnew(tempAlloc, BVSparse<JitArenaAllocator>, tempAlloc);
    BasicBlock *tailBlock = block;
    BasicBlock *lastBlock;
    loopBlocksBv->Set(block->GetBlockNum());
    this->AddBlockToLoop(block, loop);
    if (block == loop->headBlock)
    {
        // Single block loop, we're done
        return;
    }
    do
    {
        BOOL isInLoop = loopBlocksBv->Test(block->GetBlockNum());
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader)
            {
                // Found a loop back-edge
                if (loop->headBlock == succ)
                {
                    // Back-edge into *this* loop's header: block is a member.
                    isInLoop = true;
                }
                else if (succ->loop == nullptr || succ->loop->headBlock != succ)
                {
                    // Recurse on inner loop
                    BuildLoop(succ, block, isInLoop ? loop : nullptr);
                }
            }
        } NEXT_SUCCESSOR_BLOCK;
        if (isInLoop)
        {
            // This block is in the loop. All of it's predecessors should be contained in the loop as well.
            FOREACH_PREDECESSOR_BLOCK(pred, block)
            {
                // Fix up loop parent if it isn't set already.
                // If pred->loop != loop, we're looking at an inner loop, which was already visited.
                // If pred->loop->parent == nullptr, this is the first time we see this loop from an outer
                // loop, so this must be an immediate child.
                if (pred->loop && pred->loop != loop && loop->headBlock->number < pred->loop->headBlock->number
                    && (pred->loop->parent == nullptr || pred->loop->parent->headBlock->number < loop->headBlock->number))
                {
                    pred->loop->parent = loop;
                    loop->isLeaf = false;
                    // Propagate the inner loop's call/implicit-call info outward.
                    if (pred->loop->hasCall)
                    {
                        loop->SetHasCall();
                    }
                    loop->SetImplicitCallFlags(pred->loop->GetImplicitCallFlags());
                }
                // Add pred to loop bit vector
                loopBlocksBv->Set(pred->GetBlockNum());
            } NEXT_PREDECESSOR_BLOCK;
            // Claim the block for this loop unless it already belongs to an
            // inner (descendant) loop, which takes precedence.
            if (block->loop == nullptr || block->loop->IsDescendentOrSelf(loop))
            {
                block->loop = loop;
            }
            if (block != tailBlock)
            {
                // tailBlock was already added before the walk started.
                this->AddBlockToLoop(block, loop);
            }
        }
        lastBlock = block;
        block = block->GetPrev();
    } while (lastBlock != loop->headBlock);
}
  783. // Add block to this loop, and it's parent loops.
  784. void
  785. FlowGraph::AddBlockToLoop(BasicBlock *block, Loop *loop)
  786. {
  787. loop->blockList.Prepend(block);
  788. if (block->hasCall)
  789. {
  790. loop->SetHasCall();
  791. }
  792. }
///----------------------------------------------------------------------------
///
/// FlowGraph::AddBlock
///
/// Finish processing of a new block: hook up successor arcs, note loops, etc.
///
///----------------------------------------------------------------------------
BasicBlock *
FlowGraph::AddBlock(
    IR::Instr * firstInstr,
    IR::Instr * lastInstr,
    BasicBlock * nextBlock)
{
    BasicBlock * block;
    IR::LabelInstr * labelInstr;
    // Every block starts with a label; synthesize one if the first
    // instruction isn't already a label.
    if (firstInstr->IsLabelInstr())
    {
        labelInstr = firstInstr->AsLabelInstr();
    }
    else
    {
        labelInstr = IR::LabelInstr::New(Js::OpCode::Label, firstInstr->m_func);
        labelInstr->SetByteCodeOffset(firstInstr);
        if (firstInstr->IsEntryInstr())
        {
            // Can't insert before the function entry instruction.
            firstInstr->InsertAfter(labelInstr);
        }
        else
        {
            firstInstr->InsertBefore(labelInstr);
        }
        firstInstr = labelInstr;
    }
    block = labelInstr->GetBasicBlock();
    if (block == nullptr)
    {
        block = BasicBlock::New(this);
        labelInstr->SetBasicBlock(block);
        // Remember last block in function to target successor of RETs.
        // (Blocks are processed backwards, so the first one created is the tail.)
        if (!this->tailBlock)
        {
            this->tailBlock = block;
        }
    }
    // Hook up the successor edges
    if (lastInstr->EndsBasicBlock())
    {
        BasicBlock * blockTarget = nullptr;
        if (lastInstr->IsBranchInstr())
        {
            // Hook up a successor edge to the branch target.
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if(branchInstr->IsMultiBranch())
            {
                // One edge per unique target of the multi-branch.
                BasicBlock * blockMultiBrTarget;
                IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();
                multiBranchInstr->MapUniqueMultiBrLabels([&](IR::LabelInstr * labelInstr) -> void
                {
                    blockMultiBrTarget = SetBlockTargetAndLoopFlag(labelInstr);
                    this->AddEdge(block, blockMultiBrTarget);
                });
            }
            else
            {
                IR::LabelInstr * targetLabelInstr = branchInstr->GetTarget();
                blockTarget = SetBlockTargetAndLoopFlag(targetLabelInstr);
                if (branchInstr->IsConditional())
                {
                    // Also make sure the fall-through label (if any) has a
                    // block and its loop flag noted.
                    IR::Instr *instrNext = branchInstr->GetNextRealInstrOrLabel();
                    if (instrNext->IsLabelInstr())
                    {
                        SetBlockTargetAndLoopFlag(instrNext->AsLabelInstr());
                    }
                }
            }
        }
        else if (lastInstr->m_opcode == Js::OpCode::Ret && block != this->tailBlock)
        {
            // RETs flow to the function's tail block.
            blockTarget = this->tailBlock;
        }
        if (blockTarget)
        {
            this->AddEdge(block, blockTarget);
        }
    }
    if (lastInstr->HasFallThrough())
    {
        // Add a branch to next instruction so that we don't have to update the flow graph
        // when the glob opt tries to insert instructions.
        // We don't run the globopt with try/catch, don't need to insert branch to next for fall through blocks.
        if (!this->func->HasTry() && !lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * instr = IR::BranchInstr::New(Js::OpCode::Br,
                lastInstr->m_next->AsLabelInstr(), lastInstr->m_func);
            instr->SetByteCodeOffset(lastInstr->m_next);
            lastInstr->InsertAfter(instr);
            lastInstr = instr;
        }
        this->AddEdge(block, nextBlock);
    }
    // Number the block and prepend it to the graph's block list (blocks are
    // built back-to-front, so prepending preserves program order).
    block->SetBlockNum(this->blockCount++);
    block->SetFirstInstr(firstInstr);
    block->SetLastInstr(lastInstr);
    if (this->blockList)
    {
        this->blockList->prev = block;
    }
    block->next = this->blockList;
    this->blockList = block;
    return block;
}
  904. BasicBlock *
  905. FlowGraph::SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr)
  906. {
  907. BasicBlock * blockTarget = nullptr;
  908. blockTarget = labelInstr->GetBasicBlock();
  909. if (blockTarget == nullptr)
  910. {
  911. blockTarget = BasicBlock::New(this);
  912. labelInstr->SetBasicBlock(blockTarget);
  913. }
  914. if (labelInstr->m_isLoopTop)
  915. {
  916. blockTarget->isLoopHeader = true;
  917. this->hasLoop = true;
  918. }
  919. return blockTarget;
  920. }
  921. ///----------------------------------------------------------------------------
  922. ///
  923. /// FlowGraph::AddEdge
  924. ///
  925. /// Add an edge connecting the two given blocks.
  926. ///
  927. ///----------------------------------------------------------------------------
  928. FlowEdge *
  929. FlowGraph::AddEdge(BasicBlock * blockPred, BasicBlock * blockSucc)
  930. {
  931. FlowEdge * edge = FlowEdge::New(this);
  932. edge->SetPred(blockPred);
  933. edge->SetSucc(blockSucc);
  934. blockPred->AddSucc(edge, this);
  935. blockSucc->AddPred(edge, this);
  936. return edge;
  937. }
///----------------------------------------------------------------------------
///
/// FlowGraph::Destroy
///
/// Remove all references to FG structures from the IR in preparation for freeing
/// the FG.
///
///----------------------------------------------------------------------------
void
FlowGraph::Destroy(void)
{
    BOOL fHasTry = this->func->HasTry();
    Region ** blockToRegion = nullptr;
    if (fHasTry)
    {
        // Per-block EH region map, filled in by UpdateRegionForBlock below.
        blockToRegion = JitAnewArrayZ(this->alloc, Region*, this->blockCount);
        // Do unreachable code removal up front to avoid problems
        // with unreachable back edges, etc.
        this->RemoveUnreachableBlocks();
    }
    FOREACH_BLOCK_ALL(block, this)
    {
        IR::Instr * firstInstr = block->GetFirstInstr();
        if (block->isDeleted && !block->isDead)
        {
            if (firstInstr->IsLabelInstr())
            {
                IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
                labelInstr->UnlinkBasicBlock();
                // Removing the label for non try blocks as we have a deleted block which has the label instruction
                // still not removed; this prevents the assert for cases where the deleted blocks fall through to a helper block,
                // i.e. helper introduced by polymorphic inlining bailout.
                // Skipping Try blocks as we have dependency on blocks to get the last instr(see below in this function)
                if (!fHasTry)
                {
                    if (this->func->GetJITFunctionBody()->IsCoroutine())
                    {
                        // the label could be a yield resume label, in which case we also need to remove it from the YieldOffsetResumeLabels list
                        this->func->MapUntilYieldOffsetResumeLabels([this, &labelInstr](int i, const YieldOffsetResumeLabel& yorl)
                        {
                            if (labelInstr == yorl.Second())
                            {
                                labelInstr->m_hasNonBranchRef = false;
                                this->func->RemoveYieldOffsetResumeLabel(yorl);
                                return true;
                            }
                            return false;
                        });
                    }
                    Assert(labelInstr->IsUnreferenced());
                    labelInstr->Remove();
                }
            }
            continue;
        }
        if (block->isLoopHeader && !block->isDead)
        {
            // Mark the tail block of this loop (the last back-edge). The register allocator
            // uses this to lexically find loops.
            BasicBlock *loopTail = nullptr;
            AssertMsg(firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->m_isLoopTop,
                "Label not marked as loop top...");
            // Search the loop's member blocks for one whose successor is the
            // header, i.e. the source of a back-edge.
            FOREACH_BLOCK_IN_LOOP(loopBlock, block->loop)
            {
                FOREACH_SUCCESSOR_BLOCK(succ, loopBlock)
                {
                    if (succ == block)
                    {
                        loopTail = loopBlock;
                        break;
                    }
                } NEXT_SUCCESSOR_BLOCK;
            } NEXT_BLOCK_IN_LOOP;
            if (loopTail)
            {
                AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
                block->loop->SetLoopTopInstr(block->GetFirstInstr()->AsLabelInstr());
            }
            else
            {
                // This loop doesn't have a back-edge: that is, it is not a loop
                // anymore...
                firstInstr->AsLabelInstr()->m_isLoopTop = FALSE;
            }
        }
        if (fHasTry)
        {
            // Propagate EH regions forward into this block.
            this->UpdateRegionForBlock(block, blockToRegion);
        }
        if (firstInstr->IsLabelInstr())
        {
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->UnlinkBasicBlock();
            if (labelInstr->IsUnreferenced() && !fHasTry)
            {
                // This is an unreferenced label, probably added by FG building.
                // Delete it now to make extended basic blocks visible.
                if (firstInstr == block->GetLastInstr())
                {
                    // Label was the block's only instruction; nothing else to
                    // process for this block.
                    labelInstr->Remove();
                    continue;
                }
                else
                {
                    labelInstr->Remove();
                }
            }
        }
        // We don't run the globopt with try/catch, don't need to remove branch to next for fall through blocks
        IR::Instr * lastInstr = block->GetLastInstr();
        if (!fHasTry && lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if (!branchInstr->IsConditional() && branchInstr->GetTarget() == branchInstr->m_next)
            {
                // Remove branch to next
                branchInstr->Remove();
            }
        }
    }
    NEXT_BLOCK;
#if DBG
    if (fHasTry)
    {
        // Now that all blocks have regions, we should see consistently propagated regions at all
        // block boundaries.
        FOREACH_BLOCK(block, this)
        {
            Region * region = blockToRegion[block->GetBlockNum()];
            Region * predRegion = nullptr;
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = blockToRegion[predBlock->GetBlockNum()];
                if (predBlock->GetLastInstr() == nullptr)
                {
                    AssertMsg(region == predRegion, "Bad region propagation through empty block");
                }
                else
                {
                    // Validate the region transition implied by the
                    // predecessor's terminating opcode.
                    switch (predBlock->GetLastInstr()->m_opcode)
                    {
                    case Js::OpCode::TryCatch:
                    case Js::OpCode::TryFinally:
                        AssertMsg(region->GetParent() == predRegion, "Bad region prop on entry to try-catch/finally");
                        if (block->GetFirstInstr() == predBlock->GetLastInstr()->AsBranchInstr()->GetTarget())
                        {
                            if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::TryCatch)
                            {
                                AssertMsg(region->GetType() == RegionTypeCatch, "Bad region type on entry to catch");
                            }
                            else
                            {
                                AssertMsg(region->GetType() == RegionTypeFinally, "Bad region type on entry to finally");
                            }
                        }
                        else
                        {
                            AssertMsg(region->GetType() == RegionTypeTry, "Bad region type on entry to try");
                        }
                        break;
                    case Js::OpCode::Leave:
                    case Js::OpCode::LeaveNull:
                        AssertMsg(region == predRegion->GetParent() || (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;
                    // If the try region has a branch out of the loop,
                    // - the branch is moved out of the loop as part of break block removal, and
                    // - BrOnException is inverted to BrOnNoException and a Br is inserted after it.
                    // Otherwise,
                    // - FullJit: BrOnException is removed in the forward pass.
                    case Js::OpCode::BrOnException:
                        Assert(!this->func->DoGlobOpt());
                        // FALLTHROUGH: BrOnException shares the region checks below.
                    case Js::OpCode::BrOnNoException:
                        Assert(this->func->HasTry() &&
                            ((!this->func->HasFinally() && !this->func->IsLoopBody() && !PHASE_OFF(Js::OptimizeTryCatchPhase, this->func)) ||
                            (this->func->IsSimpleJit() && this->func->GetJITFunctionBody()->DoJITLoopBody()))); // should be relaxed as more bailouts are added in Simple Jit
                        Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch);
                        if (region->GetType() == RegionTypeCatch)
                        {
                            Assert((predRegion->GetType() == RegionTypeTry) || (predRegion->GetType() == RegionTypeCatch));
                        }
                        else if (region->GetType() == RegionTypeTry)
                        {
                            Assert(region == predRegion);
                        }
                        break;
                    case Js::OpCode::Br:
                        if (region->GetType() == RegionTypeCatch && region != predRegion)
                        {
                            AssertMsg(predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else
                        {
                            AssertMsg(region == predRegion, "Bad region propagation through interior block");
                        }
                        break;
                    default:
                        AssertMsg(region == predRegion, "Bad region propagation through interior block");
                        break;
                    }
                }
            }
            NEXT_PREDECESSOR_BLOCK;
            // Sanity-check the region's own matching-region invariants.
            switch (region->GetType())
            {
            case RegionTypeRoot:
                Assert(!region->GetMatchingTryRegion() && !region->GetMatchingCatchRegion() && !region->GetMatchingFinallyRegion());
                break;
            case RegionTypeTry:
                Assert(!(region->GetMatchingCatchRegion() && region->GetMatchingFinallyRegion()));
                break;
            case RegionTypeCatch:
            case RegionTypeFinally:
                Assert(region->GetMatchingTryRegion());
                break;
            }
        }
        NEXT_BLOCK;
        // Remove leftover unreferenced labels (dead blocks included) so
        // extended basic blocks are visible downstream.
        FOREACH_BLOCK_DEAD_OR_ALIVE(block, this)
        {
            if (block->GetFirstInstr()->IsLabelInstr())
            {
                IR::LabelInstr *labelInstr = block->GetFirstInstr()->AsLabelInstr();
                if (labelInstr->IsUnreferenced())
                {
                    // This is an unreferenced label, probably added by FG building.
                    // Delete it now to make extended basic blocks visible.
                    labelInstr->Remove();
                }
            }
        } NEXT_BLOCK_DEAD_OR_ALIVE;
    }
#endif
    this->func->isFlowGraphValid = false;
}
// Propagate the region forward from the block's predecessor(s), tracking the effect
// of the flow transition. Record the region in the block-to-region map provided
// and on the label at the entry to the block (if any).
void
FlowGraph::UpdateRegionForBlock(BasicBlock * block, Region ** blockToRegion)
{
    Region *region;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        // The label already carries a region (set by an earlier pass); just
        // record it and return.
        Assert(this->func->HasTry() && (this->func->DoOptimizeTryCatch() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        blockToRegion[block->GetBlockNum()] = firstInstr->AsLabelInstr()->GetRegion();
        return;
    }
    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        // We require that there be one, since we've already removed unreachable blocks.
        region = nullptr;
        FOREACH_PREDECESSOR_BLOCK(predBlock, block)
        {
            AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
            predRegion = blockToRegion[predBlock->GetBlockNum()];
            if (predRegion != nullptr)
            {
                // tryInstr is an out-param: set when this transition enters a try.
                region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                break;
            }
        }
        NEXT_PREDECESSOR_BLOCK;
    }
    AnalysisAssertMsg(region != nullptr, "Failed to find region for block");
    if (!region->ehBailoutData)
    {
        region->AllocateEHBailoutData(this->func, tryInstr);
    }
    // Record the region in the block-to-region map.
    blockToRegion[block->GetBlockNum()] = region;
    if (firstInstr->IsLabelInstr())
    {
        // Record the region on the label and make sure it stays around as a region
        // marker if we're entering a region at this point.
        IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
        labelInstr->SetRegion(region);
        if (region != predRegion)
        {
            labelInstr->m_hasNonBranchRef = true;
        }
    }
}
// Compute block's EH region from one already-processed predecessor, based on
// the opcode that terminates the predecessor: TryCatch/TryFinally open a new
// child region (try, or the catch/finally handler), Leave/LeaveNull pop back
// to the parent region, and everything else propagates the region unchanged.
// tryInstr (out) is set to the TryCatch/TryFinally instruction when the
// transition enters a try region.
Region *
FlowGraph::PropagateRegionFromPred(BasicBlock * block, BasicBlock * predBlock, Region * predRegion, IR::Instr * &tryInstr)
{
    // Propagate predRegion to region, looking at the flow transition for an opcode
    // that affects the region.
    Region * region = nullptr;
    IR::Instr * predLastInstr = predBlock->GetLastInstr();
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (predLastInstr == nullptr)
    {
        // Empty block: trivially propagate the region.
        region = predRegion;
    }
    else
    {
        Region * tryRegion = nullptr;
        IR::LabelInstr * tryInstrNext = nullptr;
        switch (predLastInstr->m_opcode)
        {
        case Js::OpCode::TryCatch:
            // Entry to a try-catch. See whether we're entering the try or the catch
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();
            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target is the handler: entering the catch. Link the
                // catch and try regions to each other.
                region = Region::New(RegionTypeCatch, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingCatchRegion(region);
            }
            else
            {
                // Fall-through: entering the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;
        case Js::OpCode::TryFinally:
            // Entry to a try-finally. See whether we're entering the try or the finally
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();
            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target is the handler: entering the finally. Link the
                // finally and try regions to each other.
                region = Region::New(RegionTypeFinally, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region);
            }
            else
            {
                // Fall-through: entering the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;
        case Js::OpCode::Leave:
        case Js::OpCode::LeaveNull:
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;
        default:
            // Normal (non-EH) transition: just propagate the region.
            region = predRegion;
            break;
        }
    }
    return region;
}
// Insert compBlock into loop's block list adjacent to targetBlock: before it,
// or after it when postTarget is true. Recurses into parent loops so every
// enclosing loop's block list that contains targetBlock also gets compBlock.
// No-op (other than recursion ending) when loop is null or targetBlock is not
// in this loop's list.
void
FlowGraph::InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget)
{
    if (loop)
    {
        bool found = false;
        // Position the editing iterator at targetBlock. Note: 'iter' is
        // declared by the macro and remains valid after the early break.
        FOREACH_BLOCK_IN_LOOP_EDITING(loopBlock, loop, iter)
        {
            if (loopBlock == targetBlock)
            {
                found = true;
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        if (found)
        {
            if (postTarget)
            {
                // Step past targetBlock so the insert lands after it.
                iter.Next();
            }
            iter.InsertBefore(compBlock);
        }
        // Keep the enclosing loops' block lists consistent as well.
        InsertCompBlockToLoopList(loop->parent, compBlock, targetBlock, postTarget);
    }
}
// Insert a block on the given edge.
// The airlock block gives the edge sourceBlock -> sinkBlock its own block so
// compensation code can be emitted on that edge alone. Returns the new block.
BasicBlock *
FlowGraph::InsertAirlockBlock(FlowEdge * edge)
{
    BasicBlock * airlockBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * sinkPrevBlock = sinkBlock->prev;
    IR::Instr * sinkPrevBlockLastInstr = sinkPrevBlock->GetLastInstr();
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();
    // The airlock belongs to the same loop as the sink it falls into.
    airlockBlock->loop = sinkBlock->loop;
    airlockBlock->SetBlockNum(this->blockCount++);
#ifdef DBG
    airlockBlock->isAirLockBlock = true;
#endif
    //
    // Fixup block linkage
    //
    // airlock block is inserted right before sinkBlock in layout order
    airlockBlock->prev = sinkBlock->prev;
    sinkBlock->prev = airlockBlock;
    airlockBlock->next = sinkBlock;
    airlockBlock->prev->next = airlockBlock;
    //
    // Fixup flow edges
    //
    // Drop the direct sourceBlock -> sinkBlock edge (false: don't also clean
    // the pred side here; the original edge object is reused below).
    sourceBlock->RemoveSucc(sinkBlock, this, false);
    // Add sourceBlock -> airlockBlock
    this->AddEdge(sourceBlock, airlockBlock);
    // Add airlockBlock -> sinkBlock by re-pointing the original edge's pred end
    edge->SetPred(airlockBlock);
    airlockBlock->AddSucc(edge, this);
    // Fixup data use count
    airlockBlock->SetDataUseCount(1);
    sourceBlock->DecrementDataUseCount();
    //
    // Fixup IR
    //
    // Maintain the instruction region for inlining: the airlock's label is
    // created in the same func as the sink's label.
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;
    IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);
    sinkPrevBlockLastInstr->InsertAfter(airlockLabel);
    airlockBlock->SetFirstInstr(airlockLabel);
    airlockLabel->SetBasicBlock(airlockBlock);
    // Add br to sinkBlock from airlock block
    IR::BranchInstr *airlockBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    airlockBr->SetByteCodeOffset(sinkLabel);
    airlockLabel->InsertAfter(airlockBr);
    airlockBlock->SetLastInstr(airlockBr);
    airlockLabel->SetByteCodeOffset(sinkLabel);
    // Fixup flow out of sourceBlock: retarget its branch at the airlock.
    IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
    if (sourceBr->IsMultiBranch())
    {
        const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, airlockLabel);
        Assert(replaced);
    }
    else if (sourceBr->GetTarget() == sinkLabel)
    {
        sourceBr->SetTarget(airlockLabel);
    }
    // If the block that used to fall through into sinkBlock still does, it now
    // falls into the airlock instead; give it a compensation block so its edge
    // to sinkBlock survives.
    if (!sinkPrevBlockLastInstr->IsBranchInstr() || sinkPrevBlockLastInstr->AsBranchInstr()->HasFallThrough())
    {
        if (!sinkPrevBlock->isDeleted)
        {
            FlowEdge *dstEdge = this->FindEdge(sinkPrevBlock, sinkBlock);
            if (dstEdge) // Possibility that sourceblock may be same as sinkPrevBlock
            {
                BasicBlock* compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert comp block to loop list*/, true);
                compensationBlock->IncrementDataUseCount();
                // We need to skip airlock compensation block in globopt as its inserted while globopt is iteration over the blocks.
                compensationBlock->isAirLockCompensationBlock = true;
            }
        }
    }
#if DBG_DUMP
    this->Dump(true, _u("\n After insertion of airlock block \n"));
#endif
    return airlockBlock;
}
// Insert a block on the given edge.
// The compensation block is laid out right after the edge's pred and carries a
// Br to the edge's succ, so moving blocks around does not break the edge.
// insertToLoopList: also register the new block in the relevant loop lists.
// sinkBlockLoop: the block logically belongs to the sink's loop (true) or the
// source's loop (false). Returns the new compensation block.
BasicBlock *
FlowGraph::InsertCompensationCodeForBlockMove(FlowEdge * edge, bool insertToLoopList, bool sinkBlockLoop)
{
    BasicBlock * compBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * fallthroughBlock = sourceBlock->next;
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();
    compBlock->SetBlockNum(this->blockCount++);
    if (insertToLoopList)
    {
        // For flow graph edits in
        if (sinkBlockLoop)
        {
            if (sinkBlock->loop && sinkBlock->loop->GetHeadBlock() == sinkBlock)
            {
                // BLUE 531255: sinkblock may be the head block of new loop, we shouldn't insert compensation block to that loop
                // Insert it to all the parent loop lists.
                compBlock->loop = sinkBlock->loop->parent;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false);
            }
            else
            {
                compBlock->loop = sinkBlock->loop;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false); // sinkBlock or fallthroughBlock?
            }
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSink = true;
#endif
        }
        else
        {
            compBlock->loop = sourceBlock->loop;
            // Insert after the source block in its loop lists.
            InsertCompBlockToLoopList(compBlock->loop, compBlock, sourceBlock, true);
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSource = true;
#endif
        }
    }
    //
    // Fixup block linkage
    //
    // compensation block is inserted right after sourceBlock
    compBlock->next = fallthroughBlock;
    fallthroughBlock->prev = compBlock;
    compBlock->prev = sourceBlock;
    sourceBlock->next = compBlock;
    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);
    // Add sourceBlock -> compBlock
    this->AddEdge(sourceBlock, compBlock);
    // Add compBlock -> sinkBlock by re-pointing the original edge's pred end
    edge->SetPred(compBlock);
    compBlock->AddSucc(edge, this);
    //
    // Fixup IR
    //
    // Maintain the instruction region for inlining
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;
    IR::LabelInstr *compLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);
    sourceLastInstr->InsertAfter(compLabel);
    compBlock->SetFirstInstr(compLabel);
    compLabel->SetBasicBlock(compBlock);
    // Add br to sinkBlock from compensation block
    IR::BranchInstr *compBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    compBr->SetByteCodeOffset(sinkLabel);
    compLabel->InsertAfter(compBr);
    compBlock->SetLastInstr(compBr);
    compLabel->SetByteCodeOffset(sinkLabel);
    // Fixup flow out of sourceBlock: only a multi-branch can explicitly target
    // sinkLabel here; a conditional's taken edge is unaffected and its
    // fall-through now lands on the compensation block.
    if (sourceLastInstr->IsBranchInstr())
    {
        IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
        Assert(sourceBr->IsMultiBranch() || sourceBr->IsConditional());
        if (sourceBr->IsMultiBranch())
        {
            const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, compLabel);
            Assert(replaced);
        }
    }
    return compBlock;
}
// Mark-and-sweep removal of blocks not reachable from the entry block.
void
FlowGraph::RemoveUnreachableBlocks()
{
    AnalysisAssert(this->blockList);
    // Pass 1: clear the visited flag on every block.
    FOREACH_BLOCK(block, this)
    {
        block->isVisited = false;
    }
    NEXT_BLOCK;
    // The entry block is reachable by definition.
    this->blockList->isVisited = true;
    // Pass 2: walk blocks in layout order, propagating reachability to the
    // successors of each visited block and deleting any block no one reached.
    // NOTE(review): a single forward pass assumes that some predecessor of
    // every reachable block precedes it in layout order (back-edges target
    // already-seen loop headers) — confirm against the block-list ordering.
    FOREACH_BLOCK_EDITING(block, this)
    {
        if (block->isVisited)
        {
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                succ->isVisited = true;
            } NEXT_SUCCESSOR_BLOCK;
        }
        else
        {
            this->RemoveBlock(block);
        }
    }
    NEXT_BLOCK_EDITING;
}
// If block has no predecessor, remove it.
// Returns true if the block was determined dead and removed.
bool
FlowGraph::RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt)
{
    bool isDead = false;
    // The entry block (func->m_fg->blockList) is never removed, even with no preds.
    if ((block->GetPredList() == nullptr || block->GetPredList()->Empty()) && block != this->func->m_fg->blockList)
    {
        isDead = true;
    }
    else if (block->isLoopHeader)
    {
        // A dead loop still has back-edges pointing to it...
        // It is dead only if every remaining predecessor comes from inside the
        // loop itself (i.e. only back-edges are left).
        isDead = true;
        FOREACH_PREDECESSOR_BLOCK(pred, block)
        {
            if (!block->loop->IsDescendentOrSelf(pred->loop))
            {
                isDead = false;
            }
        } NEXT_PREDECESSOR_BLOCK;
    }
    if (isDead)
    {
        this->RemoveBlock(block, globOpt);
        return true;
    }
    return false;
}
// Peephole: fuse a typed (int/var) compare that feeds BrTrue_I4/BrFalse_I4 —
// possibly through one or two intermediate Ld_I4 copies — into a single
// conditional branch. Returns the rewritten branch instruction, or nullptr if
// the pattern does not match (in which case nothing is changed).
IR::Instr *
FlowGraph::PeepTypedCm(IR::Instr *instr)
{
    // Basic pattern, peep:
    //     t1 = CmEq a, b
    //     BrTrue_I4 $L1, t1
    // Into:
    //     t1 = 1
    //     BrEq $L1, a, b
    //     t1 = 0
    IR::Instr * instrNext = instr->GetNextRealInstrOrLabel();
    // find intermediate Lds
    IR::Instr * instrLd = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }
    // A second Ld can only follow a first one (instrNext only advanced above).
    IR::Instr * instrLd2 = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd2 = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }
    // Find BrTrue/BrFalse
    IR::Instr *instrBr;
    bool brIsTrue;
    if (instrNext->m_opcode == Js::OpCode::BrTrue_I4)
    {
        instrBr = instrNext;
        brIsTrue = true;
    }
    else if (instrNext->m_opcode == Js::OpCode::BrFalse_I4)
    {
        instrBr = instrNext;
        brIsTrue = false;
    }
    else
    {
        return nullptr;
    }
    AssertMsg(instrLd || (!instrLd && !instrLd2), "Either instrLd is non-null or both null");
    // if we have intermediate Lds, then make sure pattern is:
    //     t1 = CmEq a, b
    //     t2 = Ld_A t1
    //     BrTrue $L1, t2
    if (instrLd && !instrLd->GetSrc1()->IsEqual(instr->GetDst()))
    {
        return nullptr;
    }
    if (instrLd2 && !instrLd2->GetSrc1()->IsEqual(instrLd->GetDst()))
    {
        return nullptr;
    }
    // Make sure we have:
    //     t1 = CmEq a, b
    //     BrTrue/BrFalse t1
    // i.e. the branch consumes the compare result (directly or via the copies).
    if (!(instr->GetDst()->IsEqual(instrBr->GetSrc1()) || (instrLd && instrLd->GetDst()->IsEqual(instrBr->GetSrc1())) || (instrLd2 && instrLd2->GetDst()->IsEqual(instrBr->GetSrc1()))))
    {
        return nullptr;
    }
    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();
    IR::Instr * instrNew;
    IR::Opnd * tmpOpnd;
    // If a compare input aliases one of the dsts we are about to overwrite with
    // 0/1, snapshot it into a temp first so the branch still sees the old value.
    if (instr->GetDst()->IsEqual(src1) || (instrLd && instrLd->GetDst()->IsEqual(src1)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src1)))
    {
        Assert(src1->IsInt32());
        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src1, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src1 = tmpOpnd;
    }
    if (instr->GetDst()->IsEqual(src2) || (instrLd && instrLd->GetDst()->IsEqual(src2)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src2)))
    {
        Assert(src2->IsInt32());
        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src2, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src2 = tmpOpnd;
    }
    // The branch now compares the original operands directly.
    instrBr->ReplaceSrc1(src1);
    instrBr->SetSrc2(src2);
    // Map the compare opcode to the equivalent branch opcode.
    Js::OpCode newOpcode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
        newOpcode = Js::OpCode::BrEq_I4;
        break;
    case Js::OpCode::CmGe_I4:
        newOpcode = Js::OpCode::BrGe_I4;
        break;
    case Js::OpCode::CmGt_I4:
        newOpcode = Js::OpCode::BrGt_I4;
        break;
    case Js::OpCode::CmLt_I4:
        newOpcode = Js::OpCode::BrLt_I4;
        break;
    case Js::OpCode::CmLe_I4:
        newOpcode = Js::OpCode::BrLe_I4;
        break;
    case Js::OpCode::CmUnGe_I4:
        newOpcode = Js::OpCode::BrUnGe_I4;
        break;
    case Js::OpCode::CmUnGt_I4:
        newOpcode = Js::OpCode::BrUnGt_I4;
        break;
    case Js::OpCode::CmUnLt_I4:
        newOpcode = Js::OpCode::BrUnLt_I4;
        break;
    case Js::OpCode::CmUnLe_I4:
        newOpcode = Js::OpCode::BrUnLe_I4;
        break;
    case Js::OpCode::CmNeq_I4:
        newOpcode = Js::OpCode::BrNeq_I4;
        break;
    case Js::OpCode::CmEq_A:
        newOpcode = Js::OpCode::BrEq_A;
        break;
    case Js::OpCode::CmGe_A:
        newOpcode = Js::OpCode::BrGe_A;
        break;
    case Js::OpCode::CmGt_A:
        newOpcode = Js::OpCode::BrGt_A;
        break;
    case Js::OpCode::CmLt_A:
        newOpcode = Js::OpCode::BrLt_A;
        break;
    case Js::OpCode::CmLe_A:
        newOpcode = Js::OpCode::BrLe_A;
        break;
    case Js::OpCode::CmUnGe_A:
        newOpcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::CmUnGt_A:
        newOpcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmUnLt_A:
        newOpcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::CmUnLe_A:
        newOpcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::CmNeq_A:
        newOpcode = Js::OpCode::BrNeq_A;
        break;
    case Js::OpCode::CmSrEq_A:
        newOpcode = Js::OpCode::BrSrEq_A;
        break;
    case Js::OpCode::CmSrNeq_A:
        newOpcode = Js::OpCode::BrSrNeq_A;
        break;
    default:
        newOpcode = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }
    instrBr->m_opcode = newOpcode;
    // Materialize the boolean results on both paths: the value on the
    // branch-taken path is stored before the branch, the fall-through value
    // after it (for BrFalse the branch is inverted so taken == compare-false).
    if (brIsTrue)
    {
        instr->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);
            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }
    else
    {
        instrBr->AsBranchInstr()->Invert();
        instr->SetSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);
            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }
    return instrBr;
}
  1759. IR::Instr *
  1760. FlowGraph::PeepCm(IR::Instr *instr)
  1761. {
  1762. // Basic pattern, peep:
  1763. // t1 = CmEq a, b
  1764. // t2 = Ld_A t1
  1765. // BrTrue $L1, t2
  1766. // Into:
  1767. // t1 = True
  1768. // t2 = True
  1769. // BrEq $L1, a, b
  1770. // t1 = False
  1771. // t2 = False
  1772. //
  1773. // The true/false Ld_A's will most likely end up being dead-stores...
  1774. // Alternate Pattern
  1775. // t1= CmEq a, b
  1776. // BrTrue $L1, t1
  1777. // Into:
  1778. // BrEq $L1, a, b
  1779. Func *func = instr->m_func;
  1780. // Find Ld_A
  1781. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  1782. IR::Instr *inlineeEndInstr = nullptr;
  1783. IR::Instr *instrNew;
  1784. IR::Instr *instrLd = nullptr, *instrLd2 = nullptr;
  1785. IR::Instr *instrByteCode = instrNext;
  1786. bool ldFound = false;
  1787. IR::Opnd *brSrc = instr->GetDst();
  1788. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(instr->GetDst()))
  1789. {
  1790. ldFound = true;
  1791. instrLd = instrNext;
  1792. brSrc = instrNext->GetDst();
  1793. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  1794. {
  1795. return nullptr;
  1796. }
  1797. instrNext = instrLd->GetNextRealInstrOrLabel();
  1798. // Is there a second Ld_A?
  1799. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(brSrc))
  1800. {
  1801. // We have:
  1802. // t1 = Cm
  1803. // t2 = t1 // ldSrc = t1
  1804. // t3 = t2 // ldDst = t3
  1805. // BrTrue/BrFalse t3
  1806. instrLd2 = instrNext;
  1807. brSrc = instrLd2->GetDst();
  1808. instrNext = instrLd2->GetNextRealInstrOrLabel();
  1809. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  1810. {
  1811. return nullptr;
  1812. }
  1813. }
  1814. }
  1815. // Skip over InlineeEnd
  1816. if (instrNext->m_opcode == Js::OpCode::InlineeEnd)
  1817. {
  1818. inlineeEndInstr = instrNext;
  1819. instrNext = inlineeEndInstr->GetNextRealInstrOrLabel();
  1820. }
  1821. // Find BrTrue/BrFalse
  1822. bool brIsTrue;
  1823. if (instrNext->m_opcode == Js::OpCode::BrTrue_A)
  1824. {
  1825. brIsTrue = true;
  1826. }
  1827. else if (instrNext->m_opcode == Js::OpCode::BrFalse_A)
  1828. {
  1829. brIsTrue = false;
  1830. }
  1831. else
  1832. {
  1833. return nullptr;
  1834. }
  1835. IR::Instr *instrBr = instrNext;
  1836. // Make sure we have:
  1837. // t1 = Ld_A
  1838. // BrTrue/BrFalse t1
  1839. if (!instr->GetDst()->IsEqual(instrBr->GetSrc1()) && !brSrc->IsEqual(instrBr->GetSrc1()))
  1840. {
  1841. return nullptr;
  1842. }
  1843. //
  1844. // We have a match. Generate the new branch
  1845. //
  1846. // BrTrue/BrFalse t1
  1847. // Keep a copy of the inliner func and the bytecode offset of the original BrTrue/BrFalse if we end up inserting a new branch out of the inlinee,
  1848. // and sym id of t1 for proper restoration on a bailout before the branch.
  1849. Func* origBrFunc = instrBr->m_func;
  1850. uint32 origBrByteCodeOffset = instrBr->GetByteCodeOffset();
  1851. uint32 origBranchSrcSymId = instrBr->GetSrc1()->GetStackSym()->m_id;
  1852. bool origBranchSrcOpndIsJITOpt = instrBr->GetSrc1()->GetIsJITOptimizedReg();
  1853. instrBr->Unlink();
  1854. instr->InsertBefore(instrBr);
  1855. instrBr->ClearByteCodeOffset();
  1856. instrBr->SetByteCodeOffset(instr);
  1857. instrBr->FreeSrc1();
  1858. instrBr->SetSrc1(instr->UnlinkSrc1());
  1859. instrBr->SetSrc2(instr->UnlinkSrc2());
  1860. instrBr->m_func = instr->m_func;
  1861. Js::OpCode newOpcode;
  1862. switch(instr->m_opcode)
  1863. {
  1864. case Js::OpCode::CmEq_A:
  1865. newOpcode = Js::OpCode::BrEq_A;
  1866. break;
  1867. case Js::OpCode::CmGe_A:
  1868. newOpcode = Js::OpCode::BrGe_A;
  1869. break;
  1870. case Js::OpCode::CmGt_A:
  1871. newOpcode = Js::OpCode::BrGt_A;
  1872. break;
  1873. case Js::OpCode::CmLt_A:
  1874. newOpcode = Js::OpCode::BrLt_A;
  1875. break;
  1876. case Js::OpCode::CmLe_A:
  1877. newOpcode = Js::OpCode::BrLe_A;
  1878. break;
  1879. case Js::OpCode::CmUnGe_A:
  1880. newOpcode = Js::OpCode::BrUnGe_A;
  1881. break;
  1882. case Js::OpCode::CmUnGt_A:
  1883. newOpcode = Js::OpCode::BrUnGt_A;
  1884. break;
  1885. case Js::OpCode::CmUnLt_A:
  1886. newOpcode = Js::OpCode::BrUnLt_A;
  1887. break;
  1888. case Js::OpCode::CmUnLe_A:
  1889. newOpcode = Js::OpCode::BrUnLe_A;
  1890. break;
  1891. case Js::OpCode::CmNeq_A:
  1892. newOpcode = Js::OpCode::BrNeq_A;
  1893. break;
  1894. case Js::OpCode::CmSrEq_A:
  1895. newOpcode = Js::OpCode::BrSrEq_A;
  1896. break;
  1897. case Js::OpCode::CmSrNeq_A:
  1898. newOpcode = Js::OpCode::BrSrNeq_A;
  1899. break;
  1900. default:
  1901. Assert(UNREACHED);
  1902. __assume(UNREACHED);
  1903. }
  1904. instrBr->m_opcode = newOpcode;
  1905. IR::AddrOpnd* trueOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetTrueAddr(), IR::AddrOpndKindDynamicVar, func, true);
  1906. IR::AddrOpnd* falseOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetFalseAddr(), IR::AddrOpndKindDynamicVar, func, true);
  1907. trueOpnd->SetValueType(ValueType::Boolean);
  1908. falseOpnd->SetValueType(ValueType::Boolean);
  1909. if (ldFound)
  1910. {
  1911. // Split Ld_A into "Ld_A TRUE"/"Ld_A FALSE"
  1912. if (brIsTrue)
  1913. {
  1914. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  1915. instrNew->SetByteCodeOffset(instrBr);
  1916. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1917. instrBr->InsertBefore(instrNew);
  1918. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), trueOpnd, instrBr->m_func);
  1919. instrNew->SetByteCodeOffset(instrBr);
  1920. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1921. instrBr->InsertBefore(instrNew);
  1922. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrLd->m_func);
  1923. instrLd->InsertBefore(instrNew);
  1924. instrNew->SetByteCodeOffset(instrLd);
  1925. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1926. instrLd->ReplaceSrc1(falseOpnd);
  1927. if (instrLd2)
  1928. {
  1929. instrLd2->ReplaceSrc1(falseOpnd);
  1930. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd2->GetDst(), trueOpnd, instrBr->m_func);
  1931. instrBr->InsertBefore(instrNew);
  1932. instrNew->SetByteCodeOffset(instrBr);
  1933. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1934. }
  1935. }
  1936. else
  1937. {
  1938. instrBr->AsBranchInstr()->Invert();
  1939. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrBr->m_func);
  1940. instrBr->InsertBefore(instrNew);
  1941. instrNew->SetByteCodeOffset(instrBr);
  1942. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1943. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), falseOpnd, instrBr->m_func);
  1944. instrBr->InsertBefore(instrNew);
  1945. instrNew->SetByteCodeOffset(instrBr);
  1946. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1947. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrLd->m_func);
  1948. instrLd->InsertBefore(instrNew);
  1949. instrNew->SetByteCodeOffset(instrLd);
  1950. instrLd->ReplaceSrc1(trueOpnd);
  1951. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1952. if (instrLd2)
  1953. {
  1954. instrLd2->ReplaceSrc1(trueOpnd);
  1955. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  1956. instrBr->InsertBefore(instrNew);
  1957. instrNew->SetByteCodeOffset(instrBr);
  1958. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  1959. }
  1960. }
  1961. }
  1962. // Fix InlineeEnd
  1963. if (inlineeEndInstr)
  1964. {
  1965. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr, instrByteCode , origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
  1966. }
  1967. if (instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
  1968. {
  1969. Assert(!instrBr->AsBranchInstr()->HasByteCodeReg());
  1970. StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;
  1971. instrBr->AsBranchInstr()->SetByteCodeReg(dstSym->GetByteCodeRegSlot());
  1972. }
  1973. instr->Remove();
  1974. //
  1975. // Try optimizing through a second branch.
  1976. // Peep:
  1977. //
  1978. // t2 = True;
  1979. // BrTrue $L1
  1980. // ...
  1981. // L1:
  1982. // t1 = Ld_A t2
  1983. // BrTrue $L2
  1984. //
  1985. // Into:
  1986. // t2 = True;
  1987. // t1 = True;
  1988. // BrTrue $L2 <---
  1989. // ...
  1990. // L1:
  1991. // t1 = Ld_A t2
  1992. // BrTrue $L2
  1993. //
  1994. // This cleanup helps expose second level Cm peeps.
  1995. IR::Instr *instrLd3 = instrBr->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  1996. // Skip over branch to branch
  1997. while (instrLd3->m_opcode == Js::OpCode::Br)
  1998. {
  1999. instrLd3 = instrLd3->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2000. }
  2001. // Find Ld_A
  2002. if (instrLd3->m_opcode != Js::OpCode::Ld_A)
  2003. {
  2004. return instrBr;
  2005. }
  2006. IR::Instr *instrBr2 = instrLd3->GetNextRealInstrOrLabel();
  2007. IR::Instr *inlineeEndInstr2 = nullptr;
  2008. // InlineeEnd?
  2009. // REVIEW: Can we handle 2 inlineeEnds?
  2010. if (instrBr2->m_opcode == Js::OpCode::InlineeEnd && !inlineeEndInstr)
  2011. {
  2012. inlineeEndInstr2 = instrBr2;
  2013. instrBr2 = instrBr2->GetNextRealInstrOrLabel();
  2014. }
  2015. // Find branch
  2016. bool brIsTrue2;
  2017. if (instrBr2->m_opcode == Js::OpCode::BrTrue_A)
  2018. {
  2019. brIsTrue2 = true;
  2020. }
  2021. else if (instrBr2->m_opcode == Js::OpCode::BrFalse_A)
  2022. {
  2023. brIsTrue2 = false;
  2024. }
  2025. else
  2026. {
  2027. return nullptr;
  2028. }
  2029. // Make sure Ld_A operates on the right tmps.
  2030. if (!instrLd3->GetDst()->IsEqual(instrBr2->GetSrc1()) || !brSrc->IsEqual(instrLd3->GetSrc1()))
  2031. {
  2032. return nullptr;
  2033. }
  2034. if (instrLd3->GetDst()->IsEqual(instrBr->GetSrc1()) || instrLd3->GetDst()->IsEqual(instrBr->GetSrc2()))
  2035. {
  2036. return nullptr;
  2037. }
  2038. // Make sure that the reg we're assigning to is not live in the intervening instructions (if this is a forward branch).
  2039. if (instrLd3->GetByteCodeOffset() > instrBr->GetByteCodeOffset())
  2040. {
  2041. StackSym *symLd3 = instrLd3->GetDst()->AsRegOpnd()->m_sym;
  2042. if (IR::Instr::FindRegUseInRange(symLd3, instrBr->m_next, instrLd3))
  2043. {
  2044. return nullptr;
  2045. }
  2046. }
  2047. //
  2048. // We have a match!
  2049. //
  2050. if(inlineeEndInstr2)
  2051. {
  2052. origBrFunc = instrBr2->m_func;
  2053. origBrByteCodeOffset = instrBr2->GetByteCodeOffset();
  2054. origBranchSrcSymId = instrBr2->GetSrc1()->GetStackSym()->m_id;
  2055. }
  2056. // Fix Ld_A
  2057. if (brIsTrue)
  2058. {
  2059. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), trueOpnd, instrBr->m_func);
  2060. instrBr->InsertBefore(instrNew);
  2061. instrNew->SetByteCodeOffset(instrBr);
  2062. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2063. }
  2064. else
  2065. {
  2066. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), falseOpnd, instrBr->m_func);
  2067. instrBr->InsertBefore(instrNew);
  2068. instrNew->SetByteCodeOffset(instrBr);
  2069. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2070. }
  2071. IR::LabelInstr *brTarget2;
  2072. // Retarget branch
  2073. if (brIsTrue2 == brIsTrue)
  2074. {
  2075. brTarget2 = instrBr2->AsBranchInstr()->GetTarget();
  2076. }
  2077. else
  2078. {
  2079. brTarget2 = IR::LabelInstr::New(Js::OpCode::Label, instrBr2->m_func);
  2080. brTarget2->SetByteCodeOffset(instrBr2->m_next);
  2081. instrBr2->InsertAfter(brTarget2);
  2082. }
  2083. instrBr->AsBranchInstr()->SetTarget(brTarget2);
  2084. // InlineeEnd?
  2085. if (inlineeEndInstr2)
  2086. {
  2087. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr2, instrByteCode, origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
  2088. }
  2089. return instrBr;
  2090. }
// Helper for PeepsCm code.
//
// We've skipped some InlineeEnd. Globopt expects to see these
// on all flow paths out of the inlinee. Insert an InlineeEnd
// on the new path:
//   BrEq $L1, a, b
// Becomes:
//   BrNeq $L2, a, b
//   InlineeEnd
//   Br $L1
// L2:
void
FlowGraph::InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, bool origBranchSrcOpndIsJITOpt, uint32 origBranchSrcSymId)
{
    // Invert the branch and fall through into the new InlineeEnd + Br sequence.
    instrBr->AsBranchInstr()->Invert();
    // Unconditional Br to the original target, attributed to the inliner func.
    IR::BranchInstr *newBr = IR::BranchInstr::New(Js::OpCode::Br, instrBr->AsBranchInstr()->GetTarget(), origBrFunc);
    newBr->SetByteCodeOffset(origByteCodeOffset);
    instrBr->InsertAfter(newBr);
    // New label $L2 becomes the inverted branch's target (skips the sequence).
    IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, instrBr->m_func);
    newLabel->SetByteCodeOffset(instrBytecode);
    newBr->InsertAfter(newLabel);
    instrBr->AsBranchInstr()->SetTarget(newLabel);
    // Clone the InlineeEnd onto the new path, sharing the original's srcs.
    IR::Instr *newInlineeEnd = IR::Instr::New(Js::OpCode::InlineeEnd, inlineeEndInstr->m_func);
    newInlineeEnd->SetSrc1(inlineeEndInstr->GetSrc1());
    newInlineeEnd->SetSrc2(inlineeEndInstr->GetSrc2());
    newInlineeEnd->SetByteCodeOffset(instrBytecode);
    newInlineeEnd->SetIsCloned(true);  // Mark it as cloned - this is used later by the inlinee args optimization
    newBr->InsertBefore(newInlineeEnd);
    // Record the branch-source sym as a bytecode use so a bailout before the
    // branch can restore it.
    IR::ByteCodeUsesInstr * useOrigBranchSrcInstr = IR::ByteCodeUsesInstr::New(origBrFunc, origByteCodeOffset);
    useOrigBranchSrcInstr->SetRemovedOpndSymbol(origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
    newBr->InsertBefore(useOrigBranchSrcInstr);
    uint newBrFnNumber = newBr->m_func->GetFunctionNumber();
    Assert(newBrFnNumber == origBrFunc->GetFunctionNumber());
    // The function numbers of the new branch and the inlineeEnd instruction should be different (ensuring that the new branch is not added in the inlinee but in the inliner).
    // Only case when they can be same is recursive calls - inlinee and inliner are the same function
    Assert(newBrFnNumber != inlineeEndInstr->m_func->GetFunctionNumber() ||
        newBrFnNumber == inlineeEndInstr->m_func->GetParentFunc()->GetFunctionNumber());
}
  2129. BasicBlock *
  2130. BasicBlock::New(FlowGraph * graph)
  2131. {
  2132. BasicBlock * block;
  2133. block = JitAnew(graph->alloc, BasicBlock, graph->alloc, graph->GetFunc());
  2134. return block;
  2135. }
// Prepend an incoming edge to this block's predecessor list.
void
BasicBlock::AddPred(FlowEdge * edge, FlowGraph * graph)
{
    this->predList.Prepend(graph->alloc, edge);
}
// Prepend an outgoing edge to this block's successor list.
void
BasicBlock::AddSucc(FlowEdge * edge, FlowGraph * graph)
{
    this->succList.Prepend(graph->alloc, edge);
}
// Remove the edge from 'block', freeing it and cleaning the mirror succ entry.
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph)
{
    this->RemovePred(block, graph, true, false);
}
// Remove the edge to 'block', freeing it and cleaning the mirror pred entry.
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph)
{
    this->RemoveSucc(block, graph, true, false);
}
// Remove the edge from 'block' but park it on the dead-pred list instead of
// freeing it.
void
BasicBlock::RemoveDeadPred(BasicBlock *block, FlowGraph * graph)
{
    this->RemovePred(block, graph, true, true);
}
// Remove the edge to 'block' but park it on the dead-succ list instead of
// freeing it.
void
BasicBlock::RemoveDeadSucc(BasicBlock *block, FlowGraph * graph)
{
    this->RemoveSucc(block, graph, true, true);
}
// Remove (or, with moveToDead, park on the dead-pred list) the edge coming in
// from 'block'. doCleanSucc also removes the mirror entry from 'block's
// successor list. Asserts if no such edge exists.
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            // The succ end of an edge in our pred list.
            BasicBlock *blockSucc = edge->GetSucc();
            if (moveToDead)
            {
                // Keep the edge around (e.g. for later bookkeeping) rather than freeing it.
                iter.MoveCurrentTo(this->GetDeadPredList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanSucc)
            {
                // false: don't recurse back into RemovePred.
                block->RemoveSucc(this, graph, false, moveToDead);
            }
            // A loop header left with a single predecessor has lost its
            // back-edge(s): mark the loop dead.
            if (blockSucc->isLoopHeader && blockSucc->loop && blockSucc->GetPredList()->HasOne())
            {
                Loop *loop = blockSucc->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Remove (or, with moveToDead, park on the dead-succ list) the edge going out
// to 'block'. doCleanPred also removes the mirror entry from 'block's
// predecessor list. Asserts if no such edge exists.
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            if (moveToDead)
            {
                // Keep the edge around rather than freeing it.
                iter.MoveCurrentTo(this->GetDeadSuccList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanPred)
            {
                // false: don't recurse back into RemoveSucc.
                block->RemovePred(this, graph, false, moveToDead);
            }
            // A loop header left with a single predecessor has lost its
            // back-edge(s): mark the loop dead.
            if (block->isLoopHeader && block->loop && block->GetPredList()->HasOne())
            {
                Loop *loop = block->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Unlink the edge from 'block' (without freeing it), cleaning both sides.
void
BasicBlock::UnlinkPred(BasicBlock *block)
{
    this->UnlinkPred(block, true);
}
  2230. void
  2231. BasicBlock::UnlinkSucc(BasicBlock *block)
  2232. {
  2233. this->UnlinkSucc(block, true);
  2234. }
// Unlink (without freeing) the edge whose predecessor is 'block' from this
// block's pred list. When doCleanSucc, the mirrored entry in 'block's succ
// list is unlinked as well. Only list linkage is broken; the edge object
// itself is not released.
void
BasicBlock::UnlinkPred(BasicBlock *block, bool doCleanSucc)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanSucc)
            {
                // 'false' stops the mutual recursion after one round trip.
                block->UnlinkSucc(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
// Unlink (without freeing) the edge whose successor is 'block' from this
// block's succ list. When doCleanPred, the mirrored entry in 'block's pred
// list is unlinked as well. Mirror image of UnlinkPred(block, doCleanSucc).
void
BasicBlock::UnlinkSucc(BasicBlock *block, bool doCleanPred)
{
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanPred)
            {
                // 'false' stops the mutual recursion after one round trip.
                block->UnlinkPred(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
  2269. bool
  2270. BasicBlock::IsLandingPad()
  2271. {
  2272. BasicBlock * nextBlock = this->GetNext();
  2273. return nextBlock && nextBlock->loop && nextBlock->isLoopHeader && nextBlock->loop->landingPad == this;
  2274. }
// Remove a single instruction from a block being deleted. Outside of globopt
// the instruction is simply deleted and the previous instruction is returned.
// During globopt (globOpt != nullptr) the graph must be preserved well enough
// for bailout to record byte-code uses, so labels and existing ByteCodeUses
// instructions are kept, and other instructions are converted to ByteCodeUses
// where possible. Returns the instruction now occupying this position, which
// the caller uses to update the block's last-instr pointer.
IR::Instr *
FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
{
    IR::Instr *instrPrev = instr->m_prev;
    if (globOpt)
    {
        // Removing block during glob opt. Need to maintain the graph so that
        // bailout will record the byte code use in case the dead code is exposed
        // by dyno-pogo optimization (where bailout need the byte code uses from
        // the dead blocks where it may not be dead after bailing out)
        if (instr->IsLabelInstr())
        {
            // Keep the label (other code may still branch to it) but clear its
            // loop-top marking since the loop structure is going away.
            instr->AsLabelInstr()->m_isLoopTop = false;
            return instr;
        }
        else if (instr->IsByteCodeUsesInstr())
        {
            // Already in the form bailout needs; keep as-is.
            return instr;
        }
        /*
         * Scope object has to be implicitly live whenever Heap Arguments object is live.
         *      - When we restore HeapArguments object in the bail out path, it expects the scope object also to be restored - if one was created.
         */
        Js::OpCode opcode = instr->m_opcode;
        if (opcode == Js::OpCode::LdElemI_A && instr->DoStackArgsOpt(this->func) &&
            globOpt->IsArgumentsOpnd(instr->GetSrc1()) && instr->m_func->GetScopeObjSym())
        {
            // Record the scope object as a non-operand byte-code use (see note above).
            IR::ByteCodeUsesInstr * byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
            byteCodeUsesInstr->SetNonOpndSymbol(instr->m_func->GetScopeObjSym()->m_id);
            instr->InsertAfter(byteCodeUsesInstr);
        }
        IR::ByteCodeUsesInstr * newByteCodeUseInstr = globOpt->ConvertToByteCodeUses(instr);
        if (newByteCodeUseInstr != nullptr)
        {
            // We don't care about property used in these instruction
            // It is only necessary for field copy prop so that we will keep the implicit call
            // up to the copy prop location.
            newByteCodeUseInstr->propertySymUse = nullptr;

            if (opcode == Js::OpCode::Yield)
            {
                // A dead Yield also makes its resume label dead: find the next
                // label and retire it from the yield-offset resume table.
                IR::Instr *instrLabel = newByteCodeUseInstr->m_next;
                while (instrLabel->m_opcode != Js::OpCode::Label)
                {
                    instrLabel = instrLabel->m_next;
                }
                func->RemoveDeadYieldOffsetResumeLabel(instrLabel->AsLabelInstr());
                instrLabel->AsLabelInstr()->m_hasNonBranchRef = false;
            }

            // Save the last instruction to update the block with
            return newByteCodeUseInstr;
        }
        else
        {
            // ConvertToByteCodeUses removed the instruction outright.
            return instrPrev;
        }
    }
    else
    {
        instr->Remove();
        return instrPrev;
    }
}
// Remove 'block' from the flow graph: strip its instructions (keeping the
// block label and FunctionExit), detach it from all predecessors and
// successors, unlink its loop from the loop list if it is a loop header, and
// mark it deleted. When invoked during globopt (globOpt != nullptr), edges
// and the block's own lists are moved to the "dead" lists instead of being
// freed, so bailout can still record byte-code uses. 'tailDuping' marks the
// block dead on behalf of the tail-duplication pass.
void
FlowGraph::RemoveBlock(BasicBlock *block, GlobOpt * globOpt, bool tailDuping)
{
    Assert(!block->isDead && !block->isDeleted);
    IR::Instr * lastInstr = nullptr;
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        if (instr->m_opcode == Js::OpCode::FunctionExit)
        {
            // Removing FunctionExit causes problems downstream...
            // We could change the opcode, or have FunctionEpilog/FunctionExit to get
            // rid of the epilog.
            break;
        }
        if (instr == block->GetFirstInstr())
        {
            // Keep the block label, but it is no longer a loop top.
            Assert(instr->IsLabelInstr());
            instr->AsLabelInstr()->m_isLoopTop = false;
        }
        else
        {
            lastInstr = this->RemoveInstr(instr, globOpt);
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;
    if (lastInstr)
    {
        block->SetLastInstr(lastInstr);
    }
    // Detach this block from the far side of every edge. The doClean flags are
    // false because this block's own lists are handled in bulk below.
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetPredList())
    {
        edge->GetPred()->RemoveSucc(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetSuccList())
    {
        edge->GetSucc()->RemovePred(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    if (block->isLoopHeader && this->loopList)
    {
        // If loop graph is built, remove loop from loopList
        Loop **pPrevLoop = &this->loopList;
        while (*pPrevLoop != block->loop)
        {
            pPrevLoop = &((*pPrevLoop)->next);
        }
        *pPrevLoop = (*pPrevLoop)->next;
        this->hasLoop = (this->loopList != nullptr);
    }
    if (globOpt != nullptr)
    {
        // Preserve (rather than free) the edge lists for bailout bookkeeping.
        block->isDead = true;
        block->GetPredList()->MoveTo(block->GetDeadPredList());
        block->GetSuccList()->MoveTo(block->GetDeadSuccList());
    }
    if (tailDuping)
    {
        block->isDead = true;
    }
    block->isDeleted = true;
    block->SetDataUseCount(0);
}
  2397. void
  2398. BasicBlock::UnlinkInstr(IR::Instr * instr)
  2399. {
  2400. Assert(this->Contains(instr));
  2401. Assert(this->GetFirstInstr() != this->GetLastInstr());
  2402. if (instr == this->GetFirstInstr())
  2403. {
  2404. Assert(!this->GetFirstInstr()->IsLabelInstr());
  2405. this->SetFirstInstr(instr->m_next);
  2406. }
  2407. else if (instr == this->GetLastInstr())
  2408. {
  2409. this->SetLastInstr(instr->m_prev);
  2410. }
  2411. instr->Unlink();
  2412. }
  2413. void
  2414. BasicBlock::RemoveInstr(IR::Instr * instr)
  2415. {
  2416. Assert(this->Contains(instr));
  2417. if (instr == this->GetFirstInstr())
  2418. {
  2419. this->SetFirstInstr(instr->m_next);
  2420. }
  2421. else if (instr == this->GetLastInstr())
  2422. {
  2423. this->SetLastInstr(instr->m_prev);
  2424. }
  2425. instr->Remove();
  2426. }
  2427. void
  2428. BasicBlock::InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr)
  2429. {
  2430. Assert(this->Contains(beforeThisInstr));
  2431. beforeThisInstr->InsertBefore(newInstr);
  2432. if(this->GetFirstInstr() == beforeThisInstr)
  2433. {
  2434. Assert(!beforeThisInstr->IsLabelInstr());
  2435. this->SetFirstInstr(newInstr);
  2436. }
  2437. }
  2438. void
  2439. BasicBlock::InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr)
  2440. {
  2441. Assert(this->Contains(afterThisInstr));
  2442. afterThisInstr->InsertAfter(newInstr);
  2443. if (this->GetLastInstr() == afterThisInstr)
  2444. {
  2445. Assert(afterThisInstr->HasFallThrough());
  2446. this->SetLastInstr(newInstr);
  2447. }
  2448. }
  2449. void
  2450. BasicBlock::InsertAfter(IR::Instr *newInstr)
  2451. {
  2452. Assert(this->GetLastInstr()->HasFallThrough());
  2453. this->GetLastInstr()->InsertAfter(newInstr);
  2454. this->SetLastInstr(newInstr);
  2455. }
  2456. void
  2457. Loop::SetHasCall()
  2458. {
  2459. Loop * current = this;
  2460. do
  2461. {
  2462. if (current->hasCall)
  2463. {
  2464. #if DBG
  2465. current = current->parent;
  2466. while (current)
  2467. {
  2468. Assert(current->hasCall);
  2469. current = current->parent;
  2470. }
  2471. #endif
  2472. break;
  2473. }
  2474. current->hasCall = true;
  2475. current = current->parent;
  2476. }
  2477. while (current != nullptr);
  2478. }
  2479. void
  2480. Loop::SetImplicitCallFlags(Js::ImplicitCallFlags newFlags)
  2481. {
  2482. Loop * current = this;
  2483. do
  2484. {
  2485. if ((current->implicitCallFlags & newFlags) == newFlags)
  2486. {
  2487. #if DBG
  2488. current = current->parent;
  2489. while (current)
  2490. {
  2491. Assert((current->implicitCallFlags & newFlags) == newFlags);
  2492. current = current->parent;
  2493. }
  2494. #endif
  2495. break;
  2496. }
  2497. newFlags = (Js::ImplicitCallFlags)(implicitCallFlags | newFlags);
  2498. current->implicitCallFlags = newFlags;
  2499. current = current->parent;
  2500. }
  2501. while (current != nullptr);
  2502. }
  2503. Js::ImplicitCallFlags
  2504. Loop::GetImplicitCallFlags()
  2505. {
  2506. if (this->implicitCallFlags == Js::ImplicitCall_HasNoInfo)
  2507. {
  2508. if (this->parent == nullptr)
  2509. {
  2510. // We don't have any information, and we don't have any parent, so just assume that there aren't any implicit calls
  2511. this->implicitCallFlags = Js::ImplicitCall_None;
  2512. }
  2513. else
  2514. {
  2515. // We don't have any information, get it from the parent and hope for the best
  2516. this->implicitCallFlags = this->parent->GetImplicitCallFlags();
  2517. }
  2518. }
  2519. return this->implicitCallFlags;
  2520. }
// Whether the implicit-call flags recorded for this loop allow field copy-prop.
// Under DBG_DUMP, traces the case where only external implicit calls were
// observed (the trace message indicates the optimization is disabled then;
// the actual decision is GlobOpt::ImplicitCallFlagsAllowOpts).
bool
Loop::CanDoFieldCopyProp()
{
#if DBG_DUMP
    if (((this->implicitCallFlags & ~(Js::ImplicitCall_External)) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("fieldcopyprop disabled because external: loop count: %d"), GetLoopNumber());
        GetFunc()->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return GlobOpt::ImplicitCallFlagsAllowOpts(this);
}
  2536. bool
  2537. Loop::CanDoFieldHoist()
  2538. {
  2539. // We can do field hoist wherever we can do copy prop
  2540. return CanDoFieldCopyProp();
  2541. }
  2542. bool
  2543. Loop::CanHoistInvariants()
  2544. {
  2545. Func * func = this->GetHeadBlock()->firstInstr->m_func->GetTopFunc();
  2546. if (PHASE_OFF(Js::InvariantsPhase, func))
  2547. {
  2548. return false;
  2549. }
  2550. return true;
  2551. }
  2552. IR::LabelInstr *
  2553. Loop::GetLoopTopInstr() const
  2554. {
  2555. IR::LabelInstr * instr = nullptr;
  2556. if (this->topFunc->isFlowGraphValid)
  2557. {
  2558. instr = this->GetHeadBlock()->GetFirstInstr()->AsLabelInstr();
  2559. }
  2560. else
  2561. {
  2562. // Flowgraph gets torn down after the globopt, so can't get the loopTop from the head block.
  2563. instr = this->loopTopLabel;
  2564. }
  2565. if (instr)
  2566. {
  2567. Assert(instr->m_isLoopTop);
  2568. }
  2569. return instr;
  2570. }
  2571. void
  2572. Loop::SetLoopTopInstr(IR::LabelInstr * loopTop)
  2573. {
  2574. this->loopTopLabel = loopTop;
  2575. }
  2576. #if DBG_DUMP
  2577. uint
  2578. Loop::GetLoopNumber() const
  2579. {
  2580. IR::LabelInstr * loopTopInstr = this->GetLoopTopInstr();
  2581. if (loopTopInstr->IsProfiledLabelInstr())
  2582. {
  2583. return loopTopInstr->AsProfiledLabelInstr()->loopNum;
  2584. }
  2585. return Js::LoopHeader::NoLoop;
  2586. }
  2587. bool
  2588. BasicBlock::Contains(IR::Instr * instr)
  2589. {
  2590. FOREACH_INSTR_IN_BLOCK(blockInstr, this)
  2591. {
  2592. if (instr == blockInstr)
  2593. {
  2594. return true;
  2595. }
  2596. }
  2597. NEXT_INSTR_IN_BLOCK;
  2598. return false;
  2599. }
  2600. #endif
  2601. FlowEdge *
  2602. FlowEdge::New(FlowGraph * graph)
  2603. {
  2604. FlowEdge * edge;
  2605. edge = JitAnew(graph->alloc, FlowEdge);
  2606. return edge;
  2607. }
  2608. bool
  2609. Loop::IsDescendentOrSelf(Loop const * loop) const
  2610. {
  2611. Loop const * currentLoop = loop;
  2612. while (currentLoop != nullptr)
  2613. {
  2614. if (currentLoop == this)
  2615. {
  2616. return true;
  2617. }
  2618. currentLoop = currentLoop->parent;
  2619. }
  2620. return false;
  2621. }
// Remove 'instr' while keeping block first/last pointers in sync when the
// instruction sits on a block boundary.
// NOTE(review): dereferences instr->m_next unconditionally — assumes this is
// never the function's final instruction; confirm at call sites.
void FlowGraph::SafeRemoveInstr(IR::Instr *instr)
{
    BasicBlock *block;
    if (instr->m_next->IsLabelInstr())
    {
        // 'instr' is the last instruction of the block that precedes the label.
        block = instr->m_next->AsLabelInstr()->GetBasicBlock()->GetPrev();
        block->RemoveInstr(instr);
    }
    else if (instr->IsLabelInstr())
    {
        // 'instr' is itself a block label.
        block = instr->AsLabelInstr()->GetBasicBlock();
        block->RemoveInstr(instr);
    }
    else
    {
        // Interior instruction: block boundaries are unaffected.
        Assert(!instr->EndsBasicBlock() && !instr->StartsBasicBlock());
        instr->Remove();
    }
}
// Determine whether 'src' is known to be an unsigned (non-negative) value:
// either a single-def non-negative integer constant, or the single-def result
// of an unsigned shift (ShrU_A) by the constant zero. On success, *pShrSrc1
// receives the operand to use in an unsigned comparison — the constant itself,
// or the ShrU's first source.
bool FlowGraph::IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1)
{
    // Look for an unsigned constant, or the result of an unsigned shift by zero
    if (!src->IsRegOpnd())
    {
        return false;
    }
    if (!src->AsRegOpnd()->m_sym->IsSingleDef())
    {
        return false;
    }

    // Constant case: non-negative int constants are unsigned-safe as-is.
    // (Must be checked before GetInstrDef below.)
    if (src->AsRegOpnd()->m_sym->IsIntConst())
    {
        int32 intConst = src->AsRegOpnd()->m_sym->GetIntConstValue();

        if (intConst >= 0)
        {
            *pShrSrc1 = src;
            return true;
        }
        else
        {
            return false;
        }
    }

    // Shift case: x >>> 0 produces an unsigned value; use x directly.
    IR::Instr * shrUInstr = src->AsRegOpnd()->m_sym->GetInstrDef();
    if (shrUInstr->m_opcode != Js::OpCode::ShrU_A)
    {
        return false;
    }

    IR::Opnd *shrCnt = shrUInstr->GetSrc2();

    if (!shrCnt->IsRegOpnd() || !shrCnt->AsRegOpnd()->m_sym->IsTaggableIntConst() || shrCnt->AsRegOpnd()->m_sym->GetIntConstValue() != 0)
    {
        return false;
    }

    *pShrSrc1 = shrUInstr->GetSrc1();
    return true;
}
// Peephole: when both sides of a compare/branch are provably unsigned
// (IsUnsignedOpnd), rewrite the signed compare opcode to its unsigned form
// and replace the operands with the pre-shift sources, recording the
// original operands in a ByteCodeUses instruction so bailout still sees
// their byte-code uses. Returns true when the rewrite was performed.
bool FlowGraph::UnsignedCmpPeep(IR::Instr *cmpInstr)
{
    IR::Opnd *cmpSrc1 = cmpInstr->GetSrc1();
    IR::Opnd *cmpSrc2 = cmpInstr->GetSrc2();
    IR::Opnd *newSrc1;
    IR::Opnd *newSrc2;

    // Look for something like:
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  BrGt t1, t2, L
    //
    // Peep to:
    //
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  BrUnGt x, t2, L
    //  ByteCodeUse t1
    //
    // Hopefully dead-store can get rid of the ShrU

    // Only profitable/legal when globopt runs with aggressive + lossy int
    // type specialization available.
    if (!this->func->DoGlobOpt() || !GlobOpt::DoAggressiveIntTypeSpec(this->func) || !GlobOpt::DoLossyIntTypeSpec(this->func))
    {
        return false;
    }

    if (cmpInstr->IsBranchInstr() && !cmpInstr->AsBranchInstr()->IsConditional())
    {
        return false;
    }

    if (!cmpInstr->GetSrc2())
    {
        return false;
    }

    if (!this->IsUnsignedOpnd(cmpSrc1, &newSrc1))
    {
        return false;
    }
    if (!this->IsUnsignedOpnd(cmpSrc2, &newSrc2))
    {
        return false;
    }

    // Map each signed compare/branch opcode to its unsigned counterpart.
    // (Not-)equality compares are unchanged but still benefit from the
    // operand replacement below.
    switch(cmpInstr->m_opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
        break;
    case Js::OpCode::BrLe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::BrLt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::BrGe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::BrGt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmLe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLe_A;
        break;
    case Js::OpCode::CmLt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLt_A;
        break;
    case Js::OpCode::CmGe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGe_A;
        break;
    case Js::OpCode::CmGt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGt_A;
        break;

    default:
        return false;
    }

    // Record the replaced operands as byte-code uses so bailout state stays
    // correct even if the ShrU defs are later dead-stored.
    IR::ByteCodeUsesInstr * bytecodeInstr = IR::ByteCodeUsesInstr::New(cmpInstr);
    cmpInstr->InsertAfter(bytecodeInstr);

    if (cmpSrc1 != newSrc1)
    {
        if (cmpSrc1->IsRegOpnd() && !cmpSrc1->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc1);
        }
        cmpInstr->ReplaceSrc1(newSrc1);
        if (newSrc1->IsRegOpnd())
        {
            // Mark as JIT-introduced so it is not treated as a byte-code reg.
            cmpInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
        }
    }
    if (cmpSrc2 != newSrc2)
    {
        if (cmpSrc2->IsRegOpnd() && !cmpSrc2->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc2);
        }
        cmpInstr->ReplaceSrc2(newSrc2);
        if (newSrc2->IsRegOpnd())
        {
            cmpInstr->GetSrc2()->AsRegOpnd()->SetIsJITOptimizedReg(true);
        }
    }

    return true;
}
  2779. #if DBG
// Debug-only validation of loop-graph invariants: every edge either stays in
// the same loop, enters a loop through its header, or exits to an ancestor
// loop; and (unless break-block removal is off) no break blocks remain.
void
FlowGraph::VerifyLoopGraph()
{
    FOREACH_BLOCK(block, this)
    {
        Loop *loop = block->loop;
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (loop == succ->loop)
            {
                // Same loop: a loop-header successor must be this loop's own header.
                Assert(succ->isLoopHeader == false || loop->GetHeadBlock() == succ);
                continue;
            }
            if (succ->isLoopHeader)
            {
                // Entering a different loop: it must be an immediate child of
                // this loop, or an unrelated (non-descendant) loop.
                Assert(succ->loop->parent == loop
                    || (!loop->IsDescendentOrSelf(succ->loop)));
                continue;
            }
            // Exiting: the successor's loop (if any) must enclose this loop.
            Assert(succ->loop == nullptr || succ->loop->IsDescendentOrSelf(loop));
        } NEXT_SUCCESSOR_BLOCK;

        if (!PHASE_OFF(Js::RemoveBreakBlockPhase, this->GetFunc()))
        {
            // Make sure all break blocks have been removed.
            if (loop && !block->isLoopHeader && !(this->func->HasTry() && !this->func->DoOptimizeTryCatch()))
            {
                Assert(loop->IsDescendentOrSelf(block->GetPrev()->loop));
            }
        }
    } NEXT_BLOCK;
}
  2811. #endif
  2812. #if DBG_DUMP
  2813. void
  2814. FlowGraph::Dump(bool onlyOnVerboseMode, const char16 *form)
  2815. {
  2816. if(PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  2817. {
  2818. if (!onlyOnVerboseMode || Js::Configuration::Global.flags.Verbose)
  2819. {
  2820. if (form)
  2821. {
  2822. Output::Print(form);
  2823. }
  2824. this->Dump();
  2825. }
  2826. }
  2827. }
  2828. void
  2829. FlowGraph::Dump()
  2830. {
  2831. Output::Print(_u("\nFlowGraph\n"));
  2832. FOREACH_BLOCK(block, this)
  2833. {
  2834. Loop * loop = block->loop;
  2835. while (loop)
  2836. {
  2837. Output::Print(_u(" "));
  2838. loop = loop->parent;
  2839. }
  2840. block->DumpHeader(false);
  2841. } NEXT_BLOCK;
  2842. Output::Print(_u("\nLoopGraph\n"));
  2843. for (Loop *loop = this->loopList; loop; loop = loop->next)
  2844. {
  2845. Output::Print(_u("\nLoop\n"));
  2846. FOREACH_BLOCK_IN_LOOP(block, loop)
  2847. {
  2848. block->DumpHeader(false);
  2849. }NEXT_BLOCK_IN_LOOP;
  2850. Output::Print(_u("Loop Ends\n"));
  2851. }
  2852. }
// Print a one-line summary of the block: number, status flags, live and dead
// pred/succ block numbers, and (for a loop-header block) the loop's call and
// implicit-call summary. Output strings/format are relied on by dump readers;
// do not alter them.
void
BasicBlock::DumpHeader(bool insertCR)
{
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
    Output::Print(_u("BLOCK %d:"), this->number);

    if (this->isDead)
    {
        Output::Print(_u(" **** DEAD ****"));
    }

    // Break/airlock/compensation markers are mutually exclusive below.
    if (this->isBreakBlock)
    {
        Output::Print(_u(" **** Break Block ****"));
    }
    else if (this->isAirLockBlock)
    {
        Output::Print(_u(" **** Air lock Block ****"));
    }
    else if (this->isBreakCompensationBlockAtSource)
    {
        Output::Print(_u(" **** Break Source Compensation Code ****"));
    }
    else if (this->isBreakCompensationBlockAtSink)
    {
        Output::Print(_u(" **** Break Sink Compensation Code ****"));
    }
    else if (this->isAirLockCompensationBlock)
    {
        Output::Print(_u(" **** Airlock block Compensation Code ****"));
    }

    // Live predecessors.
    if (!this->predList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" In("));
        FOREACH_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Live successors.
    if (!this->succList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" Out("));
        FOREACH_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Edges parked on the dead lists during globopt.
    if (!this->deadPredList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadIn("));
        FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    if (!this->deadSuccList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadOut("));
        FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    if (this->loop)
    {
        Output::Print(_u("  Loop(%d) header: %d"), this->loop->loopNumber, this->loop->GetHeadBlock()->GetBlockNum());

        if (this->loop->parent)
        {
            Output::Print(_u(" parent(%d): %d"), this->loop->parent->loopNumber, this->loop->parent->GetHeadBlock()->GetBlockNum());
        }

        // Loop summary is printed only on the header block itself.
        if (this->loop->GetHeadBlock() == this)
        {
            Output::SkipToColumn(50);
            Output::Print(_u("Call Exp/Imp: "));
            if (this->loop->GetHasCall())
            {
                Output::Print(_u("yes/"));
            }
            else
            {
                Output::Print(_u(" no/"));
            }
            Output::Print(Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->loop->GetImplicitCallFlags()));
        }
    }

    Output::Print(_u("\n"));
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
}
// Dump every instruction in this block.
void
BasicBlock::Dump()
{
    // Dumping the first instruction (label) will dump the block header as well.
    FOREACH_INSTR_IN_BLOCK(instr, this)
    {
        instr->Dump();
    }
    NEXT_INSTR_IN_BLOCK;
}
  2987. void
  2988. AddPropertyCacheBucket::Dump() const
  2989. {
  2990. Assert(this->initialType != nullptr);
  2991. Assert(this->finalType != nullptr);
  2992. Output::Print(_u(" initial type: 0x%x, final type: 0x%x "), this->initialType->GetAddr(), this->finalType->GetAddr());
  2993. }
  2994. void
  2995. ObjTypeGuardBucket::Dump() const
  2996. {
  2997. Assert(this->guardedPropertyOps != nullptr);
  2998. this->guardedPropertyOps->Dump();
  2999. }
  3000. void
  3001. ObjWriteGuardBucket::Dump() const
  3002. {
  3003. Assert(this->writeGuards != nullptr);
  3004. this->writeGuards->Dump();
  3005. }
  3006. #endif