FlowGraph.cpp 181 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
747784779478047814782478347844785478647874788478947904791479247934794479547964797479847994800480148024803480448054806480748084809481048114812481348144815481648174818481948204821482248234824482548264827482848294830483148324833483448354836483748384839484048414842484348444845484648474848484948504851485248534854485548564857485848594860486148624863486448654866486748684869487048714872487348744875487648774878487948804881488248834884488548864887488848894890489148924893489448954896489748984899490049014902490349044905490649074908490949104911491249134914491549164917491849194920492149224923492449254926492749284929493049314932493349344935493649374938493949404941494249434944494549464947494849494950495149524953495449554956495749584959496049614962496349644965496649674968496949704971497249734974497549764977497849794980498149824983498449854986498749884989499049914992499349944995499649974998499950005001500250035004500550065007500850095010501150125013501450155016501750185019502050215022502350245025502650275028502950305031503250335034503550365037503850395040504150425043504450455046504750485049505050515052505350545055505650575058505950605061506250635064506550665067506850695070507150725073507450755076507750785079508050815082508350845085508650875088508950905091509250935094509550965097509850995100510151025103510451055106510751085109511051115112511351145115511651175118511951205121512251235124512551265127512851295130513151325133513451355136513751385139514051415142514351445145514651475148514951505151515251535154515551565157515851595160516151625163516451655166
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. FlowGraph *
  7. FlowGraph::New(Func * func, JitArenaAllocator * alloc)
  8. {
  9. FlowGraph * graph;
  10. graph = JitAnew(alloc, FlowGraph, func, alloc);
  11. return graph;
  12. }
  13. // When there is an early exit within an EH region,
  14. // Leave instructions are inserted in the bytecode to jump up the region tree one by one
  15. // We delete this Leave chain of instructions, and add an edge to Finally
  16. IR::LabelInstr * FlowGraph::DeleteLeaveChainBlocks(IR::BranchInstr *leaveInstr, IR::Instr * &instrPrev)
  17. {
  18. // Cleanup Rest of the Leave chain
  19. IR::LabelInstr * leaveTarget = leaveInstr->GetTarget();
  20. Assert(leaveTarget->GetNextRealInstr()->IsBranchInstr());
  21. IR::BranchInstr *leaveChain = leaveTarget->GetNextRealInstr()->AsBranchInstr();
  22. IR::LabelInstr * curLabel = leaveTarget->AsLabelInstr();
  23. while (leaveChain->m_opcode != Js::OpCode::Br)
  24. {
  25. Assert(leaveChain->m_opcode == Js::OpCode::Leave || leaveChain->m_opcode == Js::OpCode::BrOnException);
  26. IR::LabelInstr * nextLabel = leaveChain->m_next->AsLabelInstr();
  27. leaveChain = nextLabel->m_next->AsBranchInstr();
  28. BasicBlock *curBlock = curLabel->GetBasicBlock();
  29. FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, curBlock->GetPredList(), iter1)
  30. {
  31. BasicBlock * pred = edge->GetPred();
  32. pred->RemoveSucc(curBlock, this);
  33. } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
  34. this->RemoveBlock(curBlock);
  35. curLabel = nextLabel;
  36. }
  37. instrPrev = leaveChain->m_next;
  38. IR::LabelInstr * exitLabel = leaveChain->GetTarget();
  39. BasicBlock * curBlock = curLabel->GetBasicBlock();
  40. FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, curBlock->GetPredList(), iter1)
  41. {
  42. BasicBlock * pred = edge->GetPred();
  43. pred->RemoveSucc(curBlock, this);
  44. } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
  45. this->RemoveBlock(curBlock);
  46. return exitLabel;
  47. }
  48. bool FlowGraph::Dominates(Region *region1, Region *region2)
  49. {
  50. Assert(region1);
  51. Assert(region2);
  52. Region *startR1 = region1;
  53. Region *startR2 = region2;
  54. int region1Depth = 0;
  55. while (startR1 != nullptr)
  56. {
  57. region1Depth++;
  58. startR1 = startR1->GetParent();
  59. }
  60. int region2Depth = 0;
  61. while (startR2 != nullptr)
  62. {
  63. region2Depth++;
  64. startR2 = startR2->GetParent();
  65. }
  66. return region1Depth > region2Depth;
  67. }
  68. bool FlowGraph::DoesExitLabelDominate(IR::BranchInstr *leaveInstr)
  69. {
  70. IR::LabelInstr * leaveTarget = leaveInstr->GetTarget();
  71. Assert(leaveTarget->GetNextRealInstr()->IsBranchInstr());
  72. IR::BranchInstr *leaveChain = leaveTarget->GetNextRealInstr()->AsBranchInstr();
  73. while (leaveChain->m_opcode != Js::OpCode::Br)
  74. {
  75. Assert(leaveChain->m_opcode == Js::OpCode::Leave || leaveChain->m_opcode == Js::OpCode::BrOnException);
  76. IR::LabelInstr * nextLabel = leaveChain->m_next->AsLabelInstr();
  77. leaveChain = nextLabel->m_next->AsBranchInstr();
  78. }
  79. IR::LabelInstr * exitLabel = leaveChain->GetTarget();
  80. return Dominates(exitLabel->GetRegion(), finallyLabelStack->Top()->GetRegion());
  81. }
  82. bool FlowGraph::IsEarlyExitFromFinally(IR::BranchInstr *leaveInstr, Region *currentRegion, Region *branchTargetRegion, IR::Instr * &instrPrev, IR::LabelInstr * &exitLabel)
  83. {
  84. if (finallyLabelStack->Empty())
  85. {
  86. return false;
  87. }
  88. if (currentRegion->GetType() == RegionTypeTry)
  89. {
  90. if (currentRegion->GetMatchingCatchRegion() == nullptr)
  91. {
  92. // try of try-finally
  93. bool isEarly =
  94. (branchTargetRegion != currentRegion->GetMatchingFinallyRegion(false) &&
  95. branchTargetRegion != currentRegion->GetMatchingFinallyRegion(true));
  96. if (!isEarly) return false;
  97. if (DoesExitLabelDominate(leaveInstr)) return false;
  98. // Cleanup Rest of the Leave chain
  99. exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
  100. return true;
  101. }
  102. // try of try-catch
  103. IR::BranchInstr *leaveChain = leaveInstr;
  104. IR::LabelInstr * leaveTarget = leaveChain->GetTarget();
  105. IR::Instr *target = leaveTarget->GetNextRealInstr();
  106. if (target->m_opcode == Js::OpCode::Br)
  107. {
  108. IR::BranchInstr *tryExit = target->AsBranchInstr();
  109. instrPrev = tryExit;
  110. return false;
  111. }
  112. if (DoesExitLabelDominate(leaveInstr)) return false;
  113. // Cleanup Rest of the Leave chain
  114. exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
  115. return true;
  116. }
  117. if (currentRegion->GetType() == RegionTypeCatch)
  118. {
  119. // We don't care for early exits in catch blocks, because we bailout anyway
  120. return false;
  121. }
  122. Assert(currentRegion->GetType() == RegionTypeFinally);
  123. // All Leave's inside Finally region are early exits
  124. // Since we execute non-excepting Finallys in JIT now, we should convert Leave to Br
  125. if (DoesExitLabelDominate(leaveInstr)) return false;
  126. exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
  127. return true;
  128. }
  129. ///----------------------------------------------------------------------------
  130. ///
  131. /// FlowGraph::Build
  132. ///
  133. /// Construct flow graph and loop structures for the current state of the function.
  134. ///
  135. ///----------------------------------------------------------------------------
  136. void
  137. FlowGraph::Build(void)
  138. {
  139. Func * func = this->func;
  140. BEGIN_CODEGEN_PHASE(func, Js::FGPeepsPhase);
  141. this->RunPeeps();
  142. END_CODEGEN_PHASE(func, Js::FGPeepsPhase);
  143. // We don't optimize fully with SimpleJit. But, when JIT loop body is enabled, we do support
  144. // bailing out from a simple jitted function to do a full jit of a loop body in the function
  145. // (BailOnSimpleJitToFullJitLoopBody). For that purpose, we need the flow from try to handler.
  146. if (this->func->HasTry() && (this->func->DoOptimizeTry() ||
  147. (this->func->IsSimpleJit() && this->func->GetJITFunctionBody()->DoJITLoopBody())))
  148. {
  149. this->catchLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
  150. }
  151. if (this->func->HasFinally() && (this->func->DoOptimizeTry() ||
  152. (this->func->IsSimpleJit() && this->func->GetJITFunctionBody()->DoJITLoopBody())))
  153. {
  154. this->finallyLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
  155. this->leaveNullLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
  156. this->regToFinallyEndMap = JitAnew(this->alloc, RegionToFinallyEndMapType, this->alloc, 0);
  157. this->leaveNullLabelToFinallyLabelMap = JitAnew(this->alloc, LeaveNullLabelToFinallyLabelMapType, this->alloc, 0);
  158. }
  159. IR::Instr * currLastInstr = nullptr;
  160. BasicBlock * currBlock = nullptr;
  161. BasicBlock * nextBlock = nullptr;
  162. bool hasCall = false;
  163. FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)
  164. {
  165. if (currLastInstr == nullptr || instr->EndsBasicBlock())
  166. {
  167. // Start working on a new block.
  168. // If we're currently processing a block, then wrap it up before beginning a new one.
  169. if (currLastInstr != nullptr)
  170. {
  171. nextBlock = currBlock;
  172. currBlock = this->AddBlock(instr->m_next, currLastInstr, nextBlock);
  173. currBlock->hasCall = hasCall;
  174. hasCall = false;
  175. }
  176. currLastInstr = instr;
  177. }
  178. if (instr->StartsBasicBlock())
  179. {
  180. // Insert a BrOnException after the loop top if we are in a try-catch/try-finally. This is required to
  181. // model flow from the loop to the catch/finally block for loops that don't have a break condition.
  182. if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop && instr->m_next->m_opcode != Js::OpCode::BrOnException)
  183. {
  184. IR::BranchInstr * brOnException = nullptr;
  185. IR::Instr *instrNext = instr->m_next;
  186. if (this->catchLabelStack && !this->catchLabelStack->Empty())
  187. {
  188. brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->catchLabelStack->Top(), instr->m_func);
  189. instr->InsertAfter(brOnException);
  190. }
  191. if (this->finallyLabelStack && !this->finallyLabelStack->Empty())
  192. {
  193. brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->finallyLabelStack->Top(), instr->m_func);
  194. instr->InsertAfter(brOnException);
  195. }
  196. // Insert a BrOnException after the loop top if we are in a finally block. This is required so
  197. // that LeaveNull is not eliminated in the presence of loops with no exit condition
  198. // LeaveNull is needed to get the end of the finally block, this is important when adding edge from the finally block to the early exit block
  199. if (this->leaveNullLabelStack && !this->leaveNullLabelStack->Empty())
  200. {
  201. brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->leaveNullLabelStack->Top(), instr->m_func);
  202. instr->InsertAfter(brOnException);
  203. }
  204. if (brOnException)
  205. {
  206. instrPrev = instrNext->m_prev;
  207. continue;
  208. }
  209. }
  210. // Wrap up the current block and get ready to process a new one.
  211. nextBlock = currBlock;
  212. currBlock = this->AddBlock(instr, currLastInstr, nextBlock);
  213. currBlock->hasCall = hasCall;
  214. hasCall = false;
  215. currLastInstr = nullptr;
  216. }
  217. switch (instr->m_opcode)
  218. {
  219. case Js::OpCode::Catch:
  220. Assert(instr->m_prev->IsLabelInstr());
  221. if (this->catchLabelStack)
  222. {
  223. this->catchLabelStack->Push(instr->m_prev->AsLabelInstr());
  224. }
  225. break;
  226. case Js::OpCode::Finally:
  227. {
  228. if (!this->finallyLabelStack)
  229. {
  230. break;
  231. }
  232. //To enable globopt on functions with try finallys we transform the flowgraph as below :
  233. // TryFinally L1
  234. // <try code>
  235. // Leave L2
  236. // L2 : Br L3
  237. // L1 : <finally code>
  238. // LeaveNull
  239. // L3 : <code after try finally>
  240. //
  241. //to:
  242. //
  243. // TryFinally L1
  244. // <try code>
  245. // BrOnException L1
  246. // Leave L1'
  247. // L1 : BailOnException
  248. // L2 : Finally
  249. // <finally code>
  250. // LeaveNull
  251. // L3: <code after try finally>
  252. //We generate 2 flow edges from the try - an exception path and a non exception path.
  253. //This transformation enables us to optimize on the non-excepting finally path
  254. Assert(instr->m_prev->IsLabelInstr());
  255. IR::LabelInstr * finallyLabel = instr->m_prev->AsLabelInstr();
  256. // Find leave label
  257. Assert(finallyLabel->m_prev->m_opcode == Js::OpCode::Br && finallyLabel->m_prev->m_prev->m_opcode == Js::OpCode::Label);
  258. IR::Instr * insertPoint = finallyLabel->m_prev;
  259. IR::LabelInstr * leaveTarget = finallyLabel->m_prev->m_prev->AsLabelInstr();
  260. leaveTarget->Unlink();
  261. finallyLabel->InsertBefore(leaveTarget);
  262. finallyLabel->Unlink();
  263. insertPoint->InsertBefore(finallyLabel);
  264. // Bailout from the opcode following Finally
  265. IR::Instr * bailOnException = IR::BailOutInstr::New(Js::OpCode::BailOnException, IR::BailOutOnException, instr->m_next, instr->m_func);
  266. insertPoint->InsertBefore(bailOnException);
  267. insertPoint->Remove();
  268. this->finallyLabelStack->Push(finallyLabel);
  269. Assert(!this->leaveNullLabelStack->Empty());
  270. IR::LabelInstr * leaveNullLabel = this->leaveNullLabelStack->Pop();
  271. this->leaveNullLabelToFinallyLabelMap->Item(leaveNullLabel, leaveTarget /*new finally label*/);
  272. Assert(leaveTarget->labelRefs.HasOne());
  273. IR::BranchInstr * brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, finallyLabel, instr->m_func);
  274. leaveTarget->labelRefs.Head()->InsertBefore(brOnException);
  275. instrPrev = instr->m_prev;
  276. }
  277. break;
  278. case Js::OpCode::TryCatch:
  279. if (this->catchLabelStack)
  280. {
  281. this->catchLabelStack->Pop();
  282. }
  283. break;
  284. case Js::OpCode::TryFinally:
  285. if (this->finallyLabelStack)
  286. {
  287. this->finallyLabelStack->Pop();
  288. }
  289. break;
  290. case Js::OpCode::LeaveNull:
  291. {
  292. if (!this->leaveNullLabelStack)
  293. {
  294. break;
  295. }
  296. IR::Instr * label = instr->GetPrevRealInstrOrLabel();
  297. if (label->IsLabelInstr())
  298. {
  299. this->leaveNullLabelStack->Push(label->AsLabelInstr());
  300. break;
  301. }
  302. // Create a new label before LeaveNull
  303. IR::LabelInstr *leaveNullLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func);
  304. instr->InsertBefore(leaveNullLabel);
  305. this->leaveNullLabelStack->Push(leaveNullLabel);
  306. leaveNullLabel->SetByteCodeOffset(instr);
  307. // Wrap up the current block and get ready to process a new one.
  308. nextBlock = currBlock;
  309. currBlock = this->AddBlock(leaveNullLabel, instr, nextBlock);
  310. currBlock->hasCall = hasCall;
  311. hasCall = false;
  312. currLastInstr = nullptr;
  313. }
  314. break;
  315. case Js::OpCode::CloneBlockScope:
  316. case Js::OpCode::CloneInnerScopeSlots:
  317. // It would be nice to do this in IRBuilder, but doing so gives us
  318. // trouble when doing the DoSlotArrayCheck since it assume single def
  319. // of the sym to do its check properly. So instead we assign the dst
  320. // here in FlowGraph.
  321. instr->SetDst(instr->GetSrc1());
  322. break;
  323. }
  324. if (OpCodeAttr::UseAllFields(instr->m_opcode))
  325. {
  326. // UseAllFields opcode are call instruction or opcode that would call.
  327. hasCall = true;
  328. if (OpCodeAttr::CallInstr(instr->m_opcode))
  329. {
  330. if (!instr->isCallInstrProtectedByNoProfileBailout)
  331. {
  332. instr->m_func->SetHasCallsOnSelfAndParents();
  333. }
  334. // For ARM & X64 because of their register calling convention
  335. // the ArgOuts need to be moved next to the call.
  336. #if defined(_M_ARM) || defined(_M_X64)
  337. IR::Instr* argInsertInstr = instr;
  338. instr->IterateArgInstrs([&](IR::Instr* argInstr)
  339. {
  340. if (argInstr->m_opcode != Js::OpCode::LdSpreadIndices &&
  341. argInstr->m_opcode != Js::OpCode::ArgOut_A_Dynamic &&
  342. argInstr->m_opcode != Js::OpCode::ArgOut_A_FromStackArgs &&
  343. argInstr->m_opcode != Js::OpCode::ArgOut_A_SpreadArg)
  344. {
  345. // don't have bailout in asm.js so we don't need BytecodeArgOutCapture
  346. if (!argInstr->m_func->GetJITFunctionBody()->IsAsmJsMode())
  347. {
  348. // Need to always generate byte code arg out capture,
  349. // because bailout can't restore from the arg out as it is
  350. // replaced by new sym for register calling convention in lower
  351. argInstr->GenerateBytecodeArgOutCapture();
  352. }
  353. // Check if the instruction is already next
  354. if (argInstr != argInsertInstr->m_prev)
  355. {
  356. // It is not, move it.
  357. argInstr->Move(argInsertInstr);
  358. }
  359. argInsertInstr = argInstr;
  360. }
  361. return false;
  362. });
  363. #endif
  364. }
  365. }
  366. }
  367. NEXT_INSTR_IN_FUNC_BACKWARD_EDITING;
  368. this->func->isFlowGraphValid = true;
  369. Assert(!this->catchLabelStack || this->catchLabelStack->Empty());
  370. Assert(!this->finallyLabelStack || this->finallyLabelStack->Empty());
  371. Assert(!this->leaveNullLabelStack || this->leaveNullLabelStack->Empty());
  372. // We've been walking backward so that edge lists would be in the right order. Now walk the blocks
  373. // forward to number the blocks in lexical order.
  374. unsigned int blockNum = 0;
  375. FOREACH_BLOCK(block, this)
  376. {
  377. block->SetBlockNum(blockNum++);
  378. }NEXT_BLOCK;
  379. AssertMsg(blockNum == this->blockCount, "Block count is out of whack");
  380. this->RemoveUnreachableBlocks();
  381. // Regions need to be assigned before Globopt because:
  382. // 1. FullJit: The Backward Pass will set the write-through symbols on the regions and the forward pass will
  383. // use this information to insert ToVars for those symbols. Also, for a symbol determined as write-through
  384. // in the try region to be restored correctly by the bailout, it should not be removed from the
  385. // byteCodeUpwardExposedUsed upon a def in the try region (the def might be preempted by an exception).
  386. //
  387. // 2. SimpleJit: Same case of correct restoration as above applies in SimpleJit too. However, the only bailout
  388. // we have in Simple Jitted code right now is BailOnSimpleJitToFullJitLoopBody, installed in IRBuilder. So,
  389. // for now, we can just check if the func has a bailout to assign regions pre globopt while running SimpleJit.
  390. bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
  391. (this->func->IsSimpleJit() && this->func->hasBailout));
  392. blockNum = 0;
  393. FOREACH_BLOCK_ALL(block, this)
  394. {
  395. block->SetBlockNum(blockNum++);
  396. if (assignRegionsBeforeGlobopt)
  397. {
  398. if (block->isDeleted && !block->isDead)
  399. {
  400. continue;
  401. }
  402. this->UpdateRegionForBlock(block);
  403. }
  404. } NEXT_BLOCK_ALL;
  405. AssertMsg(blockNum == this->blockCount, "Block count is out of whack");
  406. #if DBG_DUMP
  407. if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  408. {
  409. if (assignRegionsBeforeGlobopt)
  410. {
  411. Output::Print(_u("Before adding early exit edges\n"));
  412. FOREACH_BLOCK_ALL(block, this)
  413. {
  414. block->DumpHeader(true);
  415. Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
  416. if (region)
  417. {
  418. const char16 * regMap[] = { _u("RegionTypeInvalid"),
  419. _u("RegionTypeRoot"),
  420. _u("RegionTypeTry"),
  421. _u("RegionTypeCatch"),
  422. _u("RegionTypeFinally") };
  423. Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
  424. }
  425. } NEXT_BLOCK_ALL;
  426. this->func->Dump();
  427. }
  428. }
  429. #endif
  430. if (this->finallyLabelStack)
  431. {
  432. Assert(this->finallyLabelStack->Empty());
  433. // Add s0 definition at the beginning of the function
  434. // We need this because - s0 symbol can get added to bytcodeUpwardExposed use when there are early returns,
  435. // And globopt will complain that s0 is uninitialized, because we define it to undefined only at the end of the function
  436. const auto addrOpnd = IR::AddrOpnd::New(this->func->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, this->func, true);
  437. addrOpnd->SetValueType(ValueType::Undefined);
  438. IR::RegOpnd *regOpnd = IR::RegOpnd::New(this->func->m_symTable->FindStackSym(0), TyVar, this->func);
  439. IR::Instr *ldRet = IR::Instr::New(Js::OpCode::Ld_A, regOpnd, addrOpnd, this->func);
  440. this->func->m_headInstr->GetNextRealInstr()->InsertBefore(ldRet);
  441. IR::LabelInstr * currentLabel = nullptr;
  442. // look for early exits from a try, and insert bailout
  443. FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, func)
  444. {
  445. if (instr->IsLabelInstr())
  446. {
  447. currentLabel = instr->AsLabelInstr();
  448. }
  449. else if (instr->m_opcode == Js::OpCode::TryFinally)
  450. {
  451. Assert(instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionTypeFinally);
  452. this->finallyLabelStack->Push(instr->AsBranchInstr()->GetTarget());
  453. }
  454. else if (instr->m_opcode == Js::OpCode::Leave)
  455. {
  456. IR::LabelInstr *branchTarget = instr->AsBranchInstr()->GetTarget();
  457. IR::LabelInstr *exitLabel = nullptr;
  458. // An early exit (break, continue, return) from an EH region appears as Leave opcode
  459. // When there is an early exit within a try finally, we have to execute finally code
  460. // Currently we bailout on early exits
  461. // For all such edges add edge from eh region -> finally and finally -> earlyexit
  462. Assert(currentLabel != nullptr);
  463. if (currentLabel && IsEarlyExitFromFinally(instr->AsBranchInstr(), currentLabel->GetRegion(), branchTarget->GetRegion(), instrNext, exitLabel))
  464. {
  465. Assert(exitLabel);
  466. IR::Instr * bailOnEarlyExit = IR::BailOutInstr::New(Js::OpCode::BailOnEarlyExit, IR::BailOutOnEarlyExit, instr, instr->m_func);
  467. instr->InsertBefore(bailOnEarlyExit);
  468. IR::LabelInstr *exceptFinallyLabel = this->finallyLabelStack->Top();
  469. IR::LabelInstr *nonExceptFinallyLabel = exceptFinallyLabel->m_next->m_next->AsLabelInstr();
  470. BasicBlock *nonExceptFinallyBlock = nonExceptFinallyLabel->GetBasicBlock();
  471. if (Dominates(nonExceptFinallyBlock->firstInstr->AsLabelInstr()->GetRegion(), exitLabel->GetRegion()))
  472. {
  473. IR::Instr * leaveToFinally = IR::BranchInstr::New(Js::OpCode::Leave, exceptFinallyLabel, this->func);
  474. instr->InsertBefore(leaveToFinally);
  475. instr->Remove();
  476. this->AddEdge(currentLabel->GetBasicBlock(), exceptFinallyLabel->GetBasicBlock());
  477. Assert(this->regToFinallyEndMap->ContainsKey(nonExceptFinallyLabel->GetRegion()));
  478. BasicBlock * finallyEndBlock = this->regToFinallyEndMap->Item(nonExceptFinallyLabel->GetRegion());
  479. Assert(finallyEndBlock);
  480. Assert(finallyEndBlock->GetFirstInstr()->AsLabelInstr()->GetRegion() == nonExceptFinallyLabel->GetRegion());
  481. InsertEdgeFromFinallyToEarlyExit(finallyEndBlock, exitLabel);
  482. }
  483. }
  484. else if (currentLabel && currentLabel->GetRegion()->GetType() == RegionTypeFinally)
  485. {
  486. Assert(currentLabel->GetRegion()->GetMatchingTryRegion()->GetMatchingFinallyRegion(false) == currentLabel->GetRegion());
  487. // Convert Leave to Br because we execute non-excepting Finally in native code
  488. instr->m_opcode = Js::OpCode::Br;
  489. #if DBG
  490. instr->AsBranchInstr()->m_leaveConvToBr = true;
  491. #endif
  492. }
  493. }
  494. else if (instr->m_opcode == Js::OpCode::Finally)
  495. {
  496. this->finallyLabelStack->Pop();
  497. }
  498. }
  499. NEXT_INSTR_IN_FUNC_EDITING;
  500. }
  501. blockNum = 0;
  502. FOREACH_BLOCK_ALL(block, this)
  503. {
  504. block->SetBlockNum(blockNum++);
  505. } NEXT_BLOCK_ALL;
  506. this->FindLoops();
  507. #if DBG_DUMP
  508. if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  509. {
  510. if (assignRegionsBeforeGlobopt)
  511. {
  512. Output::Print(_u("After adding early exit edges/Before CanonicalizeLoops\n"));
  513. FOREACH_BLOCK_ALL(block, this)
  514. {
  515. block->DumpHeader(true);
  516. Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
  517. if (region)
  518. {
  519. const char16 * regMap[] = { _u("RegionTypeInvalid"),
  520. _u("RegionTypeRoot"),
  521. _u("RegionTypeTry"),
  522. _u("RegionTypeCatch"),
  523. _u("RegionTypeFinally") };
  524. Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
  525. }
  526. } NEXT_BLOCK_ALL;
  527. this->func->Dump();
  528. }
  529. }
  530. #endif
  531. bool breakBlocksRelocated = this->CanonicalizeLoops();
  532. blockNum = 0;
  533. FOREACH_BLOCK_ALL(block, this)
  534. {
  535. block->SetBlockNum(blockNum++);
  536. } NEXT_BLOCK_ALL;
  537. #if DBG
  538. FOREACH_BLOCK_ALL(block, this)
  539. {
  540. if (assignRegionsBeforeGlobopt)
  541. {
  542. if (block->GetFirstInstr()->AsLabelInstr()->GetRegion())
  543. {
  544. Assert(block->GetFirstInstr()->AsLabelInstr()->GetRegion()->ehBailoutData);
  545. }
  546. }
  547. } NEXT_BLOCK_ALL;
  548. #endif
  549. #if DBG_DUMP
  550. if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  551. {
  552. if (assignRegionsBeforeGlobopt)
  553. {
  554. Output::Print(_u("After CanonicalizeLoops\n"));
  555. FOREACH_BLOCK_ALL(block, this)
  556. {
  557. block->DumpHeader(true);
  558. Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
  559. if (region)
  560. {
  561. const char16 * regMap[] = { _u("RegionTypeInvalid"),
  562. _u("RegionTypeRoot"),
  563. _u("RegionTypeTry"),
  564. _u("RegionTypeCatch"),
  565. _u("RegionTypeFinally") };
  566. Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
  567. }
  568. } NEXT_BLOCK_ALL;
  569. this->func->Dump();
  570. }
  571. }
  572. #endif
  573. if (breakBlocksRelocated)
  574. {
  575. // Sort loop lists only if there is break block removal.
  576. SortLoopLists();
  577. }
  578. #if DBG
  579. this->VerifyLoopGraph();
  580. #endif
  581. #if DBG_DUMP
  582. this->Dump(false, nullptr);
  583. #endif
  584. }
// Wire an explicit control-flow path from the end of a finally region to an
// early-exit target (the destination of a break/continue/return that left the
// EH region). A BrOnException to the exit label is inserted just before the
// finally's terminating Leave/LeaveNull, and that terminator is split off into
// its own block so the new edge has a block boundary to hang off.
void FlowGraph::InsertEdgeFromFinallyToEarlyExit(BasicBlock *finallyEndBlock, IR::LabelInstr *exitLabel)
{
    IR::Instr * lastInstr = finallyEndBlock->GetLastInstr();
    IR::LabelInstr * lastLabel = finallyEndBlock->GetFirstInstr()->AsLabelInstr();
    // The finally-end block must terminate in one of these opcodes.
    Assert(lastInstr->m_opcode == Js::OpCode::LeaveNull || lastInstr->m_opcode == Js::OpCode::Leave || lastInstr->m_opcode == Js::OpCode::BrOnException);
    // Add a new block, add BrOnException to earlyexit, assign region
    // Finally
    // ...
    // L1:
    // LeaveNull
    // to
    // Finally
    // ...
    // L1:
    // BrOnException earlyExitLabel
    // L1':
    // LeaveNull
    BasicBlock *nextBB = finallyEndBlock->GetNext();
    // L1' in the picture above: a fresh label immediately before the terminator.
    IR::LabelInstr *leaveLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func);
    lastInstr->InsertBefore(leaveLabel);
    if (this->leaveNullLabelToFinallyLabelMap->ContainsKey(finallyEndBlock->GetFirstInstr()->AsLabelInstr()))
    {
        // Add new LeaveNull label to the map, we don't delete old LeaveNull label from the map
        Assert(this->leaveNullLabelToFinallyLabelMap->ContainsKey(finallyEndBlock->GetFirstInstr()->AsLabelInstr()));
        this->leaveNullLabelToFinallyLabelMap->Item(leaveLabel, this->leaveNullLabelToFinallyLabelMap->Item(finallyEndBlock->GetFirstInstr()->AsLabelInstr()));
    }
    // Split the terminator into its own block (leaveLabel..lastInstr) and keep
    // the fallthrough edge from the finally-end block to it.
    this->AddBlock(leaveLabel, lastInstr, nextBB, finallyEndBlock /*prevBlock*/);
    leaveLabel->SetRegion(lastLabel->GetRegion());
    this->AddEdge(finallyEndBlock, leaveLabel->GetBasicBlock());
    // If the Leave/LeaveNull at the end of finally was not preceeded by a Label, we have to create a new block with BrOnException to early exit
    if (!lastInstr->GetPrevRealInstrOrLabel()->IsLabelInstr())
    {
        IR::LabelInstr *brLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func);
        leaveLabel->InsertBefore(brLabel);
        IR::BranchInstr *brToExit = IR::BranchInstr::New(Js::OpCode::BrOnException, exitLabel, this->func);
        brToExit->m_brFinallyToEarlyExit = true;
        brToExit->SetByteCodeOffset(lastInstr);
        leaveLabel->InsertBefore(brToExit);
        this->AddBlock(brLabel, brToExit, finallyEndBlock->GetNext(), finallyEndBlock /*prevBlock*/);
        brLabel->SetRegion(lastLabel->GetRegion());
        this->AddEdge(finallyEndBlock, brLabel->GetBasicBlock());
    }
    else
    {
        // If the Leave/LeaveNull at the end of finally was preceeded by a Label, we reuse the block inserting BrOnException to early exit in it
        IR::BranchInstr *brToExit = IR::BranchInstr::New(Js::OpCode::BrOnException, exitLabel, this->func);
        brToExit->SetByteCodeOffset(lastInstr);
        brToExit->m_brFinallyToEarlyExit = true;
        leaveLabel->InsertBefore(brToExit);
        this->AddEdge(finallyEndBlock, exitLabel->GetBasicBlock());
    }
    // In case of throw/non-terminating loop, there maybe no edge to the next block
    if (this->FindEdge(finallyEndBlock, nextBB))
    {
        finallyEndBlock->RemoveSucc(nextBB, this);
    }
    // The block holding the terminator is now the end of this finally region.
    this->regToFinallyEndMap->Item(lastLabel->GetRegion(), leaveLabel->GetBasicBlock());
}
// Restore block-number order in every loop's block list after break-block
// relocation may have shuffled blocks. Uses insertion sort because each list
// is expected to be nearly sorted already.
void
FlowGraph::SortLoopLists()
{
    // Sort the blocks in loopList
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        // Highest block number seen so far in the sorted prefix.
        unsigned int lastBlockNumber = loop->GetHeadBlock()->GetBlockNum();
        // Insertion sort as the blockList is almost sorted in the loop.
        FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)
        {
            if (lastBlockNumber <= block->GetBlockNum())
            {
                // Already in order; extend the sorted prefix.
                lastBlockNumber = block->GetBlockNum();
            }
            else
            {
                // Out of order: unlink and re-insert before the first block
                // with a larger number.
                iter.UnlinkCurrent();
                FOREACH_BLOCK_IN_LOOP_EDITING(insertBlock,loop,newIter)
                {
                    if (insertBlock->GetBlockNum() > block->GetBlockNum())
                    {
                        break;
                    }
                }NEXT_BLOCK_IN_LOOP_EDITING;
                // newIter is positioned at the insertion point found above.
                newIter.InsertBefore(block);
            }
        }NEXT_BLOCK_IN_LOOP_EDITING;
    }
}
// Flow-graph-level peephole pass: folds branch-to-branch chains, fuses a
// compare with the BrTrue/BrFalse that consumes it, removes unreferenced
// labels, retargets StatementBoundary offsets, and (once an unsigned coercion
// "x >>> 0" is seen) rewrites eligible signed compares to unsigned forms.
// Does nothing for functions containing try, or when the FGPeeps phase is off.
void
FlowGraph::RunPeeps()
{
    if (this->func->HasTry())
    {
        return;
    }
    if (PHASE_OFF(Js::FGPeepsPhase, this->func))
    {
        return;
    }
    // Most recent compare whose result may feed a later BrTrue/BrFalse.
    IR::Instr * instrCm = nullptr;
    // Becomes true after seeing a ShrU by constant 0 (unsigned coercion),
    // enabling UnsignedCmpPeep on subsequent compares/branches.
    bool tryUnsignedCmpPeep = false;
    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->func)
    {
        switch(instr->m_opcode)
        {
        // All conditional/unconditional branches: run the generic branch peep
        // (and the unsigned-compare peep if enabled).
        case Js::OpCode::Br:
        case Js::OpCode::BrEq_I4:
        case Js::OpCode::BrGe_I4:
        case Js::OpCode::BrGt_I4:
        case Js::OpCode::BrLt_I4:
        case Js::OpCode::BrLe_I4:
        case Js::OpCode::BrUnGe_I4:
        case Js::OpCode::BrUnGt_I4:
        case Js::OpCode::BrUnLt_I4:
        case Js::OpCode::BrUnLe_I4:
        case Js::OpCode::BrNeq_I4:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrUnGe_A:
        case Js::OpCode::BrUnGt_A:
        case Js::OpCode::BrUnLt_A:
        case Js::OpCode::BrUnLe_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrNotNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrNotNull_A:
        case Js::OpCode::BrNotAddr_A:
        case Js::OpCode::BrAddr_A:
        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrOnHasProperty:
        case Js::OpCode::BrOnNoProperty:
        case Js::OpCode::BrHasSideEffects:
        case Js::OpCode::BrNotHasSideEffects:
        case Js::OpCode::BrFncEqApply:
        case Js::OpCode::BrFncNeqApply:
        case Js::OpCode::BrOnEmpty:
        case Js::OpCode::BrOnNotEmpty:
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        case Js::OpCode::BrOnObject_A:
        case Js::OpCode::BrOnClassConstructor:
        case Js::OpCode::BrOnBaseConstructorKind:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            break;
        case Js::OpCode::MultiBr:
            // TODO: Run peeps on these as well...
            break;
        case Js::OpCode::BrTrue_I4:
        case Js::OpCode::BrFalse_I4:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instrCm)
            {
                // A compare result reaches this branch: fuse compare+branch.
                if (instrCm->GetDst()->IsInt32())
                {
                    // Int32 compare result must be consumed by an _I4 branch.
                    Assert(instr->m_opcode == Js::OpCode::BrTrue_I4 || instr->m_opcode == Js::OpCode::BrFalse_I4);
                    instrNext = this->PeepTypedCm(instrCm);
                }
                else
                {
                    instrNext = this->PeepCm(instrCm);
                }
                instrCm = nullptr;
                if (instrNext == nullptr)
                {
                    // Set instrNext back to the current instr.
                    instrNext = instr;
                }
            }
            else
            {
                instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            }
            break;
        // Signed/strict compares: optionally apply the unsigned-compare peep,
        // then fall through to record the compare for branch fusion.
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmSrNeq_A:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            // FALLTHROUGH: unsigned compares skip the peep above but are still
            // recorded for compare/branch fusion below.
        case Js::OpCode::CmUnGe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGe_A:
        case Js::OpCode::CmUnGt_A:
        case Js::OpCode::CmUnLt_A:
        case Js::OpCode::CmUnLe_A:
            // There may be useless branches between the Cm instr and the branch that uses the result.
            // So save the last Cm instr seen, and trigger the peep on the next BrTrue/BrFalse.
            instrCm = instr;
            break;
        case Js::OpCode::Label:
            if (instr->AsLabelInstr()->IsUnreferenced())
            {
                // Nothing branches here; try to remove the label (and possibly
                // merge surrounding code).
                instrNext = Peeps::PeepUnreachableLabel(instr->AsLabelInstr(), false);
            }
            break;
        case Js::OpCode::StatementBoundary:
            // Re-anchor the boundary's offset to the next real instruction.
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(instr->GetNextRealInstrOrLabel());
            break;
        case Js::OpCode::ShrU_I4:
        case Js::OpCode::ShrU_A:
            if (tryUnsignedCmpPeep)
            {
                // Already enabled; no need to re-examine.
                break;
            }
            // "x >>> 0" with a single-def dst marks an unsigned coercion:
            // enable the unsigned-compare peep from here on.
            if (instr->GetDst()->AsRegOpnd()->m_sym->IsSingleDef()
                && instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->IsTaggableIntConst()
                && instr->GetSrc2()->AsRegOpnd()->m_sym->GetIntConstValue() == 0)
            {
                tryUnsignedCmpPeep = true;
            }
            break;
        default:
            // Every branch opcode must be handled in a case above.
            Assert(!instr->IsBranchInstr());
        }
    } NEXT_INSTR_IN_FUNC_EDITING;
}
// Insert a landing-pad block immediately before this loop's head block and
// redirect all non-back-edge predecessors to it. The landing pad gives globopt
// a place to hoist loop-invariant code and to re-optimize on loop re-entry.
void
Loop::InsertLandingPad(FlowGraph *fg)
{
    BasicBlock *headBlock = this->GetHeadBlock();
    // Always create a landing pad. This allows globopt to easily hoist instructions
    // and re-optimize the block if needed.
    BasicBlock *landingPad = BasicBlock::New(fg);
    this->landingPad = landingPad;
    // The landing pad consists of a single label placed just before the head.
    IR::Instr * headInstr = headBlock->GetFirstInstr();
    IR::LabelInstr *landingPadLabel = IR::LabelInstr::New(Js::OpCode::Label, headInstr->m_func);
    landingPadLabel->SetByteCodeOffset(headInstr);
    headInstr->InsertBefore(landingPadLabel);
    landingPadLabel->SetBasicBlock(landingPad);
    // Landing pad inherits the head block's EH region and non-branch-ref flag.
    landingPadLabel->SetRegion(headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion());
    landingPadLabel->m_hasNonBranchRef = headBlock->GetFirstInstr()->AsLabelInstr()->m_hasNonBranchRef;
    landingPad->SetBlockNum(fg->blockCount++);
    landingPad->SetFirstInstr(landingPadLabel);
    landingPad->SetLastInstr(landingPadLabel);
    // Link the landing pad into the block list just before the head block.
    landingPad->prev = headBlock->prev;
    landingPad->prev->next = landingPad;
    landingPad->next = headBlock;
    headBlock->prev = landingPad;
    // The landing pad belongs to the enclosing (parent) loop, not this one.
    Loop *parentLoop = this->parent;
    landingPad->loop = parentLoop;
    // We need to add this block to the block list of the parent loops
    while (parentLoop)
    {
        // Find the head block in the block list of the parent loop
        FOREACH_BLOCK_IN_LOOP_EDITING(block, parentLoop, iter)
        {
            if (block == headBlock)
            {
                // Add the landing pad to the block list
                iter.InsertBefore(landingPad);
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        parentLoop = parentLoop->parent;
    }
    // Fix predecessor flow edges
    FOREACH_PREDECESSOR_EDGE_EDITING(edge, headBlock, iter)
    {
        // Make sure it isn't a back-edge
        if (edge->GetPred()->loop != this && !this->IsDescendentOrSelf(edge->GetPred()->loop))
        {
            // Retarget the predecessor's branch from the head label to the pad.
            if (edge->GetPred()->GetLastInstr()->IsBranchInstr() && headBlock->GetFirstInstr()->IsLabelInstr())
            {
                IR::BranchInstr *branch = edge->GetPred()->GetLastInstr()->AsBranchInstr();
                branch->ReplaceTarget(headBlock->GetFirstInstr()->AsLabelInstr(), landingPadLabel);
            }
            // Move the edge itself: pred -> landingPad instead of pred -> head.
            headBlock->UnlinkPred(edge->GetPred(), false);
            landingPad->AddPred(edge, fg);
            edge->SetSucc(landingPad);
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;
    fg->AddEdge(landingPad, headBlock);
    // Inside a non-root EH region, keep the label pinned (treated as referenced
    // by non-branch sources) so it is not removed.
    if (headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion() && headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion()->GetType() != RegionTypeRoot)
    {
        landingPadLabel->m_hasNonBranchRef = true;
    }
}
// Relocate "break blocks" — block sequences lexically inside this loop's span
// that do not belong to the loop (or a descendant) — to after the loop's tail,
// so the loop body becomes contiguous. Returns true if any block was moved.
bool
Loop::RemoveBreakBlocks(FlowGraph *fg)
{
    bool breakBlockRelocated = false;
    if (PHASE_OFF(Js::RemoveBreakBlockPhase, fg->GetFunc()))
    {
        return false;
    }
    // Find the lexically last block of this loop.
    BasicBlock *loopTailBlock = nullptr;
    FOREACH_BLOCK_IN_LOOP(block, this)
    {
        loopTailBlock = block;
    }NEXT_BLOCK_IN_LOOP;
    AnalysisAssert(loopTailBlock);
    // Walk backward over the loop's lexical range looking for blocks that do
    // not belong to this loop or any nested loop.
    FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(breakBlockEnd, loopTailBlock, this->GetHeadBlock(), blockPrev)
    {
        while (!this->IsDescendentOrSelf(breakBlockEnd->loop))
        {
            // Found at least one break block;
            breakBlockRelocated = true;
#if DBG
            breakBlockEnd->isBreakBlock = true;
#endif
            // Find the first block in this break block sequence.
            BasicBlock *breakBlockStart = breakBlockEnd;
            BasicBlock *breakBlockStartPrev = breakBlockEnd->GetPrev();
            // Walk back the blocks until we find a block which belongs to that block.
            // Note: We don't really care if there are break blocks corresponding to different loops. We move the blocks conservatively to the end of the loop.
            // Algorithm works on one loop at a time.
            while((breakBlockStartPrev->loop == breakBlockEnd->loop) || !this->IsDescendentOrSelf(breakBlockStartPrev->loop))
            {
                breakBlockStart = breakBlockStartPrev;
                breakBlockStartPrev = breakBlockStartPrev->GetPrev();
            }
#if DBG
            breakBlockStart->isBreakBlock = true; // Mark the first block as well.
#endif
            BasicBlock *exitLoopTail = loopTailBlock;
            // Move these break blocks to the tail of the loop.
            fg->MoveBlocksBefore(breakBlockStart, breakBlockEnd, exitLoopTail->next);
#if DBG_DUMP
            fg->Dump(true /*needs verbose flag*/, _u("\n After Each iteration of canonicalization \n"));
#endif
            // Again be conservative, there are edits to the loop graph. Start fresh for this loop.
            // Restart the backward walk from the loop tail after the move.
            breakBlockEnd = loopTailBlock;
            blockPrev = breakBlockEnd->prev;
        }
    } NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING;
    return breakBlockRelocated;
}
// Move the contiguous block range [blockStart, blockEnd] so it sits just
// before insertBlock, fixing block links, instruction links, and — where a
// fallthrough existed at the source or destination seam — inserting
// compensation branches/blocks so control flow is preserved.
void
FlowGraph::MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock)
{
    BasicBlock *srcPredBlock = blockStart->prev;
    BasicBlock *srcNextBlock = blockEnd->next;
    BasicBlock *dstPredBlock = insertBlock->prev;
    IR::Instr* dstPredBlockLastInstr = dstPredBlock->GetLastInstr();
    IR::Instr* blockEndLastInstr = blockEnd->GetLastInstr();
    // Fix block linkage
    srcPredBlock->next = srcNextBlock;
    srcNextBlock->prev = srcPredBlock;
    dstPredBlock->next = blockStart;
    insertBlock->prev = blockEnd;
    blockStart->prev = dstPredBlock;
    blockEnd->next = insertBlock;
    // Fix instruction linkage
    IR::Instr::MoveRangeAfter(blockStart->GetFirstInstr(), blockEndLastInstr, dstPredBlockLastInstr);
    // Fix instruction flow
    IR::Instr *srcLastInstr = srcPredBlock->GetLastInstr();
    if (srcLastInstr->IsBranchInstr() && srcLastInstr->AsBranchInstr()->HasFallThrough())
    {
        // There was a fallthrough in the break blocks original position.
        IR::BranchInstr *srcBranch = srcLastInstr->AsBranchInstr();
        IR::Instr *srcBranchNextInstr = srcBranch->GetNextRealInstrOrLabel();
        // Save the target and invert the branch.
        IR::LabelInstr *srcBranchTarget = srcBranch->GetTarget();
        srcPredBlock->InvertBranch(srcBranch);
        IR::LabelInstr *srcLabel = blockStart->GetFirstInstr()->AsLabelInstr();
        // Point the inverted branch to break block.
        srcBranch->SetTarget(srcLabel);
        if (srcBranchNextInstr != srcBranchTarget)
        {
            // The inverted branch's old taken-path still needs a route: insert
            // a compensation block on that edge at the source position.
            FlowEdge *srcEdge = this->FindEdge(srcPredBlock, srcBranchTarget->GetBasicBlock());
            Assert(srcEdge);
            BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(srcEdge, true /*insert compensation block to loop list*/, false /*At source*/);
            Assert(compensationBlock);
        }
    }
    IR::Instr *dstLastInstr = dstPredBlockLastInstr;
    bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout));
    if (dstLastInstr->IsBranchInstr() && dstLastInstr->AsBranchInstr()->HasFallThrough())
    {
        //There is a fallthrough in the block after which break block is inserted.
        FlowEdge *dstEdge = this->FindEdge(dstPredBlock, blockEnd->GetNext());
        Assert(dstEdge);
        BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert compensation block to loop list*/, true /*At sink*/);
        Assert(compensationBlock);
    }
    // We have to update region info for blocks whose predecessors changed
    if (assignRegionsBeforeGlobopt)
    {
        UpdateRegionForBlockFromEHPred(dstPredBlock, true);
        UpdateRegionForBlockFromEHPred(blockStart, true);
        UpdateRegionForBlockFromEHPred(srcNextBlock, true);
    }
}
  998. FlowEdge *
  999. FlowGraph::FindEdge(BasicBlock *predBlock, BasicBlock *succBlock)
  1000. {
  1001. FlowEdge *srcEdge = nullptr;
  1002. FOREACH_SUCCESSOR_EDGE(edge, predBlock)
  1003. {
  1004. if (edge->GetSucc() == succBlock)
  1005. {
  1006. srcEdge = edge;
  1007. break;
  1008. }
  1009. } NEXT_SUCCESSOR_EDGE;
  1010. return srcEdge;
  1011. }
// Invert the conditional branch terminating this block, and reverse the
// successor edge list so it still mirrors the (now swapped) taken/fallthrough
// order. The block must end in `branch` and have exactly two successors.
void
BasicBlock::InvertBranch(IR::BranchInstr *branch)
{
    Assert(this->GetLastInstr() == branch);
    Assert(this->GetSuccList()->HasTwo());
    branch->Invert();
    this->GetSuccList()->Reverse();
}
  1020. bool
  1021. FlowGraph::CanonicalizeLoops()
  1022. {
  1023. if (this->func->HasProfileInfo())
  1024. {
  1025. this->implicitCallFlags = this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags();
  1026. for (Loop *loop = this->loopList; loop; loop = loop->next)
  1027. {
  1028. this->implicitCallFlags = (Js::ImplicitCallFlags)(this->implicitCallFlags | loop->GetImplicitCallFlags());
  1029. }
  1030. }
  1031. #if DBG_DUMP
  1032. this->Dump(true, _u("\n Before canonicalizeLoops \n"));
  1033. #endif
  1034. bool breakBlockRelocated = false;
  1035. for (Loop *loop = this->loopList; loop; loop = loop->next)
  1036. {
  1037. loop->InsertLandingPad(this);
  1038. if (!this->func->HasTry() || this->func->DoOptimizeTry())
  1039. {
  1040. bool relocated = loop->RemoveBreakBlocks(this);
  1041. if (!breakBlockRelocated && relocated)
  1042. {
  1043. breakBlockRelocated = true;
  1044. }
  1045. }
  1046. }
  1047. #if DBG_DUMP
  1048. this->Dump(true, _u("\n After canonicalizeLoops \n"));
  1049. #endif
  1050. return breakBlockRelocated;
  1051. }
// Find the loops in this function, build the loop structure, and build a linked
// list of the basic blocks in this loop (including blocks of inner loops). The
// list preserves the reverse post-order of the blocks in the flowgraph block list.
void
FlowGraph::FindLoops()
{
    // Fast exit: nothing to do if no loop header was ever marked.
    if (!this->hasLoop)
    {
        return;
    }
    Func * func = this->func;
    // Walk blocks backward; a successor that is a not-yet-processed loop
    // header means this block is the source of a back-edge.
    FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func)
    {
        if (block->loop != nullptr)
        {
            // Block already visited
            continue;
        }
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader && succ->loop == nullptr)
            {
                // Found a loop back-edge
                BuildLoop(succ, block);
            }
        } NEXT_SUCCESSOR_BLOCK;
        // A header with no loop built for it by now has no back-edge: demote it.
        if (block->isLoopHeader && block->loop == nullptr)
        {
            // We would have built a loop for it if it was a loop...
            block->isLoopHeader = false;
            block->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        }
    } NEXT_BLOCK_BACKWARD_IN_FUNC;
}
// Build a Loop structure for the loop whose header is headBlock and whose
// (candidate) back-edge source is tailBlock, then collect its member blocks.
// parentLoop, when non-null, is the enclosing loop seen by the caller.
void
FlowGraph::BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop)
{
    // This function is recursive, so when jitting in the foreground, probe the stack
    if(!func->IsBackgroundJIT())
    {
        PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
    }
    if (tailBlock->number < headBlock->number)
    {
        // Not a loop. We didn't see any back-edge.
        headBlock->isLoopHeader = false;
        headBlock->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        return;
    }
    Assert(headBlock->isLoopHeader);
    // Allocate the Loop (zero-initialized) and push it on the function's loop list.
    Loop *loop = JitAnewZ(this->GetFunc()->m_alloc, Loop, this->GetFunc()->m_alloc, this->GetFunc());
    loop->next = this->loopList;
    this->loopList = loop;
    headBlock->loop = loop;
    loop->headBlock = headBlock;
    loop->int32SymsOnEntry = nullptr;
    loop->lossyInt32SymsOnEntry = nullptr;
    // If parentLoop is a parent of loop, its headBlock better appear first.
    if (parentLoop && loop->headBlock->number > parentLoop->headBlock->number)
    {
        loop->parent = parentLoop;
        parentLoop->isLeaf = false;
    }
    loop->hasDeadStoreCollectionPass = false;
    loop->hasDeadStorePrepass = false;
    loop->memOpInfo = nullptr;
    loop->doMemOp = true;
    // Temporary allocator for the block bit-vector used while collecting
    // the loop's member blocks.
    NoRecoverMemoryJitArenaAllocator tempAlloc(_u("BE-LoopBuilder"), this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    WalkLoopBlocks(tailBlock, loop, &tempAlloc);
    Assert(loop->GetHeadBlock() == headBlock);
    IR::LabelInstr * firstInstr = loop->GetLoopTopInstr();
    firstInstr->SetLoop(loop);
    if (firstInstr->IsProfiledLabelInstr())
    {
        // Seed the loop's implicit-call and loop flags from the profile data
        // collected at the loop top.
        loop->SetImplicitCallFlags(firstInstr->AsProfiledLabelInstr()->loopImplicitCallFlags);
        if (this->func->HasProfileInfo() && this->func->GetReadOnlyProfileInfo()->IsLoopImplicitCallInfoDisabled())
        {
            // Per-loop info is disabled; fall back to the function-level flags.
            loop->SetImplicitCallFlags(this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags());
        }
        loop->SetLoopFlags(firstInstr->AsProfiledLabelInstr()->loopFlags);
    }
    else
    {
        // Didn't collect profile information, don't do optimizations
        loop->SetImplicitCallFlags(Js::ImplicitCall_All);
    }
}
  1139. Loop::MemCopyCandidate* Loop::MemOpCandidate::AsMemCopy()
  1140. {
  1141. Assert(this->IsMemCopy());
  1142. return (Loop::MemCopyCandidate*)this;
  1143. }
  1144. Loop::MemSetCandidate* Loop::MemOpCandidate::AsMemSet()
  1145. {
  1146. Assert(this->IsMemSet());
  1147. return (Loop::MemSetCandidate*)this;
  1148. }
  1149. void
  1150. Loop::EnsureMemOpVariablesInitialized()
  1151. {
  1152. Assert(this->doMemOp);
  1153. if (this->memOpInfo == nullptr)
  1154. {
  1155. JitArenaAllocator *allocator = this->GetFunc()->GetTopFunc()->m_fg->alloc;
  1156. this->memOpInfo = JitAnewStruct(allocator, Loop::MemOpInfo);
  1157. this->memOpInfo->inductionVariablesUsedAfterLoop = nullptr;
  1158. this->memOpInfo->startIndexOpndCache[0] = nullptr;
  1159. this->memOpInfo->startIndexOpndCache[1] = nullptr;
  1160. this->memOpInfo->startIndexOpndCache[2] = nullptr;
  1161. this->memOpInfo->startIndexOpndCache[3] = nullptr;
  1162. this->memOpInfo->inductionVariableChangeInfoMap = JitAnew(allocator, Loop::InductionVariableChangeInfoMap, allocator);
  1163. this->memOpInfo->inductionVariableOpndPerUnrollMap = JitAnew(allocator, Loop::InductionVariableOpndPerUnrollMap, allocator);
  1164. this->memOpInfo->candidates = JitAnew(allocator, Loop::MemOpList, allocator);
  1165. }
  1166. }
  1167. // Walk the basic blocks backwards until we find the loop header.
  1168. // Mark basic blocks in the loop by looking at the predecessors
  1169. // of blocks known to be in the loop.
  1170. // Recurse on inner loops.
void
FlowGraph::WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc)
{
    AnalysisAssert(loop);
    // Bit vector of block numbers known to be inside this loop.
    BVSparse<JitArenaAllocator> *loopBlocksBv = JitAnew(tempAlloc, BVSparse<JitArenaAllocator>, tempAlloc);
    BasicBlock *tailBlock = block;
    BasicBlock *lastBlock;
    loopBlocksBv->Set(block->GetBlockNum());
    this->AddBlockToLoop(block, loop);
    if (block == loop->headBlock)
    {
        // Single block loop, we're done
        return;
    }
    // Walk backwards from the tail toward the header, marking blocks as
    // in-loop via predecessor propagation.
    do
    {
        BOOL isInLoop = loopBlocksBv->Test(block->GetBlockNum());
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader)
            {
                // Found a loop back-edge
                if (loop->headBlock == succ)
                {
                    // Back-edge to this loop's own header: block is in the loop.
                    isInLoop = true;
                }
                else if (succ->loop == nullptr || succ->loop->headBlock != succ)
                {
                    // Recurse on inner loop
                    BuildLoop(succ, block, isInLoop ? loop : nullptr);
                }
            }
        } NEXT_SUCCESSOR_BLOCK;
        if (isInLoop)
        {
            // This block is in the loop. All of its predecessors should be contained in the loop as well.
            FOREACH_PREDECESSOR_BLOCK(pred, block)
            {
                // Fix up loop parent if it isn't set already.
                // If pred->loop != loop, we're looking at an inner loop, which was already visited.
                // If pred->loop->parent == nullptr, this is the first time we see this loop from an outer
                // loop, so this must be an immediate child.
                if (pred->loop && pred->loop != loop && loop->headBlock->number < pred->loop->headBlock->number
                    && (pred->loop->parent == nullptr || pred->loop->parent->headBlock->number < loop->headBlock->number))
                {
                    pred->loop->parent = loop;
                    loop->isLeaf = false;
                    // Propagate the inner loop's call/implicit-call info outward.
                    if (pred->loop->hasCall)
                    {
                        loop->SetHasCall();
                    }
                    loop->SetImplicitCallFlags(pred->loop->GetImplicitCallFlags());
                }
                // Add pred to loop bit vector
                loopBlocksBv->Set(pred->GetBlockNum());
            } NEXT_PREDECESSOR_BLOCK;
            // Only claim the block if it isn't already owned by an outer loop.
            if (block->loop == nullptr || block->loop->IsDescendentOrSelf(loop))
            {
                block->loop = loop;
            }
            if (block != tailBlock)
            {
                // The tail block was already added before the walk started.
                this->AddBlockToLoop(block, loop);
            }
        }
        lastBlock = block;
        block = block->GetPrev();
    } while (lastBlock != loop->headBlock);
}
// Add block to this loop, and its parent loops.
  1241. void
  1242. FlowGraph::AddBlockToLoop(BasicBlock *block, Loop *loop)
  1243. {
  1244. loop->blockList.Prepend(block);
  1245. if (block->hasCall)
  1246. {
  1247. loop->SetHasCall();
  1248. }
  1249. }
  1250. ///----------------------------------------------------------------------------
  1251. ///
  1252. /// FlowGraph::AddBlock
  1253. ///
  1254. /// Finish processing of a new block: hook up successor arcs, note loops, etc.
  1255. ///
  1256. ///----------------------------------------------------------------------------
BasicBlock *
FlowGraph::AddBlock(
    IR::Instr * firstInstr,
    IR::Instr * lastInstr,
    BasicBlock * nextBlock,
    BasicBlock *prevBlock)
{
    BasicBlock * block;
    IR::LabelInstr * labelInstr;
    // Ensure the block starts with a label; synthesize one if needed.
    if (firstInstr->IsLabelInstr())
    {
        labelInstr = firstInstr->AsLabelInstr();
    }
    else
    {
        labelInstr = IR::LabelInstr::New(Js::OpCode::Label, firstInstr->m_func);
        labelInstr->SetByteCodeOffset(firstInstr);
        if (firstInstr->IsEntryInstr())
        {
            // Can't insert before the function entry; the label goes after it.
            firstInstr->InsertAfter(labelInstr);
        }
        else
        {
            firstInstr->InsertBefore(labelInstr);
        }
        firstInstr = labelInstr;
    }
    // Reuse a block already attached to this label (e.g. created as a branch
    // target), or create a fresh one.
    block = labelInstr->GetBasicBlock();
    if (block == nullptr)
    {
        block = BasicBlock::New(this);
        labelInstr->SetBasicBlock(block);
        // Remember last block in function to target successor of RETs.
        if (!this->tailBlock)
        {
            this->tailBlock = block;
        }
    }
    // Hook up the successor edges
    if (lastInstr->EndsBasicBlock())
    {
        BasicBlock * blockTarget = nullptr;
        if (lastInstr->IsBranchInstr())
        {
            // Hook up a successor edge to the branch target.
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if(branchInstr->IsMultiBranch())
            {
                // One edge per distinct target label of the multi-branch.
                BasicBlock * blockMultiBrTarget;
                IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();
                multiBranchInstr->MapUniqueMultiBrLabels([&](IR::LabelInstr * labelInstr) -> void
                {
                    blockMultiBrTarget = SetBlockTargetAndLoopFlag(labelInstr);
                    this->AddEdge(block, blockMultiBrTarget);
                });
            }
            else
            {
                IR::LabelInstr * targetLabelInstr = branchInstr->GetTarget();
                blockTarget = SetBlockTargetAndLoopFlag(targetLabelInstr);
                if (branchInstr->IsConditional())
                {
                    // Make sure the fall-through label (if any) also gets a block.
                    IR::Instr *instrNext = branchInstr->GetNextRealInstrOrLabel();
                    if (instrNext->IsLabelInstr())
                    {
                        SetBlockTargetAndLoopFlag(instrNext->AsLabelInstr());
                    }
                }
            }
        }
        else if (lastInstr->m_opcode == Js::OpCode::Ret && block != this->tailBlock)
        {
            // RETs flow to the function's tail block.
            blockTarget = this->tailBlock;
        }
        if (blockTarget)
        {
            this->AddEdge(block, blockTarget);
        }
    }
    if (lastInstr->HasFallThrough())
    {
        // Add a branch to next instruction so that we don't have to update the flow graph
        // when the glob opt tries to insert instructions.
        // We don't run the globopt with try/catch, don't need to insert branch to next for fall through blocks.
        if (!this->func->HasTry() && !lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * instr = IR::BranchInstr::New(Js::OpCode::Br,
                lastInstr->m_next->AsLabelInstr(), lastInstr->m_func);
            instr->SetByteCodeOffset(lastInstr->m_next);
            lastInstr->InsertAfter(instr);
            lastInstr = instr;
        }
        this->AddEdge(block, nextBlock);
    }
    block->SetBlockNum(this->blockCount++);
    block->SetFirstInstr(firstInstr);
    block->SetLastInstr(lastInstr);
    // Splice the block into the doubly-linked block list: at the head when
    // no prevBlock is given, otherwise between prevBlock and nextBlock.
    if (!prevBlock)
    {
        if (this->blockList)
        {
            this->blockList->prev = block;
        }
        block->next = this->blockList;
        this->blockList = block;
    }
    else
    {
        prevBlock->next = block;
        block->prev = prevBlock;
        block->next = nextBlock;
        nextBlock->prev = block;
    }
    return block;
}
  1372. BasicBlock *
  1373. FlowGraph::SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr)
  1374. {
  1375. BasicBlock * blockTarget = nullptr;
  1376. blockTarget = labelInstr->GetBasicBlock();
  1377. if (blockTarget == nullptr)
  1378. {
  1379. blockTarget = BasicBlock::New(this);
  1380. labelInstr->SetBasicBlock(blockTarget);
  1381. }
  1382. if (labelInstr->m_isLoopTop)
  1383. {
  1384. blockTarget->isLoopHeader = true;
  1385. this->hasLoop = true;
  1386. }
  1387. return blockTarget;
  1388. }
  1389. ///----------------------------------------------------------------------------
  1390. ///
  1391. /// FlowGraph::AddEdge
  1392. ///
  1393. /// Add an edge connecting the two given blocks.
  1394. ///
  1395. ///----------------------------------------------------------------------------
  1396. FlowEdge *
  1397. FlowGraph::AddEdge(BasicBlock * blockPred, BasicBlock * blockSucc)
  1398. {
  1399. FlowEdge * edge = FlowEdge::New(this);
  1400. edge->SetPred(blockPred);
  1401. edge->SetSucc(blockSucc);
  1402. blockPred->AddSucc(edge, this);
  1403. blockSucc->AddPred(edge, this);
  1404. return edge;
  1405. }
  1406. ///----------------------------------------------------------------------------
  1407. ///
  1408. /// FlowGraph::Destroy
  1409. ///
  1410. /// Remove all references to FG structures from the IR in preparation for freeing
  1411. /// the FG.
  1412. ///
  1413. ///----------------------------------------------------------------------------
void
FlowGraph::Destroy(void)
{
    BOOL fHasTry = this->func->HasTry();
    if (fHasTry)
    {
        // Do unreachable code removal up front to avoid problems
        // with unreachable back edges, etc.
        this->RemoveUnreachableBlocks();
    }
    // Pass 1: detach FG data from the IR block by block — unlink labels,
    // mark loop tops for the register allocator, propagate EH regions,
    // and drop redundant branch-to-next instructions.
    FOREACH_BLOCK_ALL(block, this)
    {
        IR::Instr * firstInstr = block->GetFirstInstr();
        if (block->isDeleted && !block->isDead)
        {
            if (firstInstr->IsLabelInstr())
            {
                IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
                labelInstr->UnlinkBasicBlock();
                // Removing the label for non try blocks as we have a deleted block which has the label instruction
                // still not removed; this prevents the assert for cases where the deleted blocks fall through to a helper block,
                // i.e. helper introduced by polymorphic inlining bailout.
                // Skipping Try blocks as we have dependency on blocks to get the last instr(see below in this function)
                if (!fHasTry)
                {
                    if (this->func->GetJITFunctionBody()->IsCoroutine())
                    {
                        // the label could be a yield resume label, in which case we also need to remove it from the YieldOffsetResumeLabels list
                        this->func->MapUntilYieldOffsetResumeLabels([this, &labelInstr](int i, const YieldOffsetResumeLabel& yorl)
                        {
                            if (labelInstr == yorl.Second())
                            {
                                labelInstr->m_hasNonBranchRef = false;
                                this->func->RemoveYieldOffsetResumeLabel(yorl);
                                return true;
                            }
                            return false;
                        });
                    }
                    Assert(labelInstr->IsUnreferenced());
                    labelInstr->Remove();
                }
            }
            continue;
        }
        if (block->isLoopHeader && !block->isDead)
        {
            // Mark the tail block of this loop (the last back-edge). The register allocator
            // uses this to lexically find loops.
            BasicBlock *loopTail = nullptr;
            AssertMsg(firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->m_isLoopTop,
                "Label not marked as loop top...");
            // Search the loop's blocks for one whose successor is the header.
            FOREACH_BLOCK_IN_LOOP(loopBlock, block->loop)
            {
                FOREACH_SUCCESSOR_BLOCK(succ, loopBlock)
                {
                    if (succ == block)
                    {
                        loopTail = loopBlock;
                        break;
                    }
                } NEXT_SUCCESSOR_BLOCK;
            } NEXT_BLOCK_IN_LOOP;
            if (loopTail)
            {
                AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
                block->loop->SetLoopTopInstr(block->GetFirstInstr()->AsLabelInstr());
            }
            else
            {
                // This loop doesn't have a back-edge: that is, it is not a loop
                // anymore...
                firstInstr->AsLabelInstr()->m_isLoopTop = FALSE;
            }
        }
        if (fHasTry)
        {
            this->UpdateRegionForBlock(block);
        }
        if (firstInstr->IsLabelInstr())
        {
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->UnlinkBasicBlock();
            if (labelInstr->IsUnreferenced() && !fHasTry)
            {
                // This is an unreferenced label, probably added by FG building.
                // Delete it now to make extended basic blocks visible.
                if (firstInstr == block->GetLastInstr())
                {
                    // Label is the block's only instruction; nothing left to
                    // process for this block after removing it.
                    labelInstr->Remove();
                    continue;
                }
                else
                {
                    labelInstr->Remove();
                }
            }
        }
        // We don't run the globopt with try/catch, don't need to remove branch to next for fall through blocks
        IR::Instr * lastInstr = block->GetLastInstr();
        if (!fHasTry && lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if (!branchInstr->IsConditional() && branchInstr->GetTarget() == branchInstr->m_next)
            {
                // Remove branch to next
                branchInstr->Remove();
            }
        }
    }
    NEXT_BLOCK;
#if DBG
    if (fHasTry)
    {
        // Now that all blocks have regions, we should see consistently propagated regions at all
        // block boundaries.
        FOREACH_BLOCK(block, this)
        {
            Region * region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
            Region * predRegion = nullptr;
            // Validate this block's region against each predecessor's,
            // keyed on the opcode that ends the predecessor.
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predBlock->GetLastInstr() == nullptr)
                {
                    AssertMsg(region == predRegion, "Bad region propagation through empty block");
                }
                else
                {
                    switch (predBlock->GetLastInstr()->m_opcode)
                    {
                    case Js::OpCode::TryCatch:
                    case Js::OpCode::TryFinally:
                        // Entering a handler or a try: region must be a child of the pred's.
                        AssertMsg(region->GetParent() == predRegion, "Bad region prop on entry to try-catch/finally");
                        if (block->GetFirstInstr() == predBlock->GetLastInstr()->AsBranchInstr()->GetTarget())
                        {
                            if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::TryCatch)
                            {
                                AssertMsg(region->GetType() == RegionTypeCatch, "Bad region type on entry to catch");
                            }
                            else
                            {
                                AssertMsg(region->GetType() == RegionTypeFinally, "Bad region type on entry to finally");
                            }
                        }
                        else
                        {
                            AssertMsg(region->GetType() == RegionTypeTry, "Bad region type on entry to try");
                        }
                        break;
                    case Js::OpCode::Leave:
                        AssertMsg(region == predRegion->GetParent() || (predRegion->GetType() == RegionTypeTry && predRegion->GetMatchingFinallyRegion(false) == region) ||
                            (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;
                    case Js::OpCode::LeaveNull:
                        AssertMsg(region == predRegion->GetParent() || (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;
                    // If the try region has a branch out of the loop,
                    // - the branch is moved out of the loop as part of break block removal, and
                    // - BrOnException is inverted to BrOnNoException and a Br is inserted after it.
                    // Otherwise,
                    // - FullJit: BrOnException is removed in the forward pass.
                    case Js::OpCode::BrOnException:
                        Assert(!this->func->DoGlobOpt());
                        break;
                    case Js::OpCode::BrOnNoException:
                        Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch || region->GetType() == RegionTypeFinally ||
                            // A BrOnException from finally to early exit can be converted to BrOnNoException and Br
                            // The Br block maybe a common successor block for early exit along with the BrOnNoException block
                            // Region from Br block will be picked up from a predecessor which is not BrOnNoException due to early exit
                            // See test0() in test/EH/tryfinallytests.js
                            (predRegion->GetType() == RegionTypeFinally && predBlock->GetLastInstr()->AsBranchInstr()->m_brFinallyToEarlyExit));
                        break;
                    case Js::OpCode::Br:
                        if (predBlock->GetLastInstr()->AsBranchInstr()->m_leaveConvToBr)
                        {
                            // Leave converted to Br in finally region
                            AssertMsg(region == predRegion->GetParent(), "Bad region prop in finally");
                        }
                        else if (region->GetType() == RegionTypeCatch && region != predRegion)
                        {
                            AssertMsg(predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else if (region->GetType() == RegionTypeFinally && region != predRegion)
                        {
                            // We may be left with edges from finally region to early exit
                            AssertMsg(predRegion->IsNonExceptingFinally() || predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else
                        {
                            // We may be left with edges from finally region to early exit
                            AssertMsg(predRegion->IsNonExceptingFinally() || region == predRegion, "Bad region propagation through interior block");
                        }
                        break;
                    default:
                        break;
                    }
                }
            }
            NEXT_PREDECESSOR_BLOCK;
            // Structural sanity of the region itself: matching try/catch/finally links.
            switch (region->GetType())
            {
            case RegionTypeRoot:
                Assert(!region->GetMatchingTryRegion() && !region->GetMatchingCatchRegion() && !region->GetMatchingFinallyRegion(true) && !region->GetMatchingFinallyRegion(false));
                break;
            case RegionTypeTry:
                if ((this->func->IsSimpleJit() && this->func->hasBailout) || !this->func->DoOptimizeTry())
                {
                    Assert((region->GetMatchingCatchRegion() != nullptr) ^ (region->GetMatchingFinallyRegion(true) && !region->GetMatchingFinallyRegion(false)));
                }
                else
                {
                    Assert((region->GetMatchingCatchRegion() != nullptr) ^ (region->GetMatchingFinallyRegion(true) && region->GetMatchingFinallyRegion(false)));
                }
                break;
            case RegionTypeCatch:
            case RegionTypeFinally:
                Assert(region->GetMatchingTryRegion());
                break;
            }
        }
        NEXT_BLOCK;
        // Second sweep: in try code, labels were kept above; remove any that
        // ended up unreferenced (dead blocks included).
        FOREACH_BLOCK_DEAD_OR_ALIVE(block, this)
        {
            if (block->GetFirstInstr()->IsLabelInstr())
            {
                IR::LabelInstr *labelInstr = block->GetFirstInstr()->AsLabelInstr();
                if (labelInstr->IsUnreferenced())
                {
                    // This is an unreferenced label, probably added by FG building.
                    // Delete it now to make extended basic blocks visible.
                    labelInstr->Remove();
                }
            }
        } NEXT_BLOCK_DEAD_OR_ALIVE;
    }
#endif
    this->func->isFlowGraphValid = false;
}
  1653. bool FlowGraph::IsEHTransitionInstr(IR::Instr *instr)
  1654. {
  1655. Js::OpCode op = instr->m_opcode;
  1656. return (op == Js::OpCode::TryCatch || op == Js::OpCode::TryFinally || op == Js::OpCode::Leave || op == Js::OpCode::LeaveNull);
  1657. }
  1658. BasicBlock * FlowGraph::GetPredecessorForRegionPropagation(BasicBlock *block)
  1659. {
  1660. BasicBlock *ehPred = nullptr;
  1661. FOREACH_PREDECESSOR_BLOCK(predBlock, block)
  1662. {
  1663. Region * predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
  1664. if (IsEHTransitionInstr(predBlock->GetLastInstr()) && predRegion)
  1665. {
  1666. // MGTODO : change this to return, once you know there can exist only one eh transitioning pred
  1667. Assert(ehPred == nullptr);
  1668. ehPred = predBlock;
  1669. }
  1670. AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
  1671. }
  1672. NEXT_PREDECESSOR_BLOCK;
  1673. return ehPred;
  1674. }
  1675. // Propagate the region forward from the block's predecessor(s), tracking the effect
  1676. // of the flow transition. Record the region in the block-to-region map provided
  1677. // and on the label at the entry to the block (if any).
  1678. // We need to know the end of finally for inserting edge at the end of finally to early exit
  1679. // Store it in regToFinallyEndMap as we visit blocks instead of recomputing later while adding early exit edges
void
FlowGraph::UpdateRegionForBlock(BasicBlock * block)
{
    Region *region;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        // Region already assigned earlier; only expected in these EH configs.
        Assert(this->func->HasTry() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        return;
    }
    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else if (this->leaveNullLabelToFinallyLabelMap && block->GetLastInstr()->m_opcode == Js::OpCode::LeaveNull)
    {
        // We hold on to the LeaveNull block by adding BrOnException from loop tops
        // So that LeaveNull doesn't get removed due to unreachable block elimination
        // We need the finally end block to insert edge from finally to early exit
        // If a LeaveNull block had only BrOnException edges from predecessor
        // We will end up in inaccurate region propagation if we propagate from the predecessor edges
        // So pick up the finally region from the map
        IR::LabelInstr * finallyLabel = this->leaveNullLabelToFinallyLabelMap->Item(block->GetFirstInstr()->AsLabelInstr());
        Assert(finallyLabel);
        region = finallyLabel->GetRegion();
        // Remember this block as the end of its finally region.
        if (this->regToFinallyEndMap)
        {
            regToFinallyEndMap->Item(region, block);
        }
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        region = nullptr;
        FOREACH_PREDECESSOR_BLOCK(predBlock, block)
        {
            AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
            predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
            if (predRegion != nullptr)
            {
                region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                break;
            }
        }
        NEXT_PREDECESSOR_BLOCK;
    }
    // Only an unreachable block (no predecessors) may end up without a region.
    Assert(region || block->GetPredList()->Count() == 0);
    if (region && !region->ehBailoutData)
    {
        region->AllocateEHBailoutData(this->func, tryInstr);
    }
    Assert(firstInstr->IsLabelInstr());
    if (firstInstr->IsLabelInstr())
    {
        // Record the region on the label and make sure it stays around as a region
        // marker if we're entering a region at this point.
        IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
        labelInstr->SetRegion(region);
        if (region != predRegion)
        {
            labelInstr->m_hasNonBranchRef = true;
        }
        // One of the pred blocks maybe an eh region, in that case it is important to mark this label's m_hasNonBranchRef
        // If not later in codegen, this label can get deleted. And during SccLiveness, region is propagated to newly created labels in lowerer from the previous label's region
        // We can end up assigning an eh region to a label in a non eh region. And if there is a bailout in such a region, bad things will happen in the interpreter :)
        // See test2() in tryfinallytests.js
        if (region && region->GetType() == RegionTypeRoot && !labelInstr->m_hasNonBranchRef)
        {
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != region)
                {
                    labelInstr->m_hasNonBranchRef = true;
                    break;
                }
            }
            NEXT_PREDECESSOR_BLOCK;
        }
    }
}
// Like UpdateRegionForBlock, but used after break-block removal: the region
// must be taken from an EH-transitioning predecessor when one exists, and
// finally-to-early-exit edges must be skipped. 'reassign' forces a region
// recomputation even if the label already has one.
void
FlowGraph::UpdateRegionForBlockFromEHPred(BasicBlock * block, bool reassign)
{
    Region *region = nullptr;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (!reassign && firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        // Region already assigned; only expected in these EH configs.
        Assert(this->func->HasTry() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        return;
    }
    if (block->isDead || block->isDeleted)
    {
        // We can end up calling this function with such blocks, return doing nothing
        // See test5() in tryfinallytests.js
        return;
    }
    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else if (this->leaveNullLabelToFinallyLabelMap && block->GetLastInstr()->m_opcode == Js::OpCode::LeaveNull)
    {
        // We hold on to the LeaveNull block by adding BrOnException from loop tops
        // So that LeaveNull doesn't get removed due to unreachable block elimination
        // We need the finally end block to insert edge from finally to early exit
        // If a LeaveNull block had only BrOnException edges from predecessor
        // We will end up in inaccurate region propagation if we propagate from the predecessor edges
        // So pick up the finally region from the map
        Assert(this->leaveNullLabelToFinallyLabelMap->ContainsKey(block->GetFirstInstr()->AsLabelInstr()));
        IR::LabelInstr * finallyLabel = this->leaveNullLabelToFinallyLabelMap->Item(block->GetFirstInstr()->AsLabelInstr());
        Assert(finallyLabel);
        region = finallyLabel->GetRegion();
        // Remember this block as the end of its finally region.
        if (this->regToFinallyEndMap)
        {
            regToFinallyEndMap->Item(region, block);
        }
    }
    else if (block->GetPredList()->Count() == 1)
    {
        // Single predecessor: propagate directly from it.
        BasicBlock *predBlock = block->GetPredList()->Head()->GetPred();
        AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
        predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
        Assert(predRegion);
        region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        // Since we do break block removal after region propagation, we cannot pick the first predecessor which has an assigned region
        // If there is a eh transitioning pred, we pick that
        // There cannot be more than one eh transitioning pred (?)
        BasicBlock *ehPred = this->GetPredecessorForRegionPropagation(block);
        if (ehPred)
        {
            predRegion = ehPred->GetFirstInstr()->AsLabelInstr()->GetRegion();
            Assert(predRegion != nullptr);
            region = this->PropagateRegionFromPred(block, ehPred, predRegion, tryInstr);
        }
        else
        {
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != nullptr)
                {
                    if ((predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnException || predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnNoException) &&
                        predBlock->GetLastInstr()->AsBranchInstr()->m_brFinallyToEarlyExit)
                    {
                        Assert(predRegion->IsNonExceptingFinally());
                        // BrOnException from finally region to early exit
                        // Skip this edge
                        continue;
                    }
                    if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::Br &&
                        predBlock->GetLastInstr()->GetPrevRealInstr()->m_opcode == Js::OpCode::BrOnNoException)
                    {
                        Assert(predBlock->GetLastInstr()->GetPrevRealInstr()->AsBranchInstr()->m_brFinallyToEarlyExit);
                        Assert(predRegion->IsNonExceptingFinally());
                        // BrOnException from finally region to early exit changed to BrOnNoException and Br during break block removal
                        continue;
                    }
                    region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                    break;
                }
            }
            NEXT_PREDECESSOR_BLOCK;
        }
    }
    // A block may legitimately end up without a new region only when it has
    // no predecessors or its label already carries a region.
    Assert(region || block->GetPredList()->Count() == 0 || block->firstInstr->AsLabelInstr()->GetRegion());
    if (region)
    {
        if (!region->ehBailoutData)
        {
            region->AllocateEHBailoutData(this->func, tryInstr);
        }
        Assert(firstInstr->IsLabelInstr());
        if (firstInstr->IsLabelInstr())
        {
            // Record the region on the label and make sure it stays around as a region
            // marker if we're entering a region at this point.
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->SetRegion(region);
            if (region != predRegion)
            {
                labelInstr->m_hasNonBranchRef = true;
            }
        }
    }
}
// Compute the EH region for 'block' given that control flows into it from
// 'predBlock', whose region is 'predRegion'. The predecessor's last
// instruction determines the kind of transition: entering a try/catch/finally,
// leaving one (Leave/LeaveNull), or an ordinary edge.
// 'tryInstr' is an out-parameter: it is set to the TryCatch/TryFinally
// instruction when this edge enters a try (or a finally reached via its try),
// so the caller can allocate EH bailout data against it.
Region *
FlowGraph::PropagateRegionFromPred(BasicBlock * block, BasicBlock * predBlock, Region * predRegion, IR::Instr * &tryInstr)
{
    // Propagate predRegion to region, looking at the flow transition for an opcode
    // that affects the region.
    Region * region = nullptr;
    IR::Instr * predLastInstr = predBlock->GetLastInstr();
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (predLastInstr == nullptr)
    {
        // Empty block: trivially propagate the region.
        region = predRegion;
    }
    else
    {
        Region * tryRegion = nullptr;
        IR::LabelInstr * tryInstrNext = nullptr;
        switch (predLastInstr->m_opcode)
        {
        case Js::OpCode::TryCatch:
            // Entry to a try-catch. See whether we're entering the try or the catch
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // The branch target of TryCatch is the catch handler:
                // create a catch region and cross-link it with its try.
                region = Region::New(RegionTypeCatch, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingCatchRegion(region);
            }
            else
            {
                // Fall-through from TryCatch enters the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::TryFinally:
            // Entry to a try-finally. See whether we're entering the try or the finally
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // The branch target of TryFinally is the exception-path finally handler.
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region = Region::New(RegionTypeFinally, predRegion, this->func);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, true);
                tryInstr = predLastInstr;
            }
            else
            {
                // Fall-through from TryFinally enters the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::Leave:
            // When there is an unconditional Leave within a finally due to early exit,
            // we may not be able to find LeaveNull, due to unreachable block elimination
            // Update the endOfFinally here to handle such cases
            if (this->regToFinallyEndMap && predRegion->IsNonExceptingFinally())
            {
                BasicBlock *endOfFinally = regToFinallyEndMap->ContainsKey(predRegion) ? regToFinallyEndMap->Item(predRegion) : nullptr;
                if (!endOfFinally)
                {
                    regToFinallyEndMap->Add(predRegion, predBlock);
                }
                else if (endOfFinally && endOfFinally->GetLastInstr()->m_opcode != Js::OpCode::LeaveNull)
                {
                    // Only overwrite the recorded end block if it doesn't already end in LeaveNull.
                    regToFinallyEndMap->Item(predRegion, predBlock);
                }
            }
            if (firstInstr->m_next && firstInstr->m_next->m_opcode == Js::OpCode::Finally)
            {
                // Leave lands on a Finally label: this is the non-exception-path
                // finally for the try region we are leaving (predRegion).
                tryRegion = predRegion;
                Assert(tryRegion->GetMatchingFinallyRegion(true) != nullptr);
                region = Region::New(RegionTypeFinally, predRegion->GetParent(), this->func);
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, false);
                break;
            }
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;

        case Js::OpCode::LeaveNull:
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;

        case Js::OpCode::BailOnException:
            // Infinite loop, no edge to non excepting finally
            if (firstInstr->m_next && firstInstr->m_next->m_opcode == Js::OpCode::Finally)
            {
                // Target is a Finally label: create the non-exception-path finally
                // region for the matching try.
                tryRegion = predRegion->GetMatchingTryRegion();
                region = Region::New(RegionTypeFinally, predRegion->GetParent(), this->func);
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, false);
            }
            break;

        case Js::OpCode::BrOnException:
            // Infinite loop inside another EH region within finally,
            // We have added edges for all infinite loops inside a finally, identify that and transition to parent
            // (target block leading to LeaveNull identifies the synthetic edge).
            if (predRegion->GetType() != RegionTypeFinally && firstInstr->GetNextRealInstr()->m_opcode == Js::OpCode::LeaveNull)
            {
                region = predRegion->GetParent();
            }
            else
            {
                region = predRegion;
            }
            break;

        default:
            // Normal (non-EH) transition: just propagate the region.
            region = predRegion;
            break;
        }
    }
    return region;
}
  2015. void
  2016. FlowGraph::InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget)
  2017. {
  2018. if (loop)
  2019. {
  2020. bool found = false;
  2021. FOREACH_BLOCK_IN_LOOP_EDITING(loopBlock, loop, iter)
  2022. {
  2023. if (loopBlock == targetBlock)
  2024. {
  2025. found = true;
  2026. break;
  2027. }
  2028. } NEXT_BLOCK_IN_LOOP_EDITING;
  2029. if (found)
  2030. {
  2031. if (postTarget)
  2032. {
  2033. iter.Next();
  2034. }
  2035. iter.InsertBefore(compBlock);
  2036. }
  2037. InsertCompBlockToLoopList(loop->parent, compBlock, targetBlock, postTarget);
  2038. }
  2039. }
// Insert an "airlock" block on the given edge: a new block placed physically
// just before the edge's sink that branches unconditionally to it. The edge
// pred -> succ becomes pred -> airlock -> succ, giving globopt a spot for
// compensation code on that edge. Returns the new block.
BasicBlock *
FlowGraph::InsertAirlockBlock(FlowEdge * edge)
{
    BasicBlock * airlockBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * sinkPrevBlock = sinkBlock->prev;
    IR::Instr * sinkPrevBlockLastInstr = sinkPrevBlock->GetLastInstr();
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    // The airlock sits immediately before the sink, so it belongs to the sink's loop.
    airlockBlock->loop = sinkBlock->loop;
    airlockBlock->SetBlockNum(this->blockCount++);
#ifdef DBG
    airlockBlock->isAirLockBlock = true;
#endif
    //
    // Fixup block linkage
    //
    // airlock block is inserted right before sinkBlock
    airlockBlock->prev = sinkBlock->prev;
    sinkBlock->prev = airlockBlock;
    airlockBlock->next = sinkBlock;
    airlockBlock->prev->next = airlockBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> airlockBlock
    this->AddEdge(sourceBlock, airlockBlock);

    // Reuse the original edge object as airlockBlock -> sinkBlock
    edge->SetPred(airlockBlock);
    airlockBlock->AddSucc(edge, this);

    // Fixup data use count
    airlockBlock->SetDataUseCount(1);
    sourceBlock->DecrementDataUseCount();

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);
    sinkPrevBlockLastInstr->InsertAfter(airlockLabel);

    airlockBlock->SetFirstInstr(airlockLabel);
    airlockLabel->SetBasicBlock(airlockBlock);

    // Add br to sinkBlock from airlock block
    IR::BranchInstr *airlockBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    airlockBr->SetByteCodeOffset(sinkLabel);
    airlockLabel->InsertAfter(airlockBr);
    airlockBlock->SetLastInstr(airlockBr);

    airlockLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock: retarget its branch to the airlock label.
    IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
    if (sourceBr->IsMultiBranch())
    {
        const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, airlockLabel);
        Assert(replaced);
    }
    else if (sourceBr->GetTarget() == sinkLabel)
    {
        sourceBr->SetTarget(airlockLabel);
    }

    // The airlock was inserted between sinkPrevBlock and sinkBlock. If
    // sinkPrevBlock fell through into sinkBlock, that fall-through now lands
    // in the airlock instead, so restore the edge with a compensation block.
    if (!sinkPrevBlockLastInstr->IsBranchInstr() || sinkPrevBlockLastInstr->AsBranchInstr()->HasFallThrough())
    {
        if (!sinkPrevBlock->isDeleted)
        {
            FlowEdge *dstEdge = this->FindEdge(sinkPrevBlock, sinkBlock);
            if (dstEdge) // Possibility that sourceblock may be same as sinkPrevBlock
            {
                BasicBlock* compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert comp block to loop list*/, true);
                compensationBlock->IncrementDataUseCount();
                // We need to skip the airlock compensation block in globopt as it is inserted while globopt is iterating over the blocks.
                compensationBlock->isAirLockCompensationBlock = true;
            }
        }
    }

#if DBG_DUMP
    this->Dump(true, _u("\n After insertion of airlock block \n"));
#endif

    return airlockBlock;
}
// Insert a compensation block on the given edge: a new block placed physically
// just after the edge's source that branches unconditionally to the sink. The
// edge pred -> succ becomes pred -> comp -> succ. Optionally links the new
// block into the loop block lists (anchored at the sink's loop when
// sinkBlockLoop is true, otherwise at the source's). Returns the new block.
BasicBlock *
FlowGraph::InsertCompensationCodeForBlockMove(FlowEdge * edge, bool insertToLoopList, bool sinkBlockLoop)
{
    BasicBlock * compBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * fallthroughBlock = sourceBlock->next;
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    compBlock->SetBlockNum(this->blockCount++);

    if (insertToLoopList)
    {
        // Register the new block in the relevant loop block lists so later
        // passes that walk loops see it.
        if (sinkBlockLoop)
        {
            if (sinkBlock->loop && sinkBlock->loop->GetHeadBlock() == sinkBlock)
            {
                // BLUE 531255: sinkblock may be the head block of new loop, we shouldn't insert compensation block to that loop
                // Insert it to all the parent loop lists.
                compBlock->loop = sinkBlock->loop->parent;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false);
            }
            else
            {
                compBlock->loop = sinkBlock->loop;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false); // sinkBlock or fallthroughBlock?
            }
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSink = true;
#endif
        }
        else
        {
            compBlock->loop = sourceBlock->loop;
            InsertCompBlockToLoopList(compBlock->loop, compBlock, sourceBlock, true);
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSource = true;
#endif
        }
    }

    //
    // Fixup block linkage
    //
    // compensation block is inserted right after sourceBlock
    compBlock->next = fallthroughBlock;
    fallthroughBlock->prev = compBlock;

    compBlock->prev = sourceBlock;
    sourceBlock->next = compBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> compBlock
    this->AddEdge(sourceBlock, compBlock);

    // Reuse the original edge object as compBlock -> sinkBlock
    edge->SetPred(compBlock);
    compBlock->AddSucc(edge, this);

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *compLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);
    sourceLastInstr->InsertAfter(compLabel);
    compBlock->SetFirstInstr(compLabel);
    compLabel->SetBasicBlock(compBlock);

    // Add br to sinkBlock from compensation block
    IR::BranchInstr *compBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    compBr->SetByteCodeOffset(sinkLabel);
    compLabel->InsertAfter(compBr);
    compBlock->SetLastInstr(compBr);

    compLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock: a multi-branch must be retargeted to the
    // compensation label; a conditional branch simply falls through into compBlock.
    if (sourceLastInstr->IsBranchInstr())
    {
        IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
        Assert(sourceBr->IsMultiBranch() || sourceBr->IsConditional());
        if (sourceBr->IsMultiBranch())
        {
            const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, compLabel);
            Assert(replaced);
        }
    }

    // If EH regions were assigned before globopt, the new block needs a region as well.
    bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout));
    if (assignRegionsBeforeGlobopt)
    {
        UpdateRegionForBlockFromEHPred(compBlock);
    }
    return compBlock;
}
  2213. void
  2214. FlowGraph::RemoveUnreachableBlocks()
  2215. {
  2216. AnalysisAssert(this->blockList);
  2217. FOREACH_BLOCK(block, this)
  2218. {
  2219. block->isVisited = false;
  2220. }
  2221. NEXT_BLOCK;
  2222. this->blockList->isVisited = true;
  2223. FOREACH_BLOCK_EDITING(block, this)
  2224. {
  2225. if (block->isVisited)
  2226. {
  2227. FOREACH_SUCCESSOR_BLOCK(succ, block)
  2228. {
  2229. succ->isVisited = true;
  2230. } NEXT_SUCCESSOR_BLOCK;
  2231. }
  2232. else
  2233. {
  2234. this->RemoveBlock(block);
  2235. }
  2236. }
  2237. NEXT_BLOCK_EDITING;
  2238. }
  2239. // If block has no predecessor, remove it.
  2240. bool
  2241. FlowGraph::RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt)
  2242. {
  2243. bool isDead = false;
  2244. if ((block->GetPredList() == nullptr || block->GetPredList()->Empty()) && block != this->func->m_fg->blockList)
  2245. {
  2246. isDead = true;
  2247. }
  2248. else if (block->isLoopHeader)
  2249. {
  2250. // A dead loop still has back-edges pointing to it...
  2251. isDead = true;
  2252. FOREACH_PREDECESSOR_BLOCK(pred, block)
  2253. {
  2254. if (!block->loop->IsDescendentOrSelf(pred->loop))
  2255. {
  2256. isDead = false;
  2257. }
  2258. } NEXT_PREDECESSOR_BLOCK;
  2259. }
  2260. if (isDead)
  2261. {
  2262. this->RemoveBlock(block, globOpt);
  2263. return true;
  2264. }
  2265. return false;
  2266. }
// Peephole a typed (int/var) compare feeding a typed conditional branch into a
// single compare-branch, keeping the compare result available via constant
// loads on each path. Returns the rewritten branch on success, nullptr when
// the pattern does not match.
IR::Instr *
FlowGraph::PeepTypedCm(IR::Instr *instr)
{
    // Basic pattern, peep:
    //      t1 = CmEq a, b
    //      BrTrue_I4 $L1, t1
    // Into:
    //      t1 = 1
    //      BrEq $L1, a, b
    //      t1 = 0

    IR::Instr * instrNext = instr->GetNextRealInstrOrLabel();

    // find intermediate Lds
    IR::Instr * instrLd = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    IR::Instr * instrLd2 = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd2 = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    // Find BrTrue/BrFalse
    IR::Instr *instrBr;
    bool brIsTrue;
    if (instrNext->m_opcode == Js::OpCode::BrTrue_I4)
    {
        instrBr = instrNext;
        brIsTrue = true;
    }
    else if (instrNext->m_opcode == Js::OpCode::BrFalse_I4)
    {
        instrBr = instrNext;
        brIsTrue = false;
    }
    else
    {
        return nullptr;
    }

    // instrLd2 can only be found after instrLd, so instrLd2 != nullptr implies instrLd != nullptr.
    AssertMsg(instrLd || (!instrLd && !instrLd2), "Either instrLd is non-null or both null");

    // if we have intermediate Lds, then make sure pattern is:
    //      t1 = CmEq a, b
    //      t2 = Ld_A t1
    //      BrTrue $L1, t2
    if (instrLd && !instrLd->GetSrc1()->IsEqual(instr->GetDst()))
    {
        return nullptr;
    }

    if (instrLd2 && !instrLd2->GetSrc1()->IsEqual(instrLd->GetDst()))
    {
        return nullptr;
    }

    // Make sure we have:
    //      t1 = CmEq a, b
    //           BrTrue/BrFalse t1
    if (!(instr->GetDst()->IsEqual(instrBr->GetSrc1()) || (instrLd && instrLd->GetDst()->IsEqual(instrBr->GetSrc1())) || (instrLd2 && instrLd2->GetDst()->IsEqual(instrBr->GetSrc1()))))
    {
        return nullptr;
    }

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();

    IR::Instr * instrNew;
    IR::Opnd * tmpOpnd;

    // If a compare source aliases one of the destinations that will be
    // overwritten with 0/1 below, copy it into a fresh temp first so the
    // branch still compares the original value.
    if (instr->GetDst()->IsEqual(src1) || (instrLd && instrLd->GetDst()->IsEqual(src1)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src1)))
    {
        Assert(src1->IsInt32());
        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src1, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src1 = tmpOpnd;
    }

    if (instr->GetDst()->IsEqual(src2) || (instrLd && instrLd->GetDst()->IsEqual(src2)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src2)))
    {
        Assert(src2->IsInt32());
        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src2, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src2 = tmpOpnd;
    }

    // The branch now compares the original operands directly.
    instrBr->ReplaceSrc1(src1);
    instrBr->SetSrc2(src2);

    // Map the compare opcode to the corresponding branch opcode.
    Js::OpCode newOpcode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
        newOpcode = Js::OpCode::BrEq_I4;
        break;
    case Js::OpCode::CmGe_I4:
        newOpcode = Js::OpCode::BrGe_I4;
        break;
    case Js::OpCode::CmGt_I4:
        newOpcode = Js::OpCode::BrGt_I4;
        break;
    case Js::OpCode::CmLt_I4:
        newOpcode = Js::OpCode::BrLt_I4;
        break;
    case Js::OpCode::CmLe_I4:
        newOpcode = Js::OpCode::BrLe_I4;
        break;
    case Js::OpCode::CmUnGe_I4:
        newOpcode = Js::OpCode::BrUnGe_I4;
        break;
    case Js::OpCode::CmUnGt_I4:
        newOpcode = Js::OpCode::BrUnGt_I4;
        break;
    case Js::OpCode::CmUnLt_I4:
        newOpcode = Js::OpCode::BrUnLt_I4;
        break;
    case Js::OpCode::CmUnLe_I4:
        newOpcode = Js::OpCode::BrUnLe_I4;
        break;
    case Js::OpCode::CmNeq_I4:
        newOpcode = Js::OpCode::BrNeq_I4;
        break;
    case Js::OpCode::CmEq_A:
        newOpcode = Js::OpCode::BrEq_A;
        break;
    case Js::OpCode::CmGe_A:
        newOpcode = Js::OpCode::BrGe_A;
        break;
    case Js::OpCode::CmGt_A:
        newOpcode = Js::OpCode::BrGt_A;
        break;
    case Js::OpCode::CmLt_A:
        newOpcode = Js::OpCode::BrLt_A;
        break;
    case Js::OpCode::CmLe_A:
        newOpcode = Js::OpCode::BrLe_A;
        break;
    case Js::OpCode::CmUnGe_A:
        newOpcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::CmUnGt_A:
        newOpcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmUnLt_A:
        newOpcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::CmUnLe_A:
        newOpcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::CmNeq_A:
        newOpcode = Js::OpCode::BrNeq_A;
        break;
    case Js::OpCode::CmSrEq_A:
        newOpcode = Js::OpCode::BrSrEq_A;
        break;
    case Js::OpCode::CmSrNeq_A:
        newOpcode = Js::OpCode::BrSrNeq_A;
        break;
    default:
        newOpcode = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    instrBr->m_opcode = newOpcode;

    if (brIsTrue)
    {
        // BrTrue: the branch is taken when the compare is true, so the result
        // temps are 1 on the taken path (stores before the branch) and 0 on
        // fall-through (stores after the branch).
        instr->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }
    else
    {
        // BrFalse: invert the branch so it is taken when the compare is false;
        // the result temps are 0 on the taken path and 1 on fall-through.
        instrBr->AsBranchInstr()->Invert();

        instr->SetSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }

    return instrBr;
}
  2473. IR::Instr *
  2474. FlowGraph::PeepCm(IR::Instr *instr)
  2475. {
  2476. // Basic pattern, peep:
  2477. // t1 = CmEq a, b
  2478. // t2 = Ld_A t1
  2479. // BrTrue $L1, t2
  2480. // Into:
  2481. // t1 = True
  2482. // t2 = True
  2483. // BrEq $L1, a, b
  2484. // t1 = False
  2485. // t2 = False
  2486. //
  2487. // The true/false Ld_A's will most likely end up being dead-stores...
  2488. // Alternate Pattern
  2489. // t1= CmEq a, b
  2490. // BrTrue $L1, t1
  2491. // Into:
  2492. // BrEq $L1, a, b
  2493. Func *func = instr->m_func;
  2494. // Find Ld_A
  2495. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  2496. IR::Instr *inlineeEndInstr = nullptr;
  2497. IR::Instr *instrNew;
  2498. IR::Instr *instrLd = nullptr, *instrLd2 = nullptr;
  2499. IR::Instr *instrByteCode = instrNext;
  2500. bool ldFound = false;
  2501. IR::Opnd *brSrc = instr->GetDst();
  2502. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(instr->GetDst()))
  2503. {
  2504. ldFound = true;
  2505. instrLd = instrNext;
  2506. brSrc = instrNext->GetDst();
  2507. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  2508. {
  2509. return nullptr;
  2510. }
  2511. instrNext = instrLd->GetNextRealInstrOrLabel();
  2512. // Is there a second Ld_A?
  2513. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(brSrc))
  2514. {
  2515. // We have:
  2516. // t1 = Cm
  2517. // t2 = t1 // ldSrc = t1
  2518. // t3 = t2 // ldDst = t3
  2519. // BrTrue/BrFalse t3
  2520. instrLd2 = instrNext;
  2521. brSrc = instrLd2->GetDst();
  2522. instrNext = instrLd2->GetNextRealInstrOrLabel();
  2523. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  2524. {
  2525. return nullptr;
  2526. }
  2527. }
  2528. }
  2529. // Skip over InlineeEnd
  2530. if (instrNext->m_opcode == Js::OpCode::InlineeEnd)
  2531. {
  2532. inlineeEndInstr = instrNext;
  2533. instrNext = inlineeEndInstr->GetNextRealInstrOrLabel();
  2534. }
  2535. // Find BrTrue/BrFalse
  2536. bool brIsTrue;
  2537. if (instrNext->m_opcode == Js::OpCode::BrTrue_A)
  2538. {
  2539. brIsTrue = true;
  2540. }
  2541. else if (instrNext->m_opcode == Js::OpCode::BrFalse_A)
  2542. {
  2543. brIsTrue = false;
  2544. }
  2545. else
  2546. {
  2547. return nullptr;
  2548. }
  2549. IR::Instr *instrBr = instrNext;
  2550. // Make sure we have:
  2551. // t1 = Ld_A
  2552. // BrTrue/BrFalse t1
  2553. if (!instr->GetDst()->IsEqual(instrBr->GetSrc1()) && !brSrc->IsEqual(instrBr->GetSrc1()))
  2554. {
  2555. return nullptr;
  2556. }
  2557. //
  2558. // We have a match. Generate the new branch
  2559. //
  2560. // BrTrue/BrFalse t1
  2561. // Keep a copy of the inliner func and the bytecode offset of the original BrTrue/BrFalse if we end up inserting a new branch out of the inlinee,
  2562. // and sym id of t1 for proper restoration on a bailout before the branch.
  2563. Func* origBrFunc = instrBr->m_func;
  2564. uint32 origBrByteCodeOffset = instrBr->GetByteCodeOffset();
  2565. uint32 origBranchSrcSymId = instrBr->GetSrc1()->GetStackSym()->m_id;
  2566. bool origBranchSrcOpndIsJITOpt = instrBr->GetSrc1()->GetIsJITOptimizedReg();
  2567. instrBr->Unlink();
  2568. instr->InsertBefore(instrBr);
  2569. instrBr->ClearByteCodeOffset();
  2570. instrBr->SetByteCodeOffset(instr);
  2571. instrBr->FreeSrc1();
  2572. instrBr->SetSrc1(instr->UnlinkSrc1());
  2573. instrBr->SetSrc2(instr->UnlinkSrc2());
  2574. instrBr->m_func = instr->m_func;
  2575. Js::OpCode newOpcode = Js::OpCode::InvalidOpCode;
  2576. switch(instr->m_opcode)
  2577. {
  2578. case Js::OpCode::CmEq_A:
  2579. newOpcode = Js::OpCode::BrEq_A;
  2580. break;
  2581. case Js::OpCode::CmGe_A:
  2582. newOpcode = Js::OpCode::BrGe_A;
  2583. break;
  2584. case Js::OpCode::CmGt_A:
  2585. newOpcode = Js::OpCode::BrGt_A;
  2586. break;
  2587. case Js::OpCode::CmLt_A:
  2588. newOpcode = Js::OpCode::BrLt_A;
  2589. break;
  2590. case Js::OpCode::CmLe_A:
  2591. newOpcode = Js::OpCode::BrLe_A;
  2592. break;
  2593. case Js::OpCode::CmUnGe_A:
  2594. newOpcode = Js::OpCode::BrUnGe_A;
  2595. break;
  2596. case Js::OpCode::CmUnGt_A:
  2597. newOpcode = Js::OpCode::BrUnGt_A;
  2598. break;
  2599. case Js::OpCode::CmUnLt_A:
  2600. newOpcode = Js::OpCode::BrUnLt_A;
  2601. break;
  2602. case Js::OpCode::CmUnLe_A:
  2603. newOpcode = Js::OpCode::BrUnLe_A;
  2604. break;
  2605. case Js::OpCode::CmNeq_A:
  2606. newOpcode = Js::OpCode::BrNeq_A;
  2607. break;
  2608. case Js::OpCode::CmSrEq_A:
  2609. newOpcode = Js::OpCode::BrSrEq_A;
  2610. break;
  2611. case Js::OpCode::CmSrNeq_A:
  2612. newOpcode = Js::OpCode::BrSrNeq_A;
  2613. break;
  2614. default:
  2615. Assert(UNREACHED);
  2616. __assume(UNREACHED);
  2617. }
  2618. instrBr->m_opcode = newOpcode;
  2619. IR::AddrOpnd* trueOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetTrueAddr(), IR::AddrOpndKindDynamicVar, func, true);
  2620. IR::AddrOpnd* falseOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetFalseAddr(), IR::AddrOpndKindDynamicVar, func, true);
  2621. trueOpnd->SetValueType(ValueType::Boolean);
  2622. falseOpnd->SetValueType(ValueType::Boolean);
  2623. if (ldFound)
  2624. {
  2625. // Split Ld_A into "Ld_A TRUE"/"Ld_A FALSE"
  2626. if (brIsTrue)
  2627. {
  2628. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  2629. instrNew->SetByteCodeOffset(instrBr);
  2630. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2631. instrBr->InsertBefore(instrNew);
  2632. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), trueOpnd, instrBr->m_func);
  2633. instrNew->SetByteCodeOffset(instrBr);
  2634. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2635. instrBr->InsertBefore(instrNew);
  2636. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrLd->m_func);
  2637. instrLd->InsertBefore(instrNew);
  2638. instrNew->SetByteCodeOffset(instrLd);
  2639. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2640. instrLd->ReplaceSrc1(falseOpnd);
  2641. if (instrLd2)
  2642. {
  2643. instrLd2->ReplaceSrc1(falseOpnd);
  2644. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd2->GetDst(), trueOpnd, instrBr->m_func);
  2645. instrBr->InsertBefore(instrNew);
  2646. instrNew->SetByteCodeOffset(instrBr);
  2647. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2648. }
  2649. }
  2650. else
  2651. {
  2652. instrBr->AsBranchInstr()->Invert();
  2653. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrBr->m_func);
  2654. instrBr->InsertBefore(instrNew);
  2655. instrNew->SetByteCodeOffset(instrBr);
  2656. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2657. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), falseOpnd, instrBr->m_func);
  2658. instrBr->InsertBefore(instrNew);
  2659. instrNew->SetByteCodeOffset(instrBr);
  2660. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2661. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrLd->m_func);
  2662. instrLd->InsertBefore(instrNew);
  2663. instrNew->SetByteCodeOffset(instrLd);
  2664. instrLd->ReplaceSrc1(trueOpnd);
  2665. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2666. if (instrLd2)
  2667. {
  2668. instrLd2->ReplaceSrc1(trueOpnd);
  2669. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  2670. instrBr->InsertBefore(instrNew);
  2671. instrNew->SetByteCodeOffset(instrBr);
  2672. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2673. }
  2674. }
  2675. }
  2676. // Fix InlineeEnd
  2677. if (inlineeEndInstr)
  2678. {
  2679. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr, instrByteCode , origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
  2680. }
  2681. if (instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
  2682. {
  2683. Assert(!instrBr->AsBranchInstr()->HasByteCodeReg());
  2684. StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;
  2685. instrBr->AsBranchInstr()->SetByteCodeReg(dstSym->GetByteCodeRegSlot());
  2686. }
  2687. instr->Remove();
  2688. //
  2689. // Try optimizing through a second branch.
  2690. // Peep:
  2691. //
  2692. // t2 = True;
  2693. // BrTrue $L1
  2694. // ...
  2695. // L1:
  2696. // t1 = Ld_A t2
  2697. // BrTrue $L2
  2698. //
  2699. // Into:
  2700. // t2 = True;
  2701. // t1 = True;
  2702. // BrTrue $L2 <---
  2703. // ...
  2704. // L1:
  2705. // t1 = Ld_A t2
  2706. // BrTrue $L2
  2707. //
  2708. // This cleanup helps expose second level Cm peeps.
  2709. IR::Instr *instrLd3 = instrBr->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2710. // Skip over branch to branch
  2711. while (instrLd3->m_opcode == Js::OpCode::Br)
  2712. {
  2713. instrLd3 = instrLd3->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2714. }
  2715. // Find Ld_A
  2716. if (instrLd3->m_opcode != Js::OpCode::Ld_A)
  2717. {
  2718. return instrBr;
  2719. }
  2720. IR::Instr *instrBr2 = instrLd3->GetNextRealInstrOrLabel();
  2721. IR::Instr *inlineeEndInstr2 = nullptr;
  2722. // InlineeEnd?
  2723. // REVIEW: Can we handle 2 inlineeEnds?
  2724. if (instrBr2->m_opcode == Js::OpCode::InlineeEnd && !inlineeEndInstr)
  2725. {
  2726. inlineeEndInstr2 = instrBr2;
  2727. instrBr2 = instrBr2->GetNextRealInstrOrLabel();
  2728. }
  2729. // Find branch
  2730. bool brIsTrue2;
  2731. if (instrBr2->m_opcode == Js::OpCode::BrTrue_A)
  2732. {
  2733. brIsTrue2 = true;
  2734. }
  2735. else if (instrBr2->m_opcode == Js::OpCode::BrFalse_A)
  2736. {
  2737. brIsTrue2 = false;
  2738. }
  2739. else
  2740. {
  2741. return nullptr;
  2742. }
  2743. // Make sure Ld_A operates on the right tmps.
  2744. if (!instrLd3->GetDst()->IsEqual(instrBr2->GetSrc1()) || !brSrc->IsEqual(instrLd3->GetSrc1()))
  2745. {
  2746. return nullptr;
  2747. }
  2748. if (instrLd3->GetDst()->IsEqual(instrBr->GetSrc1()) || instrLd3->GetDst()->IsEqual(instrBr->GetSrc2()))
  2749. {
  2750. return nullptr;
  2751. }
  2752. // Make sure that the reg we're assigning to is not live in the intervening instructions (if this is a forward branch).
  2753. if (instrLd3->GetByteCodeOffset() > instrBr->GetByteCodeOffset())
  2754. {
  2755. StackSym *symLd3 = instrLd3->GetDst()->AsRegOpnd()->m_sym;
  2756. if (IR::Instr::FindRegUseInRange(symLd3, instrBr->m_next, instrLd3))
  2757. {
  2758. return nullptr;
  2759. }
  2760. }
  2761. //
  2762. // We have a match!
  2763. //
  2764. if(inlineeEndInstr2)
  2765. {
  2766. origBrFunc = instrBr2->m_func;
  2767. origBrByteCodeOffset = instrBr2->GetByteCodeOffset();
  2768. origBranchSrcSymId = instrBr2->GetSrc1()->GetStackSym()->m_id;
  2769. }
  2770. // Fix Ld_A
  2771. if (brIsTrue)
  2772. {
  2773. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), trueOpnd, instrBr->m_func);
  2774. instrBr->InsertBefore(instrNew);
  2775. instrNew->SetByteCodeOffset(instrBr);
  2776. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2777. }
  2778. else
  2779. {
  2780. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), falseOpnd, instrBr->m_func);
  2781. instrBr->InsertBefore(instrNew);
  2782. instrNew->SetByteCodeOffset(instrBr);
  2783. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2784. }
  2785. IR::LabelInstr *brTarget2;
  2786. // Retarget branch
  2787. if (brIsTrue2 == brIsTrue)
  2788. {
  2789. brTarget2 = instrBr2->AsBranchInstr()->GetTarget();
  2790. }
  2791. else
  2792. {
  2793. brTarget2 = IR::LabelInstr::New(Js::OpCode::Label, instrBr2->m_func);
  2794. brTarget2->SetByteCodeOffset(instrBr2->m_next);
  2795. instrBr2->InsertAfter(brTarget2);
  2796. }
  2797. instrBr->AsBranchInstr()->SetTarget(brTarget2);
  2798. // InlineeEnd?
  2799. if (inlineeEndInstr2)
  2800. {
  2801. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr2, instrByteCode, origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
  2802. }
  2803. return instrBr;
  2804. }
void
FlowGraph::InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, bool origBranchSrcOpndIsJITOpt, uint32 origBranchSrcSymId)
{
    // Helper for PeepsCm code.
    //
    // We've skipped some InlineeEnd. Globopt expects to see these
    // on all flow paths out of the inlinee. Insert an InlineeEnd
    // on the new path:
    //      BrEq $L1, a, b
    // Becomes:
    //      BrNeq $L2, a, b
    //      InlineeEnd
    //      Br $L1
    //    L2:

    // Invert the branch so the InlineeEnd compensation code sits on the
    // fall-through path out of the inlinee.
    instrBr->AsBranchInstr()->Invert();

    // Unconditional jump to the original branch target. It belongs to the
    // original (inliner) function and carries the original byte code offset.
    IR::BranchInstr *newBr = IR::BranchInstr::New(Js::OpCode::Br, instrBr->AsBranchInstr()->GetTarget(), origBrFunc);
    newBr->SetByteCodeOffset(origByteCodeOffset);
    instrBr->InsertAfter(newBr);

    // New label ($L2 above) that the inverted branch now jumps to.
    IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, instrBr->m_func);
    newLabel->SetByteCodeOffset(instrBytecode);
    newBr->InsertAfter(newLabel);
    instrBr->AsBranchInstr()->SetTarget(newLabel);

    // Clone the skipped InlineeEnd (same src1/src2) onto the new flow path.
    IR::Instr *newInlineeEnd = IR::Instr::New(Js::OpCode::InlineeEnd, inlineeEndInstr->m_func);
    newInlineeEnd->SetSrc1(inlineeEndInstr->GetSrc1());
    newInlineeEnd->SetSrc2(inlineeEndInstr->GetSrc2());
    newInlineeEnd->SetByteCodeOffset(instrBytecode);
    newInlineeEnd->SetIsCloned(true);  // Mark it as cloned - this is used later by the inlinee args optimization
    newBr->InsertBefore(newInlineeEnd);

    // Record the byte code use of the original branch source so bailout can
    // still restore it on this path.
    IR::ByteCodeUsesInstr * useOrigBranchSrcInstr = IR::ByteCodeUsesInstr::New(origBrFunc, origByteCodeOffset);
    useOrigBranchSrcInstr->SetRemovedOpndSymbol(origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
    newBr->InsertBefore(useOrigBranchSrcInstr);

    uint newBrFnNumber = newBr->m_func->GetFunctionNumber();
    Assert(newBrFnNumber == origBrFunc->GetFunctionNumber());

    // The function numbers of the new branch and the inlineeEnd instruction should be different (ensuring that the new branch is not added in the inlinee but in the inliner).
    // Only case when they can be same is recursive calls - inlinee and inliner are the same function
    Assert(newBrFnNumber != inlineeEndInstr->m_func->GetFunctionNumber() ||
        newBrFnNumber == inlineeEndInstr->m_func->GetParentFunc()->GetFunctionNumber());
}
  2843. BasicBlock *
  2844. BasicBlock::New(FlowGraph * graph)
  2845. {
  2846. BasicBlock * block;
  2847. block = JitAnew(graph->alloc, BasicBlock, graph->alloc, graph->GetFunc());
  2848. return block;
  2849. }
  2850. void
  2851. BasicBlock::AddPred(FlowEdge * edge, FlowGraph * graph)
  2852. {
  2853. this->predList.Prepend(graph->alloc, edge);
  2854. }
  2855. void
  2856. BasicBlock::AddSucc(FlowEdge * edge, FlowGraph * graph)
  2857. {
  2858. this->succList.Prepend(graph->alloc, edge);
  2859. }
  2860. void
  2861. BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph)
  2862. {
  2863. this->RemovePred(block, graph, true, false);
  2864. }
  2865. void
  2866. BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph)
  2867. {
  2868. this->RemoveSucc(block, graph, true, false);
  2869. }
  2870. void
  2871. BasicBlock::RemoveDeadPred(BasicBlock *block, FlowGraph * graph)
  2872. {
  2873. this->RemovePred(block, graph, true, true);
  2874. }
  2875. void
  2876. BasicBlock::RemoveDeadSucc(BasicBlock *block, FlowGraph * graph)
  2877. {
  2878. this->RemoveSucc(block, graph, true, true);
  2879. }
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead)
{
    // Remove (or, with moveToDead, park on the dead list) the edge whose
    // predecessor is 'block'. With doCleanSucc set, also detach the mirror
    // entry from 'block's successor list (passing false there so the two
    // sides don't recurse into each other).
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            BasicBlock *blockSucc = edge->GetSucc();
            if (moveToDead)
            {
                // Keep the edge alive on the dead list (globopt may still
                // need it for bailout bookkeeping); just unlink it here.
                iter.MoveCurrentTo(this->GetDeadPredList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanSucc)
            {
                block->RemoveSucc(this, graph, false, moveToDead);
            }
            // If the edge's target is a loop header now down to a single
            // predecessor, the loop has lost all its back/entry edges but
            // one: mark the loop dead.
            if (blockSucc->isLoopHeader && blockSucc->loop && blockSucc->GetPredList()->HasOne())
            {
                Loop *loop = blockSucc->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    // Callers must only name blocks that actually are predecessors.
    AssertMsg(UNREACHED, "Edge not found.");
}
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead)
{
    // Remove (or, with moveToDead, park on the dead list) the edge whose
    // successor is 'block'. With doCleanPred set, also detach the mirror
    // entry from 'block's predecessor list (passing false there so the two
    // sides don't recurse into each other).
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            if (moveToDead)
            {
                // Keep the edge alive on the dead list (globopt may still
                // need it for bailout bookkeeping); just unlink it here.
                iter.MoveCurrentTo(this->GetDeadSuccList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanPred)
            {
                block->RemovePred(this, graph, false, moveToDead);
            }
            // If 'block' is a loop header now down to a single predecessor,
            // mark its loop dead.
            if (block->isLoopHeader && block->loop && block->GetPredList()->HasOne())
            {
                Loop *loop = block->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    // Callers must only name blocks that actually are successors.
    AssertMsg(UNREACHED, "Edge not found.");
}
  2939. void
  2940. BasicBlock::UnlinkPred(BasicBlock *block)
  2941. {
  2942. this->UnlinkPred(block, true);
  2943. }
  2944. void
  2945. BasicBlock::UnlinkSucc(BasicBlock *block)
  2946. {
  2947. this->UnlinkSucc(block, true);
  2948. }
void
BasicBlock::UnlinkPred(BasicBlock *block, bool doCleanSucc)
{
    // Unlink (without deallocating) the edge coming from 'block'. With
    // doCleanSucc set, also unlink the mirror entry from 'block's successor
    // list, passing false there to avoid mutual recursion.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanSucc)
            {
                block->UnlinkSucc(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    // Callers must only name blocks that actually are predecessors.
    AssertMsg(UNREACHED, "Edge not found.");
}
void
BasicBlock::UnlinkSucc(BasicBlock *block, bool doCleanPred)
{
    // Unlink (without deallocating) the edge going to 'block'. With
    // doCleanPred set, also unlink the mirror entry from 'block's predecessor
    // list, passing false there to avoid mutual recursion.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanPred)
            {
                block->UnlinkPred(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    // Callers must only name blocks that actually are successors.
    AssertMsg(UNREACHED, "Edge not found.");
}
  2983. bool
  2984. BasicBlock::IsLandingPad()
  2985. {
  2986. BasicBlock * nextBlock = this->GetNext();
  2987. return nextBlock && nextBlock->loop && nextBlock->isLoopHeader && nextBlock->loop->landingPad == this;
  2988. }
IR::Instr *
FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
{
    // Remove a single instruction on behalf of dead-block removal. Returns
    // the instruction the caller should treat as the last one processed
    // (the replacement ByteCodeUses, the kept instruction, or the previous
    // instruction when 'instr' was deleted outright).
    IR::Instr *instrPrev = instr->m_prev;
    if (globOpt)
    {
        // Removing block during glob opt. Need to maintain the graph so that
        // bailout will record the byte code use in case the dead code is exposed
        // by dyno-pogo optimization (where bailout need the byte code uses from
        // the dead blocks where it may not be dead after bailing out)
        if (instr->IsLabelInstr())
        {
            // Keep the label so block structure survives; it just stops
            // being a loop top.
            instr->AsLabelInstr()->m_isLoopTop = false;
            return instr;
        }
        else if (instr->IsByteCodeUsesInstr())
        {
            // Already a byte-code-uses record; nothing to convert.
            return instr;
        }
        /*
        *   Scope object has to be implicitly live whenever Heap Arguments object is live.
        *       - When we restore HeapArguments object in the bail out path, it expects the scope object also to be restored - if one was created.
        */
        Js::OpCode opcode = instr->m_opcode;
        if (opcode == Js::OpCode::LdElemI_A && instr->DoStackArgsOpt(this->func) &&
            globOpt->CurrentBlockData()->IsArgumentsOpnd(instr->GetSrc1()) && instr->m_func->GetScopeObjSym())
        {
            // Record the scope object symbol as a non-operand byte code use.
            IR::ByteCodeUsesInstr * byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
            byteCodeUsesInstr->SetNonOpndSymbol(instr->m_func->GetScopeObjSym()->m_id);
            instr->InsertAfter(byteCodeUsesInstr);
        }
        // Replace the instruction with a ByteCodeUses record of its operands.
        IR::ByteCodeUsesInstr * newByteCodeUseInstr = globOpt->ConvertToByteCodeUses(instr);
        if (newByteCodeUseInstr != nullptr)
        {
            // We don't care about property used in these instruction
            // It is only necessary for field copy prop so that we will keep the implicit call
            // up to the copy prop location.
            newByteCodeUseInstr->propertySymUse = nullptr;
            if (opcode == Js::OpCode::Yield)
            {
                // A dead Yield makes its resume label dead as well: find the
                // following label and drop its resume bookkeeping.
                IR::Instr *instrLabel = newByteCodeUseInstr->m_next;
                while (instrLabel->m_opcode != Js::OpCode::Label)
                {
                    instrLabel = instrLabel->m_next;
                }
                func->RemoveDeadYieldOffsetResumeLabel(instrLabel->AsLabelInstr());
                instrLabel->AsLabelInstr()->m_hasNonBranchRef = false;
            }
            // Save the last instruction to update the block with
            return newByteCodeUseInstr;
        }
        else
        {
            return instrPrev;
        }
    }
    else
    {
        // No globopt in flight: plain removal, no bailout bookkeeping needed.
        instr->Remove();
        return instrPrev;
    }
}
void
FlowGraph::RemoveBlock(BasicBlock *block, GlobOpt * globOpt, bool tailDuping)
{
    // Delete 'block' from the graph: strip its instructions (preserving
    // bailout bookkeeping when called during globopt), detach its edges,
    // and fix up the loop list if it was a loop header.
    Assert(!block->isDead && !block->isDeleted);
    IR::Instr * lastInstr = nullptr;
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        if (instr->m_opcode == Js::OpCode::FunctionExit)
        {
            // Removing FunctionExit causes problems downstream...
            // We could change the opcode, or have FunctionEpilog/FunctionExit to get
            // rid of the epilog.
            break;
        }
        if (instr == block->GetFirstInstr())
        {
            // Keep the block label; it just stops being a loop top.
            Assert(instr->IsLabelInstr());
            instr->AsLabelInstr()->m_isLoopTop = false;
        }
        else
        {
            lastInstr = this->RemoveInstr(instr, globOpt);
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;
    if (lastInstr)
    {
        block->SetLastInstr(lastInstr);
    }
    // Detach this block from its neighbors' edge lists. During globopt the
    // edges are parked on the dead lists instead of being freed.
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetPredList())
    {
        edge->GetPred()->RemoveSucc(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetSuccList())
    {
        edge->GetSucc()->RemovePred(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    if (block->isLoopHeader && this->loopList)
    {
        // If loop graph is built, remove loop from loopList
        Loop **pPrevLoop = &this->loopList;
        while (*pPrevLoop != block->loop)
        {
            pPrevLoop = &((*pPrevLoop)->next);
        }
        *pPrevLoop = (*pPrevLoop)->next;
        this->hasLoop = (this->loopList != nullptr);
    }
    if (globOpt != nullptr)
    {
        // Keep the (dead) block and its edges visible to globopt's bailout
        // bookkeeping.
        block->isDead = true;
        block->GetPredList()->MoveTo(block->GetDeadPredList());
        block->GetSuccList()->MoveTo(block->GetDeadSuccList());
    }
    if (tailDuping)
    {
        block->isDead = true;
    }
    block->isDeleted = true;
    block->SetDataUseCount(0);
}
  3111. void
  3112. BasicBlock::UnlinkInstr(IR::Instr * instr)
  3113. {
  3114. Assert(this->Contains(instr));
  3115. Assert(this->GetFirstInstr() != this->GetLastInstr());
  3116. if (instr == this->GetFirstInstr())
  3117. {
  3118. Assert(!this->GetFirstInstr()->IsLabelInstr());
  3119. this->SetFirstInstr(instr->m_next);
  3120. }
  3121. else if (instr == this->GetLastInstr())
  3122. {
  3123. this->SetLastInstr(instr->m_prev);
  3124. }
  3125. instr->Unlink();
  3126. }
  3127. void
  3128. BasicBlock::RemoveInstr(IR::Instr * instr)
  3129. {
  3130. Assert(this->Contains(instr));
  3131. if (instr == this->GetFirstInstr())
  3132. {
  3133. this->SetFirstInstr(instr->m_next);
  3134. }
  3135. else if (instr == this->GetLastInstr())
  3136. {
  3137. this->SetLastInstr(instr->m_prev);
  3138. }
  3139. instr->Remove();
  3140. }
  3141. void
  3142. BasicBlock::InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr)
  3143. {
  3144. Assert(this->Contains(beforeThisInstr));
  3145. beforeThisInstr->InsertBefore(newInstr);
  3146. if(this->GetFirstInstr() == beforeThisInstr)
  3147. {
  3148. Assert(!beforeThisInstr->IsLabelInstr());
  3149. this->SetFirstInstr(newInstr);
  3150. }
  3151. }
  3152. void
  3153. BasicBlock::InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr)
  3154. {
  3155. Assert(this->Contains(afterThisInstr));
  3156. afterThisInstr->InsertAfter(newInstr);
  3157. if (this->GetLastInstr() == afterThisInstr)
  3158. {
  3159. Assert(afterThisInstr->HasFallThrough());
  3160. this->SetLastInstr(newInstr);
  3161. }
  3162. }
  3163. void
  3164. BasicBlock::InsertAfter(IR::Instr *newInstr)
  3165. {
  3166. Assert(this->GetLastInstr()->HasFallThrough());
  3167. this->GetLastInstr()->InsertAfter(newInstr);
  3168. this->SetLastInstr(newInstr);
  3169. }
  3170. void
  3171. Loop::SetHasCall()
  3172. {
  3173. Loop * current = this;
  3174. do
  3175. {
  3176. if (current->hasCall)
  3177. {
  3178. #if DBG
  3179. current = current->parent;
  3180. while (current)
  3181. {
  3182. Assert(current->hasCall);
  3183. current = current->parent;
  3184. }
  3185. #endif
  3186. break;
  3187. }
  3188. current->hasCall = true;
  3189. current = current->parent;
  3190. }
  3191. while (current != nullptr);
  3192. }
  3193. void
  3194. Loop::SetImplicitCallFlags(Js::ImplicitCallFlags newFlags)
  3195. {
  3196. Loop * current = this;
  3197. do
  3198. {
  3199. if ((current->implicitCallFlags & newFlags) == newFlags)
  3200. {
  3201. #if DBG
  3202. current = current->parent;
  3203. while (current)
  3204. {
  3205. Assert((current->implicitCallFlags & newFlags) == newFlags);
  3206. current = current->parent;
  3207. }
  3208. #endif
  3209. break;
  3210. }
  3211. newFlags = (Js::ImplicitCallFlags)(implicitCallFlags | newFlags);
  3212. current->implicitCallFlags = newFlags;
  3213. current = current->parent;
  3214. }
  3215. while (current != nullptr);
  3216. }
  3217. Js::ImplicitCallFlags
  3218. Loop::GetImplicitCallFlags()
  3219. {
  3220. if (this->implicitCallFlags == Js::ImplicitCall_HasNoInfo)
  3221. {
  3222. if (this->parent == nullptr)
  3223. {
  3224. // We don't have any information, and we don't have any parent, so just assume that there aren't any implicit calls
  3225. this->implicitCallFlags = Js::ImplicitCall_None;
  3226. }
  3227. else
  3228. {
  3229. // We don't have any information, get it from the parent and hope for the best
  3230. this->implicitCallFlags = this->parent->GetImplicitCallFlags();
  3231. }
  3232. }
  3233. return this->implicitCallFlags;
  3234. }
  3235. bool
  3236. Loop::CanDoFieldCopyProp()
  3237. {
  3238. #if DBG_DUMP
  3239. if (((this->implicitCallFlags & ~(Js::ImplicitCall_External)) == 0) &&
  3240. Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
  3241. {
  3242. Output::Print(_u("fieldcopyprop disabled because external: loop count: %d"), GetLoopNumber());
  3243. GetFunc()->DumpFullFunctionName();
  3244. Output::Print(_u("\n"));
  3245. Output::Flush();
  3246. }
  3247. #endif
  3248. return GlobOpt::ImplicitCallFlagsAllowOpts(this);
  3249. }
  3250. bool
  3251. Loop::CanDoFieldHoist()
  3252. {
  3253. // We can do field hoist wherever we can do copy prop
  3254. return CanDoFieldCopyProp();
  3255. }
  3256. bool
  3257. Loop::CanHoistInvariants() const
  3258. {
  3259. Func * func = this->GetHeadBlock()->firstInstr->m_func->GetTopFunc();
  3260. if (PHASE_OFF(Js::InvariantsPhase, func))
  3261. {
  3262. return false;
  3263. }
  3264. return true;
  3265. }
  3266. IR::LabelInstr *
  3267. Loop::GetLoopTopInstr() const
  3268. {
  3269. IR::LabelInstr * instr = nullptr;
  3270. if (this->topFunc->isFlowGraphValid)
  3271. {
  3272. instr = this->GetHeadBlock()->GetFirstInstr()->AsLabelInstr();
  3273. }
  3274. else
  3275. {
  3276. // Flowgraph gets torn down after the globopt, so can't get the loopTop from the head block.
  3277. instr = this->loopTopLabel;
  3278. }
  3279. if (instr)
  3280. {
  3281. Assert(instr->m_isLoopTop);
  3282. }
  3283. return instr;
  3284. }
  3285. void
  3286. Loop::SetLoopTopInstr(IR::LabelInstr * loopTop)
  3287. {
  3288. this->loopTopLabel = loopTop;
  3289. }
  3290. #if DBG_DUMP
  3291. uint
  3292. Loop::GetLoopNumber() const
  3293. {
  3294. IR::LabelInstr * loopTopInstr = this->GetLoopTopInstr();
  3295. if (loopTopInstr->IsProfiledLabelInstr())
  3296. {
  3297. return loopTopInstr->AsProfiledLabelInstr()->loopNum;
  3298. }
  3299. return Js::LoopHeader::NoLoop;
  3300. }
bool
BasicBlock::Contains(IR::Instr * instr)
{
    // Debug-only membership check: linear scan over the block's
    // instructions (O(n), used from asserts).
    FOREACH_INSTR_IN_BLOCK(blockInstr, this)
    {
        if (instr == blockInstr)
        {
            return true;
        }
    }
    NEXT_INSTR_IN_BLOCK;
    return false;
}
  3314. #endif
  3315. FlowEdge *
  3316. FlowEdge::New(FlowGraph * graph)
  3317. {
  3318. FlowEdge * edge;
  3319. edge = JitAnew(graph->alloc, FlowEdge);
  3320. return edge;
  3321. }
  3322. bool
  3323. Loop::IsDescendentOrSelf(Loop const * loop) const
  3324. {
  3325. Loop const * currentLoop = loop;
  3326. while (currentLoop != nullptr)
  3327. {
  3328. if (currentLoop == this)
  3329. {
  3330. return true;
  3331. }
  3332. currentLoop = currentLoop->parent;
  3333. }
  3334. return false;
  3335. }
  3336. void FlowGraph::SafeRemoveInstr(IR::Instr *instr)
  3337. {
  3338. BasicBlock *block;
  3339. if (instr->m_next->IsLabelInstr())
  3340. {
  3341. block = instr->m_next->AsLabelInstr()->GetBasicBlock()->GetPrev();
  3342. block->RemoveInstr(instr);
  3343. }
  3344. else if (instr->IsLabelInstr())
  3345. {
  3346. block = instr->AsLabelInstr()->GetBasicBlock();
  3347. block->RemoveInstr(instr);
  3348. }
  3349. else
  3350. {
  3351. Assert(!instr->EndsBasicBlock() && !instr->StartsBasicBlock());
  3352. instr->Remove();
  3353. }
  3354. }
  3355. bool FlowGraph::IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1)
  3356. {
  3357. // Look for an unsigned constant, or the result of an unsigned shift by zero
  3358. if (!src->IsRegOpnd())
  3359. {
  3360. return false;
  3361. }
  3362. if (!src->AsRegOpnd()->m_sym->IsSingleDef())
  3363. {
  3364. return false;
  3365. }
  3366. if (src->AsRegOpnd()->m_sym->IsIntConst())
  3367. {
  3368. int32 intConst = src->AsRegOpnd()->m_sym->GetIntConstValue();
  3369. if (intConst >= 0)
  3370. {
  3371. *pShrSrc1 = src;
  3372. return true;
  3373. }
  3374. else
  3375. {
  3376. return false;
  3377. }
  3378. }
  3379. IR::Instr * shrUInstr = src->AsRegOpnd()->m_sym->GetInstrDef();
  3380. if (shrUInstr->m_opcode != Js::OpCode::ShrU_A)
  3381. {
  3382. return false;
  3383. }
  3384. IR::Opnd *shrCnt = shrUInstr->GetSrc2();
  3385. if (!shrCnt->IsRegOpnd() || !shrCnt->AsRegOpnd()->m_sym->IsTaggableIntConst() || shrCnt->AsRegOpnd()->m_sym->GetIntConstValue() != 0)
  3386. {
  3387. return false;
  3388. }
  3389. IR::Opnd *shrSrc = shrUInstr->GetSrc1();
  3390. *pShrSrc1 = shrSrc;
  3391. return true;
  3392. }
bool FlowGraph::UnsignedCmpPeep(IR::Instr *cmpInstr)
{
    // Rewrite a signed compare of two provably-unsigned operands into the
    // unsigned form, so globopt can int-type-specialize it. Returns true if
    // the instruction was rewritten.
    IR::Opnd *cmpSrc1 = cmpInstr->GetSrc1();
    IR::Opnd *cmpSrc2 = cmpInstr->GetSrc2();
    IR::Opnd *newSrc1;
    IR::Opnd *newSrc2;

    // Look for something like:
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  BrGt t1, t2, L
    //
    // Peep to:
    //
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  t3 = Or_A x, 0
    //  BrUnGt x, t3, L
    //  ByteCodeUse t1
    //
    // Hopefully dead-store can get rid of the ShrU

    // Only profitable/legal when globopt will run with lossy aggressive
    // int type specialization.
    if (!this->func->DoGlobOpt() || !GlobOpt::DoAggressiveIntTypeSpec(this->func) || !GlobOpt::DoLossyIntTypeSpec(this->func))
    {
        return false;
    }
    // Unconditional branches carry no comparison to rewrite.
    if (cmpInstr->IsBranchInstr() && !cmpInstr->AsBranchInstr()->IsConditional())
    {
        return false;
    }
    if (!cmpInstr->GetSrc2())
    {
        return false;
    }
    // Both operands must be provably unsigned (see IsUnsignedOpnd).
    if (!this->IsUnsignedOpnd(cmpSrc1, &newSrc1))
    {
        return false;
    }
    if (!this->IsUnsignedOpnd(cmpSrc2, &newSrc2))
    {
        return false;
    }
    // Map the opcode to its unsigned counterpart. Equality compares are
    // sign-agnostic and keep their opcode; anything else bails out.
    switch(cmpInstr->m_opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
        break;
    case Js::OpCode::BrLe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::BrLt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::BrGe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::BrGt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmLe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLe_A;
        break;
    case Js::OpCode::CmLt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLt_A;
        break;
    case Js::OpCode::CmGe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGe_A;
        break;
    case Js::OpCode::CmGt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGt_A;
        break;
    default:
        return false;
    }

    // Record byte code uses of the original (replaced) operands for bailout.
    IR::ByteCodeUsesInstr * bytecodeInstr = IR::ByteCodeUsesInstr::New(cmpInstr);
    cmpInstr->InsertAfter(bytecodeInstr);

    if (cmpSrc1 != newSrc1)
    {
        if (cmpSrc1->IsRegOpnd() && !cmpSrc1->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc1);
        }
        // Give the raw source a fresh def here (Or with 0) that globopt can
        // type-specialize, and swap it in as src1.
        IR::RegOpnd * unsignedSrc = IR::RegOpnd::New(newSrc1->GetType(), cmpInstr->m_func);
        IR::Instr * orZero = IR::Instr::New(Js::OpCode::Or_A, unsignedSrc, newSrc1, IR::IntConstOpnd::New(0, TyMachReg, cmpInstr->m_func), cmpInstr->m_func);
        orZero->SetByteCodeOffset(cmpInstr);
        cmpInstr->InsertBefore(orZero);
        cmpInstr->ReplaceSrc1(unsignedSrc);
        if (newSrc1->IsRegOpnd())
        {
            cmpInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
            orZero->GetSrc1()->SetIsJITOptimizedReg(true);
        }
    }
    if (cmpSrc2 != newSrc2)
    {
        if (cmpSrc2->IsRegOpnd() && !cmpSrc2->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc2);
        }
        // Same treatment for the second source.
        IR::RegOpnd * unsignedSrc = IR::RegOpnd::New(newSrc2->GetType(), cmpInstr->m_func);
        IR::Instr * orZero = IR::Instr::New(Js::OpCode::Or_A, unsignedSrc, newSrc2, IR::IntConstOpnd::New(0, TyMachReg, cmpInstr->m_func), cmpInstr->m_func);
        orZero->SetByteCodeOffset(cmpInstr);
        cmpInstr->InsertBefore(orZero);
        cmpInstr->ReplaceSrc2(unsignedSrc);
        if (newSrc2->IsRegOpnd())
        {
            cmpInstr->GetSrc2()->AsRegOpnd()->SetIsJITOptimizedReg(true);
            orZero->GetSrc1()->SetIsJITOptimizedReg(true);
        }
    }
    return true;
}
  3509. #if DBG
void
FlowGraph::VerifyLoopGraph()
{
    // Debug-only validation of the loop nesting: every edge must either stay
    // within one loop, enter another loop through its header, or exit to an
    // enclosing loop.
    FOREACH_BLOCK(block, this)
    {
        Loop *loop = block->loop;
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (loop == succ->loop)
            {
                // Same loop: a header target is only legal as this loop's
                // own head (a back edge).
                Assert(succ->isLoopHeader == false || loop->GetHeadBlock() == succ);
                continue;
            }
            if (succ->isLoopHeader)
            {
                // Entering a different loop: it must be a direct child of
                // ours, or a loop we are not nested inside at all.
                Assert(succ->loop->parent == loop
                    || (!loop->IsDescendentOrSelf(succ->loop)));
                continue;
            }
            // Otherwise we're exiting: the target's loop (if any) must
            // enclose the source's loop.
            Assert(succ->loop == nullptr || succ->loop->IsDescendentOrSelf(loop));
        } NEXT_SUCCESSOR_BLOCK;
        if (!PHASE_OFF(Js::RemoveBreakBlockPhase, this->GetFunc()))
        {
            // Make sure all break blocks have been removed.
            if (loop && !block->isLoopHeader && !(this->func->HasTry() && !this->func->DoOptimizeTry()))
            {
                Assert(loop->IsDescendentOrSelf(block->GetPrev()->loop));
            }
        }
    } NEXT_BLOCK;
}
  3541. #endif
  3542. #if DBG_DUMP
  3543. void
  3544. FlowGraph::Dump(bool onlyOnVerboseMode, const char16 *form)
  3545. {
  3546. if(PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  3547. {
  3548. if (!onlyOnVerboseMode || Js::Configuration::Global.flags.Verbose)
  3549. {
  3550. if (form)
  3551. {
  3552. Output::Print(form);
  3553. }
  3554. this->Dump();
  3555. }
  3556. }
  3557. }
void
FlowGraph::Dump()
{
    // Dump every block header, indented one step per level of loop nesting,
    // then dump each loop's member blocks.
    Output::Print(_u("\nFlowGraph\n"));
    FOREACH_BLOCK(block, this)
    {
        // Indent by loop nesting depth.
        Loop * loop = block->loop;
        while (loop)
        {
            Output::Print(_u(" "));
            loop = loop->parent;
        }
        block->DumpHeader(false);
    } NEXT_BLOCK;
    Output::Print(_u("\nLoopGraph\n"));
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        Output::Print(_u("\nLoop\n"));
        FOREACH_BLOCK_IN_LOOP(block, loop)
        {
            block->DumpHeader(false);
        }NEXT_BLOCK_IN_LOOP;
        Output::Print(_u("Loop Ends\n"));
    }
}
void
BasicBlock::DumpHeader(bool insertCR)
{
    // Dump one line summarizing this block: number, status flags, live and
    // dead predecessor/successor lists, and loop membership info.
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
    Output::Print(_u("BLOCK %d:"), this->number);

    // Status flags.
    if (this->isDead)
    {
        Output::Print(_u(" **** DEAD ****"));
    }
    if (this->isBreakBlock)
    {
        Output::Print(_u(" **** Break Block ****"));
    }
    else if (this->isAirLockBlock)
    {
        Output::Print(_u(" **** Air lock Block ****"));
    }
    else if (this->isBreakCompensationBlockAtSource)
    {
        Output::Print(_u(" **** Break Source Compensation Code ****"));
    }
    else if (this->isBreakCompensationBlockAtSink)
    {
        Output::Print(_u(" **** Break Sink Compensation Code ****"));
    }
    else if (this->isAirLockCompensationBlock)
    {
        Output::Print(_u(" **** Airlock block Compensation Code ****"));
    }

    // Live predecessors.
    if (!this->predList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" In("));
        FOREACH_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Live successors.
    if (!this->succList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" Out("));
        FOREACH_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Dead predecessors (edges parked during globopt).
    if (!this->deadPredList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadIn("));
        FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Dead successors.
    if (!this->deadSuccList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadOut("));
        FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Loop membership; for the loop head, also dump call/implicit-call info.
    if (this->loop)
    {
        Output::Print(_u(" Loop(%d) header: %d"), this->loop->loopNumber, this->loop->GetHeadBlock()->GetBlockNum());
        if (this->loop->parent)
        {
            Output::Print(_u(" parent(%d): %d"), this->loop->parent->loopNumber, this->loop->parent->GetHeadBlock()->GetBlockNum());
        }
        if (this->loop->GetHeadBlock() == this)
        {
            Output::SkipToColumn(50);
            Output::Print(_u("Call Exp/Imp: "));
            if (this->loop->GetHasCall())
            {
                Output::Print(_u("yes/"));
            }
            else
            {
                Output::Print(_u(" no/"));
            }
            Output::Print(Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->loop->GetImplicitCallFlags()));
        }
    }
    Output::Print(_u("\n"));
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
}
  3707. void
  3708. BasicBlock::Dump()
  3709. {
  3710. // Dumping the first instruction (label) will dump the block header as well.
  3711. FOREACH_INSTR_IN_BLOCK(instr, this)
  3712. {
  3713. instr->Dump();
  3714. }
  3715. NEXT_INSTR_IN_BLOCK;
  3716. }
  3717. void
  3718. AddPropertyCacheBucket::Dump() const
  3719. {
  3720. Assert(this->initialType != nullptr);
  3721. Assert(this->finalType != nullptr);
  3722. Output::Print(_u(" initial type: 0x%x, final type: 0x%x "), this->initialType->GetAddr(), this->finalType->GetAddr());
  3723. }
  3724. void
  3725. ObjTypeGuardBucket::Dump() const
  3726. {
  3727. Assert(this->guardedPropertyOps != nullptr);
  3728. this->guardedPropertyOps->Dump();
  3729. }
  3730. void
  3731. ObjWriteGuardBucket::Dump() const
  3732. {
  3733. Assert(this->writeGuards != nullptr);
  3734. this->writeGuards->Dump();
  3735. }
  3736. #endif
void
BasicBlock::CleanUpValueMaps()
{
    // Prunes this block's forward-pass GlobOpt tables so they do not grow
    // without bound as the pass walks the function:
    //   1. symToValueMap entries for syms the backward pass found dead
    //      (not upward exposed) are removed or marked for removal;
    //   2. exprToValueMap (CSE candidate) entries whose source value numbers
    //      are no longer available are removed;
    //   3. liveness bit vectors are cleared for every removed sym.
    // Finally releases this block's backward-pass bit vectors
    // (upwardExposedUses / upwardExposedFields / cloneStrCandidates).

    // Don't do cleanup if it's been done recently.
    // Landing pad could get optimized twice...
    // We want the same info out the first and second time. So always cleanup.
    // Increasing the cleanup threshold count for asmjs to 500
    uint cleanupCount = (!this->globOptData.globOpt->GetIsAsmJSFunc()) ? CONFIG_FLAG(GoptCleanupThreshold) : CONFIG_FLAG(AsmGoptCleanupThreshold);
    if (!this->IsLandingPad() && this->globOptData.globOpt->instrCountSinceLastCleanUp < cleanupCount)
    {
        return;
    }
    this->globOptData.globOpt->instrCountSinceLastCleanUp = 0;

    JitArenaAllocator* tempAlloc = this->globOptData.globOpt->tempAlloc;

    GlobHashTable *thisTable = this->globOptData.symToValueMap;
    // deadSymsBv: candidates for removal from the sym table (deferred so that
    // copy-prop candidates can be rescued below).
    BVSparse<JitArenaAllocator> deadSymsBv(tempAlloc);
    // keepAliveSymsBv: syms referenced as the symStore of some other entry;
    // these must survive even if otherwise dead.
    BVSparse<JitArenaAllocator> keepAliveSymsBv(tempAlloc);
    // availableValueNumbers: value numbers still reachable from surviving
    // entries; seeded with the byte-code constant value numbers.
    BVSparse<JitArenaAllocator> availableValueNumbers(tempAlloc);
    availableValueNumbers.Copy(this->globOptData.globOpt->byteCodeConstantValueNumbersBv);
    BVSparse<JitArenaAllocator> *upwardExposedUses = this->upwardExposedUses;
    BVSparse<JitArenaAllocator> *upwardExposedFields = this->upwardExposedFields;
    bool isInLoop = !!this->loop;

    // Collect syms referenced by the pending call (argument-evaluation)
    // sequence; their table entries must not be removed (see comment below).
    BVSparse<JitArenaAllocator> symsInCallSequence(tempAlloc);
    SListBase<IR::Opnd *> * callSequence = this->globOptData.callSequence;
    if (callSequence && !callSequence->Empty())
    {
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, callSequence)
        {
            StackSym * sym = opnd->GetStackSym();
            symsInCallSequence.Set(sym->m_id);
        }
    }
    // NOTE(review): NEXT_SLISTBASE_ENTRY appears after the if's closing brace;
    // this balances only because the FOREACH/NEXT macro pair supplies the
    // matching braces. Preserved as-is.
    NEXT_SLISTBASE_ENTRY;

    // Pass 1: walk every bucket of the sym-to-value hash table and classify
    // each entry as keep / defer-remove (deadSymsBv) / remove-now.
    for (uint i = 0; i < thisTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(GlobHashBucket, bucket, &thisTable->table[i], iter)
        {
            bool isSymUpwardExposed = upwardExposedUses->Test(bucket.value->m_id) || upwardExposedFields->Test(bucket.value->m_id);
            if (!isSymUpwardExposed && symsInCallSequence.Test(bucket.value->m_id))
            {
                // Don't remove/shrink sym-value pair if the sym is referenced in callSequence even if the sym is dead according to backward data flow.
                // This is possible in some edge cases that an infinite loop is involved when evaluating parameter for a function (between StartCall and Call),
                // there is no backward data flow into the infinite loop block, but non empty callSequence still populates to it in this (forward) pass
                // which causes error when looking up value for the syms in callSequence (cannot find the value).
                // It would cause error to fill out the bailout information for the loop blocks.
                // Remove dead syms from callSequence has some risk because there are various associated counters which need to be consistent.
                continue;
            }

            // Make sure symbol was created before backward pass.
            // If symbols isn't upward exposed, mark it as dead.
            // If a symbol was copy-prop'd in a loop prepass, the upwardExposedUses info could be wrong. So wait until we are out of the loop before clearing it.
            if ((SymID)bucket.value->m_id <= this->globOptData.globOpt->maxInitialSymID && !isSymUpwardExposed
                && (!isInLoop || !this->globOptData.globOpt->prePassCopyPropSym->Test(bucket.value->m_id)))
            {
                Value *val = bucket.element;
                ValueInfo *valueInfo = val->GetValueInfo();
                Sym * sym = bucket.value;
                Sym *symStore = valueInfo->GetSymStore();

                if (symStore && symStore == bucket.value)
                {
                    // This sym is its own symStore, i.e. a potential
                    // copy-prop source for other entries.
                    // Keep constants around, as we don't know if there will be further uses
                    if (!bucket.element->GetValueInfo()->IsVarConstant() && !bucket.element->GetValueInfo()->HasIntConstantValue())
                    {
                        // Symbol may still be a copy-prop candidate. Wait before deleting it.
                        deadSymsBv.Set(bucket.value->m_id);

                        // Make sure the type sym is added to the dead syms vector as well, because type syms are
                        // created in backward pass and so their symIds > maxInitialSymID.
                        if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                        {
                            deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
                else
                {
                    // Make sure the type sym is added to the dead syms vector as well, because type syms are
                    // created in backward pass and so their symIds > maxInitialSymID. Perhaps we could remove
                    // it explicitly here, but would it work alright with the iterator?
                    if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                    {
                        deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                    }

                    // Not a copy-prop candidate; delete it right away.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->globOptData.liveInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveFloat64Syms->Clear(sym->m_id);
                }
            }
            else
            {
                Sym * sym = bucket.value;

                if (sym->IsPropertySym() && !this->globOptData.liveFields->Test(sym->m_id))
                {
                    // Remove propertySyms which are not live anymore.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->globOptData.liveInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->globOptData.liveFloat64Syms->Clear(sym->m_id);
                }
                else
                {
                    // Look at the copy-prop candidate. We don't want to get rid of the data for a symbol which is
                    // a copy-prop candidate.
                    Value *val = bucket.element;
                    ValueInfo *valueInfo = val->GetValueInfo();
                    Sym *symStore = valueInfo->GetSymStore();
                    if (symStore && symStore != bucket.value)
                    {
                        keepAliveSymsBv.Set(symStore->m_id);
                        if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                        {
                            keepAliveSymsBv.Set(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    // Anything referenced as a live symStore is rescued from removal.
    deadSymsBv.Minus(&keepAliveSymsBv);

    // Now cleanup exprToValueMap table
    ExprHashTable *thisExprTable = this->globOptData.exprToValueMap;
    bool oldHasCSECandidatesValue = this->globOptData.hasCSECandidates; // Could be false if none need bailout.
    // Assume no candidates survive; restored below if any entry is kept.
    this->globOptData.hasCSECandidates = false;

    for (uint i = 0; i < thisExprTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(ExprHashBucket, bucket, &thisExprTable->table[i], iter)
        {
            ExprHash hash = bucket.value;
            ValueNumber src1ValNum = hash.GetSrc1ValueNumber();
            ValueNumber src2ValNum = hash.GetSrc2ValueNumber();

            // If src1Val or src2Val are not available anymore, no point keeping this CSE candidate
            bool removeCurrent = false;
            if ((src1ValNum && !availableValueNumbers.Test(src1ValNum))
                || (src2ValNum && !availableValueNumbers.Test(src2ValNum)))
            {
                removeCurrent = true;
            }
            else
            {
                // If we are keeping this value, make sure we also keep the symStore in the value table
                removeCurrent = true; // Remove by default, unless it's set to false later below.
                Value *val = bucket.element;
                if (val)
                {
                    Sym *symStore = val->GetValueInfo()->GetSymStore();
                    if (symStore)
                    {
                        Value *symStoreVal = this->globOptData.FindValue(symStore);

                        if (symStoreVal && symStoreVal->GetValueNumber() == val->GetValueNumber())
                        {
                            removeCurrent = false;
                            // The CSE entry still needs this symStore: un-kill it.
                            deadSymsBv.Clear(symStore->m_id);
                            if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                            {
                                deadSymsBv.Clear(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                            }
                        }
                    }
                }
            }

            if(removeCurrent)
            {
                iter.RemoveCurrent(thisExprTable->alloc);
            }
            else
            {
                this->globOptData.hasCSECandidates = oldHasCSECandidatesValue;
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    // Drop the sym-table entries for everything still marked dead.
    FOREACH_BITSET_IN_SPARSEBV(dead_id, &deadSymsBv)
    {
        thisTable->Clear(dead_id);
    }
    NEXT_BITSET_IN_SPARSEBV;

    if (!deadSymsBv.IsEmpty())
    {
        if (this->func->IsJitInDebugMode())
        {
            // Do not remove non-temp local vars from liveVarSyms (i.e. do not let them become dead).
            // We will need to restore all initialized/used so far non-temp local during bail out.
            // (See BackwardPass::ProcessBailOutInfo)
            Assert(this->func->m_nonTempLocalVars);
            BVSparse<JitArenaAllocator> tempBv(tempAlloc);
            tempBv.Minus(&deadSymsBv, this->func->m_nonTempLocalVars);
            this->globOptData.liveVarSyms->Minus(&tempBv);
#if DBG
            tempBv.And(this->globOptData.liveInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveInt32Syms?");
            tempBv.And(this->globOptData.liveLossyInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveLossyInt32Syms?");
            tempBv.And(this->globOptData.liveFloat64Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveFloat64Syms?");
#endif
        }
        else
        {
            this->globOptData.liveVarSyms->Minus(&deadSymsBv);
        }

        this->globOptData.liveInt32Syms->Minus(&deadSymsBv);
        this->globOptData.liveLossyInt32Syms->Minus(&deadSymsBv);
        this->globOptData.liveFloat64Syms->Minus(&deadSymsBv);
    }

    // The backward-pass vectors have now been consumed; free them.
    JitAdelete(this->globOptData.globOpt->alloc, upwardExposedUses);
    this->upwardExposedUses = nullptr;
    JitAdelete(this->globOptData.globOpt->alloc, upwardExposedFields);
    this->upwardExposedFields = nullptr;
    if (this->cloneStrCandidates)
    {
        JitAdelete(this->globOptData.globOpt->alloc, this->cloneStrCandidates);
        this->cloneStrCandidates = nullptr;
    }
}
void
BasicBlock::MergePredBlocksValueMaps(GlobOpt* globOpt)
{
    // Builds this block's incoming GlobOptBlockData at the start of forward
    // processing by merging the data of all already-processed predecessors:
    //   - clones or reuses the first predecessor's data, then merges in the
    //     rest (collecting syms that will need compensation);
    //   - on a loop-prepass header, skips back-edges, captures bailout info
    //     for the root prepass loop, and returns early;
    //   - otherwise, after the merge, specializes loop-header value infos to
    //     match the merged type-spec bit vectors, then inserts conversion /
    //     compensation code per predecessor edge, creating airlock blocks on
    //     edges where the predecessor cannot be edited directly.
    Assert(!globOpt->isCallHelper);
    // We keep a local temporary copy for the merge
    GlobOptBlockData blockData(this->globOptData.curFunc);
    if (!globOpt->isRecursiveCallOnLandingPad)
    {
        blockData.NullOutBlockData(globOpt, globOpt->func);
    }
    else
    {
        // If we are going over the landing pad again after field PRE, just start again
        // with the value table where we left off.
        blockData.CopyBlockData(&this->globOptData);
        return;
    }

    BVSparse<JitArenaAllocator> symsRequiringCompensation(globOpt->tempAlloc);
    {
        BVSparse<JitArenaAllocator> symsCreatedForMerge(globOpt->tempAlloc);
        bool forceTypeSpecOnLoopHeader = true;
        FOREACH_PREDECESSOR_BLOCK(pred, this)
        {
            // Free empty call sequences eagerly to reduce arena pressure.
            if (pred->globOptData.callSequence && pred->globOptData.callSequence->Empty())
            {
                JitAdelete(globOpt->alloc, pred->globOptData.callSequence);
                pred->globOptData.callSequence = nullptr;
            }
            if (this->isLoopHeader && globOpt->IsLoopPrePass() && globOpt->prePassLoop == this->loop && this->loop->IsDescendentOrSelf(pred->loop))
            {
                // Loop back-edge.
                // First pass on loop runs optimistically, without doing transforms.
                // Skip this edge for now.
                continue;
            }

            // __edge is supplied by the FOREACH_PREDECESSOR_BLOCK macro.
            PathDependentInfo *const pathDependentInfo = __edge->GetPathDependentInfo();
            PathDependentInfoToRestore pathDependentInfoToRestore;
            if (pathDependentInfo)
            {
                pathDependentInfoToRestore = globOpt->UpdatePathDependentInfo(pathDependentInfo);
            }

            Assert(pred->GetDataUseCount());

            // First pred?
            if (blockData.symToValueMap == nullptr)
            {
                // Only one edge?
                if (pred->GetSuccList()->HasOne() && this->GetPredList()->HasOne() && this->loop == nullptr)
                {
                    // Straight-line flow: steal the predecessor's data
                    // outright instead of cloning it.
                    blockData.ReuseBlockData(&pred->globOptData);

                    // Don't need to restore the old value info
                    pathDependentInfoToRestore.Clear();
                }
                else
                {
                    blockData.CloneBlockData(this, pred);
                }
            }
            else
            {
                const bool isLoopPrePass = globOpt->IsLoopPrePass();
                blockData.MergeBlockData(
                    this,
                    pred,
                    isLoopPrePass ? nullptr : &symsRequiringCompensation,
                    isLoopPrePass ? nullptr : &symsCreatedForMerge,
                    forceTypeSpecOnLoopHeader);
                forceTypeSpecOnLoopHeader = false; // can force type-spec on the loop header only for the first back edge.
            }

            // Restore the value for the next edge
            if (pathDependentInfo)
            {
                globOpt->RestorePathDependentInfo(pathDependentInfo, pathDependentInfoToRestore);
                __edge->ClearPathDependentInfo(globOpt->alloc);
            }
        } NEXT_PREDECESSOR_BLOCK;
    }

    // Consider: We can recreate values for hoisted field so it can copy prop out of the loop
    if (blockData.symToValueMap == nullptr)
    {
        // No predecessor contributed data (e.g. all edges skipped): start fresh.
        Assert(blockData.hoistableFields == nullptr);
        blockData.InitBlockData(globOpt, globOpt->func);
    }
    else if (blockData.hoistableFields)
    {
        Assert(globOpt->TrackHoistableFields());
        blockData.hoistableFields->And(this->globOptData.liveFields);
    }

    if (!globOpt->DoObjTypeSpec())
    {
        // Object type specialization is off, but if copy prop is on (e.g., /force:fieldhoist) we're not clearing liveFields,
        // so we may be letting type syms slip through this block.
        globOpt->KillAllObjectTypes(this->globOptData.liveFields);
    }

    // Publish the merged data as this block's data.
    this->globOptData.CopyBlockData(&blockData);

    if (globOpt->IsLoopPrePass())
    {
        Assert(this->loop);

        if(globOpt->DoBoundCheckHoist())
        {
            globOpt->SetInductionVariableValueNumbers(&blockData);
        }

        if (this->isLoopHeader && globOpt->rootLoopPrePass == this->loop)
        {
            // Capture bail out info in case we have optimization that needs it
            Assert(this->loop->bailOutInfo == nullptr);
            IR::Instr * firstInstr = this->GetFirstInstr();
            this->loop->bailOutInfo = JitAnew(globOpt->func->m_alloc, BailOutInfo,
                firstInstr->GetByteCodeOffset(), firstInstr->m_func);
            globOpt->FillBailOutInfo(this, this->loop->bailOutInfo);
#if ENABLE_DEBUG_CONFIG_OPTIONS
            this->loop->bailOutInfo->bailOutOpcode = Js::OpCode::LoopBodyStart;
#endif
        }

        // If loop pre-pass, don't insert convert from type-spec to var
        return;
    }

    this->CleanUpValueMaps();
    Sym *symIV = nullptr;

    // Clean up the syms requiring compensation by checking the final value in the merged block to see if the sym still requires
    // compensation. All the while, create a mapping from sym to value info in the merged block. This dictionary helps avoid a
    // value lookup in the merged block per predecessor.
    SymToValueInfoMap symsRequiringCompensationToMergedValueInfoMap(globOpt->tempAlloc);
    if(!symsRequiringCompensation.IsEmpty())
    {
        const SymTable *const symTable = func->m_symTable;
        FOREACH_BITSET_IN_SPARSEBV(id, &symsRequiringCompensation)
        {
            Sym *const sym = symTable->Find(id);
            Assert(sym);
            Value *const value = blockData.FindValue(sym);
            if(!value)
            {
                continue;
            }
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsArrayValueInfo())
            {
                continue;
            }

            // At least one new sym was created while merging and associated with the merged value info, so those syms will
            // require compensation in predecessors. For now, the dead store phase is relied upon to remove compensation that is
            // dead due to no further uses of the new sym.
            symsRequiringCompensationToMergedValueInfoMap.Add(sym, valueInfo);
        } NEXT_BITSET_IN_SPARSEBV;
        symsRequiringCompensation.ClearAll();
    }

    if (this->isLoopHeader)
    {
        BVSparse<JitArenaAllocator> * tempBv = globOpt->tempBv;

        // Values on the back-edge in the prepass may be conservative for syms defined in the loop, and type specialization in
        // the prepass is not reflective of the value, but rather, is used to determine whether the sym should be specialized
        // around the loop. Additionally, some syms that are used before defined in the loop may be specialized in the loop
        // header despite not being specialized in the landing pad. Now that the type specialization bit-vectors are merged,
        // specialize the corresponding value infos in the loop header too.

        Assert(tempBv->IsEmpty());
        Loop *const loop = this->loop;
        SymTable *const symTable = func->m_symTable;
        JitArenaAllocator *const alloc = globOpt->alloc;

        // Int-specialized syms
        tempBv->Or(loop->likelyIntSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->And(blockData.liveInt32Syms);
        tempBv->Minus(blockData.liveLossyInt32Syms);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);
            Value *const value = blockData.FindValue(varSym);
            Assert(value);
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsInt())
            {
                globOpt->ChangeValueInfo(nullptr, value, valueInfo->SpecializeToInt32(alloc));
            }
        } NEXT_BITSET_IN_SPARSEBV;

        // Float-specialized syms
        tempBv->Or(loop->likelyNumberSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->forceFloat64SymsOnEntry);
        tempBv->And(blockData.liveFloat64Syms);
        GlobOptBlockData &landingPadBlockData = loop->landingPad->globOptData;
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);

            // If the type-spec sym is null or if the sym is not float-specialized in the loop landing pad, the sym may have
            // been merged to float on a loop back-edge when it was live as float on the back-edge, and live as int in the loop
            // header. In this case, compensation inserted in the loop landing pad will use BailOutNumberOnly, and so it is
            // guaranteed that the value will be float. Otherwise, if the type-spec sym exists, its field can be checked to see
            // if it's prevented from being anything but a number.
            StackSym *const typeSpecSym = varSym->GetFloat64EquivSym(nullptr);
            if(!typeSpecSym ||
                typeSpecSym->m_requiresBailOnNotNumber ||
                !landingPadBlockData.IsFloat64TypeSpecialized(varSym))
            {
                Value *const value = blockData.FindValue(varSym);
                if(value)
                {
                    ValueInfo *const valueInfo = value->GetValueInfo();
                    if(!valueInfo->IsNumber())
                    {
                        globOpt->ChangeValueInfo(this, value, valueInfo->SpecializeToFloat64(alloc));
                    }
                }
                else
                {
                    this->globOptData.SetValue(globOpt->NewGenericValue(ValueType::Float), varSym);
                }
            }
        } NEXT_BITSET_IN_SPARSEBV;

#ifdef ENABLE_SIMDJS
        // SIMD_JS
        // Simd128 type-spec syms
        BVSparse<JitArenaAllocator> tempBv2(globOpt->tempAlloc);

        // For syms we made alive in loop header because of hoisting, use-before-def, or def in Loop body, set their valueInfo to definite.
        // Make live on header AND in one of forceSimd128* or likelySimd128* vectors.
        tempBv->Or(loop->likelySimd128F4SymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->likelySimd128I4SymsUsedBeforeDefined);
        tempBv->Or(loop->forceSimd128F4SymsOnEntry);
        tempBv->Or(loop->forceSimd128I4SymsOnEntry);
        tempBv2.Or(blockData.liveSimd128F4Syms, blockData.liveSimd128I4Syms);
        tempBv->And(&tempBv2);

        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym * typeSpecSym = nullptr;
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);

            if (blockData.liveSimd128F4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128F4EquivSym(nullptr);

                if (!typeSpecSym || !landingPadBlockData.IsSimd128F4TypeSpecialized(varSym))
                {
                    Value *const value = blockData.FindValue(varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Float32x4())
                        {
                            globOpt->ChangeValueInfo(this, value, valueInfo->SpecializeToSimd128F4(alloc));
                        }
                    }
                    else
                    {
                        this->globOptData.SetValue(globOpt->NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Float32x4), varSym), varSym);
                    }
                }
            }
            else if (blockData.liveSimd128I4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128I4EquivSym(nullptr);
                if (!typeSpecSym || !landingPadBlockData.IsSimd128I4TypeSpecialized(varSym))
                {
                    Value *const value = blockData.FindValue(varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Int32x4())
                        {
                            globOpt->ChangeValueInfo(this, value, valueInfo->SpecializeToSimd128I4(alloc));
                        }
                    }
                    else
                    {
                        this->globOptData.SetValue(globOpt->NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Int32x4), varSym), varSym);
                    }
                }
            }
            else
            {
                Assert(UNREACHED);
            }
        } NEXT_BITSET_IN_SPARSEBV;
#endif
        tempBv->ClearAll();
    }

    // We need to handle the case where a symbol is type-spec'd coming from some predecessors,
    // but not from others.
    //
    // We can do this by inserting the right conversion in the predecessor block, but we
    // can only do this if we are the first successor of that block, since the previous successors
    // would have already been processed. Instead, we'll need to break the edge and insert a block
    // (airlock block) to put in the conversion code.
    Assert(globOpt->tempBv->IsEmpty());

    BVSparse<JitArenaAllocator> tempBv2(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv3(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv4(globOpt->tempAlloc);
#ifdef ENABLE_SIMDJS
    // SIMD_JS
    BVSparse<JitArenaAllocator> simd128F4SymsToUnbox(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> simd128I4SymsToUnbox(globOpt->tempAlloc);
#endif

    FOREACH_PREDECESSOR_EDGE_EDITING(edge, this, iter)
    {
        BasicBlock *pred = edge->GetPred();

        if (pred->loop && pred->loop->GetHeadBlock() == this)
        {
            pred->DecrementDataUseCount();
            // Skip loop back-edges. We will handle these when we get to the exit blocks.
            continue;
        }

        BasicBlock *orgPred = nullptr;
        if (pred->isAirLockCompensationBlock)
        {
            // Look through an existing compensation block to the real predecessor.
            Assert(pred->GetPredList()->HasOne());
            orgPred = pred;
            pred = (pred->GetPredList()->Head())->GetPred();
        }

        // Lossy int in the merged block, and no int in the predecessor - need a lossy conversion to int
        tempBv2.Minus(blockData.liveLossyInt32Syms, pred->globOptData.liveInt32Syms);

        // Lossless int in the merged block, and no lossless int in the predecessor - need a lossless conversion to int
        tempBv3.Minus(blockData.liveInt32Syms, this->globOptData.liveLossyInt32Syms);
        globOpt->tempBv->Minus(pred->globOptData.liveInt32Syms, pred->globOptData.liveLossyInt32Syms);
        tempBv3.Minus(globOpt->tempBv);

        // Var in the merged block but not in the predecessor - convert to var.
        globOpt->tempBv->Minus(blockData.liveVarSyms, pred->globOptData.liveVarSyms);
        // Float in the merged block but not in the predecessor - convert to float.
        tempBv4.Minus(blockData.liveFloat64Syms, pred->globOptData.liveFloat64Syms);

        bool symIVNeedsSpecializing = (symIV && !pred->globOptData.liveInt32Syms->Test(symIV->m_id) && !tempBv3.Test(symIV->m_id));

#ifdef ENABLE_SIMDJS
        // SIMD_JS
        simd128F4SymsToUnbox.Minus(blockData.liveSimd128F4Syms, pred->globOptData.liveSimd128F4Syms);
        simd128I4SymsToUnbox.Minus(blockData.liveSimd128I4Syms, pred->globOptData.liveSimd128I4Syms);
#endif

        if (!globOpt->tempBv->IsEmpty() ||
            !tempBv2.IsEmpty() ||
            !tempBv3.IsEmpty() ||
            !tempBv4.IsEmpty() ||
#ifdef ENABLE_SIMDJS
            !simd128F4SymsToUnbox.IsEmpty() ||
            !simd128I4SymsToUnbox.IsEmpty() ||
#endif
            symIVNeedsSpecializing ||
            symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
        {
            // We can't un-specialize a symbol in a predecessor if we've already processed
            // a successor of that block. Instead, insert a new block on the flow edge
            // (an airlock block) and do the un-specialization there.
            //
            // Alternatively, the current block could be an exit block out of this loop, and so the predecessor may exit the
            // loop. In that case, if the predecessor may continue into the loop without exiting, then we need an airlock block
            // to do the appropriate conversions only on the exit path (preferring not to do the conversions inside the loop).
            // If, on the other hand, the predecessor always flows into the current block, then it always exits, so we don't need
            // an airlock block and can just do the conversions in the predecessor.
            if (pred->GetSuccList()->Head()->GetSucc() != this ||
                (pred->loop && pred->loop->parent == this->loop && pred->GetSuccList()->Count() > 1))
            {
                BasicBlock *airlockBlock = nullptr;
                if (!orgPred)
                {
                    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !globOpt->IsLoopPrePass())
                    {
                        Output::Print(_u("TRACE: "));
                        Output::Print(_u("Inserting airlock block to convert syms to var between block %d and %d\n"),
                            pred->GetBlockNum(), this->GetBlockNum());
                        Output::Flush();
                    }
                    airlockBlock = globOpt->func->m_fg->InsertAirlockBlock(edge);
                }
                else
                {
                    Assert(orgPred->isAirLockCompensationBlock);
                    airlockBlock = orgPred;
                    pred->DecrementDataUseCount();
                    airlockBlock->isAirLockCompensationBlock = false; // This is airlock block now. So remove the attribute.
                }
                globOpt->CloneBlockData(airlockBlock, pred);
                // From here on, the conversions are emitted into the airlock block.
                pred = airlockBlock;
            }
            if (!globOpt->tempBv->IsEmpty())
            {
                globOpt->ToVar(globOpt->tempBv, pred);
            }
            if (!tempBv2.IsEmpty())
            {
                globOpt->ToInt32(&tempBv2, pred, true /* lossy */);
            }
            if (!tempBv3.IsEmpty())
            {
                globOpt->ToInt32(&tempBv3, pred, false /* lossy */);
            }
            if (!tempBv4.IsEmpty())
            {
                globOpt->ToFloat64(&tempBv4, pred);
            }
            if (symIVNeedsSpecializing)
            {
                globOpt->tempBv->ClearAll();
                globOpt->tempBv->Set(symIV->m_id);
                globOpt->ToInt32(globOpt->tempBv, pred, false /* lossy */);
            }
            if(symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
            {
                globOpt->InsertValueCompensation(pred, symsRequiringCompensationToMergedValueInfoMap);
            }

#ifdef ENABLE_SIMDJS
            // SIMD_JS
            if (!simd128F4SymsToUnbox.IsEmpty())
            {
                globOpt->ToTypeSpec(&simd128F4SymsToUnbox, pred, TySimd128F4, IR::BailOutSimd128F4Only);
            }

            if (!simd128I4SymsToUnbox.IsEmpty())
            {
                globOpt->ToTypeSpec(&simd128I4SymsToUnbox, pred, TySimd128I4, IR::BailOutSimd128I4Only);
            }
#endif
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    FOREACH_PREDECESSOR_EDGE(edge, this)
    {
        // Peak Memory optimization:
        // These are in an arena, but putting them on the free list greatly reduces
        // the peak memory used by the global optimizer for complex flow graphs.
        BasicBlock *pred = edge->GetPred();
        if (!this->isLoopHeader || this->loop != pred->loop)
        {
            // Skip airlock compensation block as we are not going to walk this block.
            if (pred->isAirLockCompensationBlock)
            {
                pred->DecrementDataUseCount();
                Assert(pred->GetPredList()->HasOne());
                pred = (pred->GetPredList()->Head())->GetPred();
            }

            if (pred->DecrementDataUseCount() == 0 && (!this->loop || this->loop->landingPad != pred))
            {
                if (!(pred->GetSuccList()->HasOne() && this->GetPredList()->HasOne() && this->loop == nullptr))
                {
                    pred->globOptData.DeleteBlockData();
                }
                else
                {
                    // Data was reused via ReuseBlockData above; just null it out.
                    pred->globOptData.NullOutBlockData(globOpt, globOpt->func);
                }
            }
        }
    } NEXT_PREDECESSOR_EDGE;

    globOpt->tempBv->ClearAll();
    Assert(!globOpt->IsLoopPrePass());   // We already early return if we are in prepass

    if (this->isLoopHeader)
    {
        Loop *const loop = this->loop;

        // Save values live on loop entry, such that we can adjust the state of the
        // values on the back-edge to match.
        loop->varSymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->varSymsOnEntry->Copy(this->globOptData.liveVarSyms);

        loop->int32SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->int32SymsOnEntry->Copy(this->globOptData.liveInt32Syms);

        loop->lossyInt32SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->lossyInt32SymsOnEntry->Copy(this->globOptData.liveLossyInt32Syms);

        loop->float64SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->float64SymsOnEntry->Copy(this->globOptData.liveFloat64Syms);

#ifdef ENABLE_SIMDJS
        // SIMD_JS
        loop->simd128F4SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->simd128F4SymsOnEntry->Copy(this->globOptData.liveSimd128F4Syms);

        loop->simd128I4SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->simd128I4SymsOnEntry->Copy(this->globOptData.liveSimd128I4Syms);
#endif

        loop->liveFieldsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->liveFieldsOnEntry->Copy(this->globOptData.liveFields);

        if(globOpt->DoBoundCheckHoist() && loop->inductionVariables)
        {
            globOpt->FinalizeInductionVariables(loop, &blockData);
            if(globOpt->DoLoopCountBasedBoundCheckHoist())
            {
                globOpt->DetermineDominatingLoopCountableBlock(loop, this);
            }
        }
    }
    else if (!this->loop)
    {
        this->SetDataUseCount(this->GetSuccList()->Count());
    }
    else if(this == this->loop->dominatingLoopCountableBlock)
    {
        globOpt->DetermineLoopCount(this->loop);
    }
}
  4426. void GlobOpt::CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock)
  4427. {
  4428. toBlock->globOptData.CloneBlockData(toBlock, fromBlock);
  4429. }
void
GlobOpt::CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    // Walk toData's symbol-to-value hash table and replace every Value with a
    // private copy, so toBlock can mutate value info without affecting the
    // block whose data toData was cloned from. Symbols that share a
    // ValueNumber keep sharing a single (new) copy.
    // NOTE(review): fromData is not read in this body; toData is presumably
    // expected to still reference fromData's shared values on entry — confirm
    // against the caller.

    // Fresh kill-on-call set for the cloned values; it is populated below via
    // TrackMergedValueForKills as each copy is created.
    ValueSet *const valuesToKillOnCalls = JitAnew(this->alloc, ValueSet, this->alloc);
    toData->valuesToKillOnCalls = valuesToKillOnCalls;

    // Values are shared between symbols with the same ValueNumber.
    // Use a dictionary to share the clone values.
    ValueSetByValueNumber *const valuesCreatedForClone = this->valuesCreatedForClone;
    Assert(valuesCreatedForClone);
    Assert(valuesCreatedForClone->Count() == 0); // shared scratch map must start empty; cleared again at the end

    // Debug-only record of the original values seen, used to verify the
    // one-value-per-value-number invariant in the DBG block below.
    DebugOnly(ValueSetByValueNumber originalValues(tempAlloc, 64));

    const uint tableSize = toData->symToValueMap->tableSize;
    SListBase<GlobHashBucket> *const table = toData->symToValueMap->table;
    for (uint i = 0; i < tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &table[i])
        {
            Value *value = bucket.element;
            ValueNumber valueNum = value->GetValueNumber();
#if DBG
            // Ensure that the set of values in fromData contains only one value per value number. Byte-code constant values
            // are reused in multiple blocks without cloning, so exclude those value numbers.
            {
                Value *const previouslyClonedOriginalValue = originalValues.Lookup(valueNum);
                if (previouslyClonedOriginalValue)
                {
                    if (!byteCodeConstantValueNumbersBv->Test(valueNum))
                    {
                        Assert(value == previouslyClonedOriginalValue);
                    }
                }
                else
                {
                    originalValues.Add(value);
                }
            }
#endif
            // Reuse the copy if another sym with this value number was already
            // cloned; otherwise make one and register it for call-time kills.
            Value *newValue = valuesCreatedForClone->Lookup(valueNum);
            if (!newValue)
            {
                newValue = CopyValue(value, valueNum);
                TrackMergedValueForKills(newValue, toData, nullptr);
                valuesCreatedForClone->Add(newValue);
            }
            bucket.element = newValue; // repoint the bucket at the private copy in place
        } NEXT_SLISTBASE_ENTRY;
    }

    valuesCreatedForClone->Clear(); // restore the empty-scratch-map invariant asserted on entry
    ProcessValueKills(toBlock, toData);
}
// Scan the back-edge block's value table for property syms that are eligible
// for field PRE (pre-loading in the loop's landing pad). Returns a list of
// candidate hash buckets, nullptr when there are no candidates, or nullptr to
// abort entirely when a live field has no value in the landing pad.
PRECandidatesList * GlobOpt::FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc)
{
    // Iterate over the value table looking for propertySyms which are candidates to
    // pre-load in the landing pad for field PRE

    GlobHashTable *valueTable = block->globOptData.symToValueMap;
    Loop *loop = block->loop;
    PRECandidatesList *candidates = nullptr; // allocated lazily on the first hit

    for (uint i = 0; i < valueTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &valueTable->table[i])
        {
            // Only property syms (o.x-style field accesses) are PRE candidates.
            Sym *sym = bucket.value;
            if (!sym->IsPropertySym())
            {
                continue;
            }

            PropertySym *propertySym = sym->AsPropertySym();

            // Field should be live on the back-edge
            if (!block->globOptData.liveFields->Test(propertySym->m_id))
            {
                continue;
            }

            // Field should be live in the landing pad as well
            if (!loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
            {
                continue;
            }

            // The field's value must be held in a stack sym we can reason about.
            Value *value = bucket.element;
            Sym *symStore = value->GetValueInfo()->GetSymStore();
            if (!symStore || !symStore->IsStackSym())
            {
                continue;
            }

            // Check upwardExposed in case of:
            //  s1 = 0;
            // loop:
            //        = o.x;
            //        foo();
            //    o.x = s1;
            // Can't thrash s1 in loop top.
            if (!symStore->AsStackSym()->IsSingleDef() || loop->GetHeadBlock()->upwardExposedUses->Test(symStore->m_id))
            {
                // If symStore isn't singleDef, we need to make sure it still has the same value.
                // This usually fails if we are not aggressive at transferring values in the prepass.
                Value **pSymStoreFromValue = valueTable->Get(symStore->m_id);

                // Consider: We should be fine if symStore isn't live in landing pad...
                if (!pSymStoreFromValue || (*pSymStoreFromValue)->GetValueNumber() != value->GetValueNumber())
                {
                    continue;
                }
            }

            BasicBlock *landingPad = loop->landingPad;
            Value *landingPadValue = landingPad->globOptData.FindValue(propertySym);
            if (!landingPadValue)
            {
                // Value should be added as initial value or already be there.
                // Aborts the whole scan (not just this sym): a live field with
                // no landing-pad value means PRE can't proceed for this loop.
                return nullptr;
            }

            // There must be a load instruction recorded by the prepass to
            // re-materialize in the landing pad; otherwise skip this sym.
            IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
            if (!ldInstr)
            {
                continue;
            }

            if (!candidates)
            {
                candidates = Anew(alloc, PRECandidatesList, alloc);
            }

            candidates->Prepend(&bucket);

        } NEXT_SLISTBASE_ENTRY;
    }

    return candidates;
}
// For a loop header being merged with its back-edge (outside the prepass),
// insert CloneStr instructions in the landing pad for string syms that are
// clone-candidates and temp sources in the loop, so the loop body can treat
// them as temps. Marks the cloned syms as temp sources in toData.
void GlobOpt::InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    if (toBlock->isLoopHeader   // isLoopBackEdge
        && toBlock->cloneStrCandidates
        && !IsLoopPrePass())
    {
        Loop *loop = toBlock->loop;
        BasicBlock *landingPad = loop->landingPad;
        const SymTable *const symTable = func->m_symTable;
        Assert(tempBv->IsEmpty());

        // Consider only syms that are both clone candidates and temp sources
        // on the back-edge.
        tempBv->And(toBlock->cloneStrCandidates, fromData->isTempSrc);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const sym = (StackSym *)symTable->Find(id);
            Assert(sym);

            // Sym must be live as a var both in the landing pad and in the loop.
            if (!landingPad->globOptData.liveVarSyms->Test(id)
                || !fromData->liveVarSyms->Test(id))
            {
                continue;
            }

            Value * landingPadValue = landingPad->globOptData.FindValue(sym);
            if (landingPadValue == nullptr)
            {
                continue;
            }

            Value * loopValue = fromData->FindValue(sym);
            if (loopValue == nullptr)
            {
                continue;
            }

            ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
            ValueInfo *loopValueInfo = loopValue->GetValueInfo();

            // Only clone when the sym is likely a string at both points.
            if (landingPadValueInfo->IsLikelyString()
                && loopValueInfo->IsLikelyString())
            {
                // CloneStr with the same sym as src and dst: clone in place.
                IR::Instr *cloneStr = IR::Instr::New(Js::OpCode::CloneStr, this->func);
                IR::RegOpnd *opnd = IR::RegOpnd::New(sym, IRType::TyVar, this->func);
                cloneStr->SetDst(opnd);
                cloneStr->SetSrc1(opnd);
                // Place the clone before the loop's bailout instruction if the
                // landing pad has one; otherwise append it to the landing pad.
                if (loop->bailOutInfo->bailOutInstr)
                {
                    loop->bailOutInfo->bailOutInstr->InsertBefore(cloneStr);
                }
                else
                {
                    landingPad->InsertAfter(cloneStr);
                }
                toData->isTempSrc->Set(id); // cloned copy is now safe to treat as a temp
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
        tempBv->ClearAll(); // restore the empty-tempBv invariant asserted on entry
    }
}