FlowGraph.cpp 176 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
747784779478047814782478347844785478647874788478947904791479247934794479547964797479847994800480148024803480448054806480748084809481048114812481348144815481648174818481948204821482248234824482548264827482848294830483148324833483448354836483748384839484048414842484348444845484648474848484948504851485248534854485548564857485848594860486148624863486448654866486748684869487048714872487348744875487648774878487948804881488248834884488548864887488848894890489148924893489448954896489748984899490049014902490349044905490649074908490949104911491249134914491549164917491849194920492149224923492449254926492749284929493049314932493349344935493649374938493949404941494249434944494549464947494849494950495149524953495449554956495749584959496049614962496349644965496649674968496949704971497249734974497549764977497849794980498149824983498449854986498749884989499049914992499349944995499649974998499950005001500250035004500550065007500850095010501150125013501450155016501750185019502050215022502350245025502650275028502950305031503250335034503550365037503850395040504150425043504450455046504750485049505050515052505350545055505650575058505950605061506250635064506550665067506850695070507150725073507450755076507750785079508050815082508350845085508650875088
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. FlowGraph *
  7. FlowGraph::New(Func * func, JitArenaAllocator * alloc)
  8. {
  9. FlowGraph * graph;
  10. graph = JitAnew(alloc, FlowGraph, func, alloc);
  11. return graph;
  12. }
  13. // When there is an early exit within an EH region,
  14. // Leave instructions are inserted in the bytecode to jump up the region tree one by one
  15. // We delete this Leave chain of instructions, and add an edge to Finally
  16. IR::LabelInstr * FlowGraph::DeleteLeaveChainBlocks(IR::BranchInstr *leaveInstr, IR::Instr * &instrPrev)
  17. {
  18. // Cleanup Rest of the Leave chain
  19. IR::LabelInstr * leaveTarget = leaveInstr->GetTarget();
  20. Assert(leaveTarget->GetNextRealInstr()->IsBranchInstr());
  21. IR::BranchInstr *leaveChain = leaveTarget->GetNextRealInstr()->AsBranchInstr();
  22. IR::LabelInstr * curLabel = leaveTarget->AsLabelInstr();
  23. while (leaveChain->m_opcode != Js::OpCode::Br)
  24. {
  25. Assert(leaveChain->m_opcode == Js::OpCode::Leave || leaveChain->m_opcode == Js::OpCode::BrOnException);
  26. IR::LabelInstr * nextLabel = leaveChain->m_next->AsLabelInstr();
  27. leaveChain = nextLabel->m_next->AsBranchInstr();
  28. BasicBlock *curBlock = curLabel->GetBasicBlock();
  29. this->RemoveBlock(curBlock);
  30. curLabel = nextLabel;
  31. }
  32. instrPrev = leaveChain->m_next;
  33. IR::LabelInstr * exitLabel = leaveChain->GetTarget();
  34. BasicBlock * curBlock = curLabel->GetBasicBlock();
  35. this->RemoveBlock(curBlock);
  36. return exitLabel;
  37. }
  38. bool FlowGraph::Dominates(Region *region1, Region *region2)
  39. {
  40. Assert(region1);
  41. Assert(region2);
  42. Region *startR1 = region1;
  43. Region *startR2 = region2;
  44. int region1Depth = 0;
  45. while (startR1 != nullptr)
  46. {
  47. region1Depth++;
  48. startR1 = startR1->GetParent();
  49. }
  50. int region2Depth = 0;
  51. while (startR2 != nullptr)
  52. {
  53. region2Depth++;
  54. startR2 = startR2->GetParent();
  55. }
  56. return region1Depth > region2Depth;
  57. }
  58. bool FlowGraph::DoesExitLabelDominate(IR::BranchInstr *leaveInstr)
  59. {
  60. IR::LabelInstr * leaveTarget = leaveInstr->GetTarget();
  61. Assert(leaveTarget->GetNextRealInstr()->IsBranchInstr());
  62. IR::BranchInstr *leaveChain = leaveTarget->GetNextRealInstr()->AsBranchInstr();
  63. while (leaveChain->m_opcode != Js::OpCode::Br)
  64. {
  65. Assert(leaveChain->m_opcode == Js::OpCode::Leave || leaveChain->m_opcode == Js::OpCode::BrOnException);
  66. IR::LabelInstr * nextLabel = leaveChain->m_next->AsLabelInstr();
  67. leaveChain = nextLabel->m_next->AsBranchInstr();
  68. }
  69. IR::LabelInstr * exitLabel = leaveChain->GetTarget();
  70. return Dominates(exitLabel->GetRegion(), finallyLabelStack->Top()->GetRegion());
  71. }
  72. bool FlowGraph::CheckIfEarlyExitAndAddEdgeToFinally(IR::BranchInstr *leaveInstr, Region *currentRegion, Region *branchTargetRegion, IR::Instr * &instrPrev, IR::LabelInstr * &exitLabel)
  73. {
  74. if (finallyLabelStack->Empty())
  75. {
  76. return false;
  77. }
  78. if (currentRegion->GetType() == RegionTypeTry)
  79. {
  80. if (currentRegion->GetMatchingCatchRegion() == nullptr)
  81. {
  82. // try of try-finally
  83. bool isEarly =
  84. (branchTargetRegion != currentRegion->GetMatchingFinallyRegion(false) &&
  85. branchTargetRegion != currentRegion->GetMatchingFinallyRegion(true));
  86. if (!isEarly) return false;
  87. if (DoesExitLabelDominate(leaveInstr)) return false;
  88. // Cleanup Rest of the Leave chain
  89. exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
  90. return true;
  91. }
  92. // try of try-catch
  93. IR::BranchInstr *leaveChain = leaveInstr;
  94. IR::LabelInstr * leaveTarget = leaveChain->GetTarget();
  95. IR::Instr *target = leaveTarget->GetNextRealInstr();
  96. if (target->m_opcode == Js::OpCode::Br)
  97. {
  98. IR::BranchInstr *tryExit = target->AsBranchInstr();
  99. instrPrev = tryExit;
  100. return false;
  101. }
  102. if (DoesExitLabelDominate(leaveInstr)) return false;
  103. // Cleanup Rest of the Leave chain
  104. exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
  105. return true;
  106. }
  107. if (currentRegion->GetType() == RegionTypeCatch)
  108. {
  109. // We don't care for early exits in catch blocks, because we bailout anyway
  110. return false;
  111. }
  112. Assert(currentRegion->GetType() == RegionTypeFinally);
  113. // All Leave's inside Finally region are early exits
  114. // Since we execute non-excepting Finallys in JIT now, we should convert Leave to Br
  115. if (DoesExitLabelDominate(leaveInstr)) return false;
  116. exitLabel = DeleteLeaveChainBlocks(leaveInstr, instrPrev);
  117. return true;
  118. }
  119. ///----------------------------------------------------------------------------
  120. ///
  121. /// FlowGraph::Build
  122. ///
  123. /// Construct flow graph and loop structures for the current state of the function.
  124. ///
  125. ///----------------------------------------------------------------------------
  126. void
  127. FlowGraph::Build(void)
  128. {
  129. Func * func = this->func;
  130. BEGIN_CODEGEN_PHASE(func, Js::FGPeepsPhase);
  131. this->RunPeeps();
  132. END_CODEGEN_PHASE(func, Js::FGPeepsPhase);
  133. // We don't optimize fully with SimpleJit. But, when JIT loop body is enabled, we do support
  134. // bailing out from a simple jitted function to do a full jit of a loop body in the function
  135. // (BailOnSimpleJitToFullJitLoopBody). For that purpose, we need the flow from try to handler.
  136. if (this->func->HasTry() && (this->func->DoOptimizeTry() ||
  137. (this->func->IsSimpleJit() && this->func->GetJITFunctionBody()->DoJITLoopBody())))
  138. {
  139. this->catchLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
  140. }
  141. if (this->func->HasFinally() && (this->func->DoOptimizeTry() ||
  142. (this->func->IsSimpleJit() && this->func->GetJITFunctionBody()->DoJITLoopBody())))
  143. {
  144. this->finallyLabelStack = JitAnew(this->alloc, SList<IR::LabelInstr*>, this->alloc);
  145. this->regToFinallyEndMap = JitAnew(this->alloc, RegionToFinallyEndMapType, this->alloc, 0);
  146. }
  147. IR::Instr * currLastInstr = nullptr;
  148. BasicBlock * currBlock = nullptr;
  149. BasicBlock * nextBlock = nullptr;
  150. bool hasCall = false;
  151. FOREACH_INSTR_IN_FUNC_BACKWARD_EDITING(instr, instrPrev, func)
  152. {
  153. if (currLastInstr == nullptr || instr->EndsBasicBlock())
  154. {
  155. // Start working on a new block.
  156. // If we're currently processing a block, then wrap it up before beginning a new one.
  157. if (currLastInstr != nullptr)
  158. {
  159. nextBlock = currBlock;
  160. currBlock = this->AddBlock(instr->m_next, currLastInstr, nextBlock);
  161. currBlock->hasCall = hasCall;
  162. hasCall = false;
  163. }
  164. currLastInstr = instr;
  165. }
  166. if (instr->StartsBasicBlock())
  167. {
  168. // Insert a BrOnException after the loop top if we are in a try-catch/try-finally. This is required to
  169. // model flow from the loop to the catch/finally block for loops that don't have a break condition.
  170. if (instr->IsLabelInstr() && instr->AsLabelInstr()->m_isLoopTop && instr->m_next->m_opcode != Js::OpCode::BrOnException)
  171. {
  172. IR::BranchInstr * brOnException = nullptr;
  173. IR::Instr *instrNext = instr->m_next;
  174. if (this->catchLabelStack && !this->catchLabelStack->Empty())
  175. {
  176. brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->catchLabelStack->Top(), instr->m_func);
  177. instr->InsertAfter(brOnException);
  178. }
  179. if (this->finallyLabelStack && !this->finallyLabelStack->Empty())
  180. {
  181. brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, this->finallyLabelStack->Top(), instr->m_func);
  182. instr->InsertAfter(brOnException);
  183. }
  184. if (brOnException)
  185. {
  186. instrPrev = instrNext->m_prev;
  187. continue;
  188. }
  189. }
  190. // Wrap up the current block and get ready to process a new one.
  191. nextBlock = currBlock;
  192. currBlock = this->AddBlock(instr, currLastInstr, nextBlock);
  193. currBlock->hasCall = hasCall;
  194. hasCall = false;
  195. currLastInstr = nullptr;
  196. }
  197. switch (instr->m_opcode)
  198. {
  199. case Js::OpCode::Catch:
  200. Assert(instr->m_prev->IsLabelInstr());
  201. if (this->catchLabelStack)
  202. {
  203. this->catchLabelStack->Push(instr->m_prev->AsLabelInstr());
  204. }
  205. break;
  206. case Js::OpCode::Finally:
  207. {
  208. if (!this->finallyLabelStack)
  209. {
  210. break;
  211. }
  212. //To enable globopt on functions with try finallys we transform the flowgraph as below :
  213. // TryFinally L1
  214. // <try code>
  215. // Leave L2
  216. // L2 : Br L3
  217. // L1 : <finally code>
  218. // LeaveNull
  219. // L3 : <code after try finally>
  220. //
  221. //to:
  222. //
  223. // TryFinally L1
  224. // <try code>
  225. // BrOnException L1
  226. // Leave L1'
  227. // L1 : BailOnException
  228. // L2 : Finally
  229. // <finally code>
  230. // LeaveNull
  231. // L3: <code after try finally>
  232. //We generate 2 flow edges from the try - an exception path and a non exception path.
  233. //This transformation enables us to optimize on the non-excepting finally path
  234. Assert(instr->m_prev->IsLabelInstr());
  235. IR::LabelInstr * finallyLabel = instr->m_prev->AsLabelInstr();
  236. // Find leave label
  237. Assert(finallyLabel->m_prev->m_opcode == Js::OpCode::Br && finallyLabel->m_prev->m_prev->m_opcode == Js::OpCode::Label);
  238. IR::Instr * insertPoint = finallyLabel->m_prev;
  239. IR::LabelInstr * leaveTarget = finallyLabel->m_prev->m_prev->AsLabelInstr();
  240. leaveTarget->Unlink();
  241. finallyLabel->InsertBefore(leaveTarget);
  242. finallyLabel->Unlink();
  243. insertPoint->InsertBefore(finallyLabel);
  244. // Bailout from the opcode following Finally
  245. IR::Instr * bailOnException = IR::BailOutInstr::New(Js::OpCode::BailOnException, IR::BailOutOnException, instr->m_next, instr->m_func);
  246. insertPoint->InsertBefore(bailOnException);
  247. insertPoint->Remove();
  248. this->finallyLabelStack->Push(finallyLabel);
  249. Assert(leaveTarget->labelRefs.HasOne());
  250. IR::BranchInstr * brOnException = IR::BranchInstr::New(Js::OpCode::BrOnException, finallyLabel, instr->m_func);
  251. leaveTarget->labelRefs.Head()->InsertBefore(brOnException);
  252. instrPrev = instr->m_prev;
  253. }
  254. break;
  255. case Js::OpCode::TryCatch:
  256. if (this->catchLabelStack)
  257. {
  258. AssertOrFailFast(!this->catchLabelStack->Empty());
  259. this->catchLabelStack->Pop();
  260. }
  261. break;
  262. case Js::OpCode::TryFinally:
  263. if (this->finallyLabelStack)
  264. {
  265. AssertOrFailFast(!this->finallyLabelStack->Empty());
  266. this->finallyLabelStack->Pop();
  267. }
  268. break;
  269. case Js::OpCode::CloneBlockScope:
  270. case Js::OpCode::CloneInnerScopeSlots:
  271. // It would be nice to do this in IRBuilder, but doing so gives us
  272. // trouble when doing the DoSlotArrayCheck since it assume single def
  273. // of the sym to do its check properly. So instead we assign the dst
  274. // here in FlowGraph.
  275. instr->SetDst(instr->GetSrc1());
  276. break;
  277. }
  278. if (OpCodeAttr::UseAllFields(instr->m_opcode))
  279. {
  280. // UseAllFields opcode are call instruction or opcode that would call.
  281. hasCall = true;
  282. if (OpCodeAttr::CallInstr(instr->m_opcode))
  283. {
  284. if (!instr->isCallInstrProtectedByNoProfileBailout)
  285. {
  286. instr->m_func->SetHasCallsOnSelfAndParents();
  287. }
  288. // For ARM, ARM64 & X64 (non x86) because of their register calling convention
  289. // the ArgOuts need to be moved next to the call.
  290. #if !defined(_M_IX86)
  291. IR::Instr* argInsertInstr = instr;
  292. instr->IterateArgInstrs([&](IR::Instr* argInstr)
  293. {
  294. if (argInstr->m_opcode != Js::OpCode::LdSpreadIndices &&
  295. argInstr->m_opcode != Js::OpCode::ArgOut_A_Dynamic &&
  296. argInstr->m_opcode != Js::OpCode::ArgOut_A_FromStackArgs &&
  297. argInstr->m_opcode != Js::OpCode::ArgOut_A_SpreadArg)
  298. {
  299. // don't have bailout in asm.js so we don't need BytecodeArgOutCapture
  300. if (!argInstr->m_func->GetJITFunctionBody()->IsAsmJsMode())
  301. {
  302. // Need to always generate byte code arg out capture,
  303. // because bailout can't restore from the arg out as it is
  304. // replaced by new sym for register calling convention in lower
  305. argInstr->GenerateBytecodeArgOutCapture();
  306. }
  307. // Check if the instruction is already next
  308. if (argInstr != argInsertInstr->m_prev)
  309. {
  310. // It is not, move it.
  311. argInstr->Move(argInsertInstr);
  312. }
  313. argInsertInstr = argInstr;
  314. }
  315. return false;
  316. });
  317. #endif
  318. }
  319. }
  320. }
  321. NEXT_INSTR_IN_FUNC_BACKWARD_EDITING;
  322. this->func->isFlowGraphValid = true;
  323. Assert(!this->catchLabelStack || this->catchLabelStack->Empty());
  324. Assert(!this->finallyLabelStack || this->finallyLabelStack->Empty());
  325. // We've been walking backward so that edge lists would be in the right order. Now walk the blocks
  326. // forward to number the blocks in lexical order.
  327. unsigned int blockNum = 0;
  328. FOREACH_BLOCK(block, this)
  329. {
  330. block->SetBlockNum(blockNum++);
  331. }NEXT_BLOCK;
  332. AssertMsg(blockNum == this->blockCount, "Block count is out of whack");
  333. this->RemoveUnreachableBlocks();
  334. // Regions need to be assigned before Globopt because:
  335. // 1. FullJit: The Backward Pass will set the write-through symbols on the regions and the forward pass will
  336. // use this information to insert ToVars for those symbols. Also, for a symbol determined as write-through
  337. // in the try region to be restored correctly by the bailout, it should not be removed from the
  338. // byteCodeUpwardExposedUsed upon a def in the try region (the def might be preempted by an exception).
  339. //
  340. // 2. SimpleJit: Same case of correct restoration as above applies in SimpleJit too. However, the only bailout
  341. // we have in Simple Jitted code right now is BailOnSimpleJitToFullJitLoopBody, installed in IRBuilder. So,
  342. // for now, we can just check if the func has a bailout to assign regions pre globopt while running SimpleJit.
  343. bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
  344. (this->func->IsSimpleJit() && this->func->hasBailout));
  345. blockNum = 0;
  346. FOREACH_BLOCK_ALL(block, this)
  347. {
  348. block->SetBlockNum(blockNum++);
  349. if (assignRegionsBeforeGlobopt)
  350. {
  351. if (block->isDeleted && !block->isDead)
  352. {
  353. continue;
  354. }
  355. this->UpdateRegionForBlock(block);
  356. }
  357. } NEXT_BLOCK_ALL;
  358. AssertMsg(blockNum == this->blockCount, "Block count is out of whack");
  359. #if DBG_DUMP
  360. if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  361. {
  362. if (assignRegionsBeforeGlobopt)
  363. {
  364. Output::Print(_u("Before adding early exit edges\n"));
  365. FOREACH_BLOCK_ALL(block, this)
  366. {
  367. block->DumpHeader(true);
  368. Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
  369. if (region)
  370. {
  371. const char16 * regMap[] = { _u("RegionTypeInvalid"),
  372. _u("RegionTypeRoot"),
  373. _u("RegionTypeTry"),
  374. _u("RegionTypeCatch"),
  375. _u("RegionTypeFinally") };
  376. Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
  377. }
  378. } NEXT_BLOCK_ALL;
  379. this->func->Dump();
  380. }
  381. }
  382. #endif
  383. if (this->finallyLabelStack)
  384. {
  385. Assert(this->finallyLabelStack->Empty());
  386. // Add s0 definition at the beginning of the function
  387. // We need this because - s0 symbol can get added to bytcodeUpwardExposed use when there are early returns,
  388. // And globopt will complain that s0 is uninitialized, because we define it to undefined only at the end of the function
  389. const auto addrOpnd = IR::AddrOpnd::New(this->func->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, this->func, true);
  390. addrOpnd->SetValueType(ValueType::Undefined);
  391. IR::RegOpnd *regOpnd = IR::RegOpnd::New(this->func->m_symTable->FindStackSym(0), TyVar, this->func);
  392. IR::Instr *ldRet = IR::Instr::New(Js::OpCode::Ld_A, regOpnd, addrOpnd, this->func);
  393. this->func->m_headInstr->GetNextRealInstr()->InsertBefore(ldRet);
  394. IR::LabelInstr * currentLabel = nullptr;
  395. // look for early exits from a try, and insert bailout
  396. FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, func)
  397. {
  398. if (instr->IsLabelInstr())
  399. {
  400. currentLabel = instr->AsLabelInstr();
  401. }
  402. else if (instr->m_opcode == Js::OpCode::TryFinally)
  403. {
  404. Assert(instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionTypeFinally);
  405. this->finallyLabelStack->Push(instr->AsBranchInstr()->GetTarget());
  406. }
  407. else if (instr->m_opcode == Js::OpCode::Leave)
  408. {
  409. IR::LabelInstr *branchTarget = instr->AsBranchInstr()->GetTarget();
  410. IR::LabelInstr *exitLabel = nullptr;
  411. // An early exit (break, continue, return) from an EH region appears as Leave opcode
  412. // When there is an early exit within a try finally, we have to execute finally code
  413. // Currently we bailout on early exits
  414. // For all such edges add edge from eh region -> finally and finally -> earlyexit
  415. Assert(currentLabel != nullptr);
  416. if (currentLabel && CheckIfEarlyExitAndAddEdgeToFinally(instr->AsBranchInstr(), currentLabel->GetRegion(), branchTarget->GetRegion(), instrNext, exitLabel))
  417. {
  418. Assert(exitLabel);
  419. IR::Instr * bailOnEarlyExit = IR::BailOutInstr::New(Js::OpCode::BailOnEarlyExit, IR::BailOutOnEarlyExit, instr, instr->m_func);
  420. instr->InsertBefore(bailOnEarlyExit);
  421. IR::LabelInstr *exceptFinallyLabel = this->finallyLabelStack->Top();
  422. IR::LabelInstr *nonExceptFinallyLabel = exceptFinallyLabel->m_next->m_next->AsLabelInstr();
  423. // It is possible for the finally region to have a non terminating loop, in which case the end of finally is eliminated
  424. // We can skip adding edge from finally to early exit in this case
  425. IR::Instr * leaveToFinally = IR::BranchInstr::New(Js::OpCode::Leave, exceptFinallyLabel, this->func);
  426. instr->InsertBefore(leaveToFinally);
  427. instr->Remove();
  428. this->AddEdge(currentLabel->GetBasicBlock(), exceptFinallyLabel->GetBasicBlock());
  429. if (this->regToFinallyEndMap->ContainsKey(nonExceptFinallyLabel->GetRegion()))
  430. {
  431. BasicBlock * finallyEndBlock = this->regToFinallyEndMap->Item(nonExceptFinallyLabel->GetRegion());
  432. Assert(finallyEndBlock);
  433. Assert(finallyEndBlock->GetFirstInstr()->AsLabelInstr()->GetRegion() == nonExceptFinallyLabel->GetRegion());
  434. InsertEdgeFromFinallyToEarlyExit(finallyEndBlock, exitLabel);
  435. }
  436. }
  437. else if (currentLabel && currentLabel->GetRegion()->GetType() == RegionTypeFinally)
  438. {
  439. Assert(currentLabel->GetRegion()->GetMatchingTryRegion()->GetMatchingFinallyRegion(false) == currentLabel->GetRegion());
  440. // Convert Leave to Br because we execute non-excepting Finally in native code
  441. instr->m_opcode = Js::OpCode::Br;
  442. #if DBG
  443. instr->AsBranchInstr()->m_leaveConvToBr = true;
  444. #endif
  445. }
  446. }
  447. else if (instr->m_opcode == Js::OpCode::Finally)
  448. {
  449. AssertOrFailFast(!this->finallyLabelStack->Empty());
  450. this->finallyLabelStack->Pop();
  451. }
  452. }
  453. NEXT_INSTR_IN_FUNC_EDITING;
  454. this->RemoveUnreachableBlocks();
  455. blockNum = 0;
  456. FOREACH_BLOCK_ALL(block, this)
  457. {
  458. block->SetBlockNum(blockNum++);
  459. } NEXT_BLOCK_ALL;
  460. }
  461. this->FindLoops();
  462. #if DBG_DUMP
  463. if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  464. {
  465. if (assignRegionsBeforeGlobopt)
  466. {
  467. Output::Print(_u("After adding early exit edges/Before CanonicalizeLoops\n"));
  468. FOREACH_BLOCK_ALL(block, this)
  469. {
  470. block->DumpHeader(true);
  471. Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
  472. if (region)
  473. {
  474. const char16 * regMap[] = { _u("RegionTypeInvalid"),
  475. _u("RegionTypeRoot"),
  476. _u("RegionTypeTry"),
  477. _u("RegionTypeCatch"),
  478. _u("RegionTypeFinally") };
  479. Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
  480. }
  481. } NEXT_BLOCK_ALL;
  482. this->func->Dump();
  483. }
  484. }
  485. #endif
  486. bool breakBlocksRelocated = this->CanonicalizeLoops();
  487. blockNum = 0;
  488. FOREACH_BLOCK_ALL(block, this)
  489. {
  490. block->SetBlockNum(blockNum++);
  491. } NEXT_BLOCK_ALL;
  492. #if DBG
  493. FOREACH_BLOCK_ALL(block, this)
  494. {
  495. if (assignRegionsBeforeGlobopt)
  496. {
  497. if (block->GetFirstInstr()->AsLabelInstr()->GetRegion())
  498. {
  499. Assert(block->GetFirstInstr()->AsLabelInstr()->GetRegion()->ehBailoutData);
  500. }
  501. }
  502. } NEXT_BLOCK_ALL;
  503. #endif
  504. #if DBG_DUMP
  505. if (PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  506. {
  507. if (assignRegionsBeforeGlobopt)
  508. {
  509. Output::Print(_u("After CanonicalizeLoops\n"));
  510. FOREACH_BLOCK_ALL(block, this)
  511. {
  512. block->DumpHeader(true);
  513. Region *region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
  514. if (region)
  515. {
  516. const char16 * regMap[] = { _u("RegionTypeInvalid"),
  517. _u("RegionTypeRoot"),
  518. _u("RegionTypeTry"),
  519. _u("RegionTypeCatch"),
  520. _u("RegionTypeFinally") };
  521. Output::Print(_u("Region %p RegionParent %p RegionType %s\n"), region, region->GetParent(), regMap[region->GetType()]);
  522. }
  523. } NEXT_BLOCK_ALL;
  524. this->func->Dump();
  525. }
  526. }
  527. #endif
  528. if (breakBlocksRelocated)
  529. {
  530. // Sort loop lists only if there is break block removal.
  531. SortLoopLists();
  532. }
  533. #if DBG
  534. this->VerifyLoopGraph();
  535. #endif
  536. #if DBG_DUMP
  537. this->Dump(false, nullptr);
  538. #endif
  539. }
// Splice a control-flow path from the terminating instruction of a finally
// region to an early-exit target (break/continue/return destination), so a
// non-excepting finally can flow to the early exit in jitted code.
//
// finallyEndBlock - block ending the finally region; must terminate with
//                   Leave, LeaveNull, or BrOnException (asserted below).
// exitLabel       - label of the early-exit target outside the region.
//
// Also re-registers the new trailing block in regToFinallyEndMap so later
// early exits from the same region find the updated "end of finally" block.
void FlowGraph::InsertEdgeFromFinallyToEarlyExit(BasicBlock *finallyEndBlock, IR::LabelInstr *exitLabel)
{
    IR::Instr * lastInstr = finallyEndBlock->GetLastInstr();
    IR::LabelInstr * lastLabel = finallyEndBlock->GetFirstInstr()->AsLabelInstr();
    Assert(lastInstr->m_opcode == Js::OpCode::LeaveNull || lastInstr->m_opcode == Js::OpCode::Leave || lastInstr->m_opcode == Js::OpCode::BrOnException);

    // Add a new block, add BrOnException to earlyexit, assign region
    // Finally
    // ...
    // L1:
    // LeaveNull
    // to
    // Finally
    // ...
    // L1:
    // BrOnException earlyExitLabel
    // L1':
    // LeaveNull
    BasicBlock *nextBB = finallyEndBlock->GetNext();

    // Split the trailing Leave/LeaveNull off into its own block (headed by
    // leaveLabel), which inherits the finally region of the original block.
    IR::LabelInstr *leaveLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func);
    lastInstr->InsertBefore(leaveLabel);
    this->AddBlock(leaveLabel, lastInstr, nextBB, finallyEndBlock /*prevBlock*/);
    leaveLabel->SetRegion(lastLabel->GetRegion());
    this->AddEdge(finallyEndBlock, leaveLabel->GetBasicBlock());

    // If the Leave/LeaveNull at the end of finally was not preceeded by a Label, we have to create a new block with BrOnException to early exit
    // NOTE(review): leaveLabel was just inserted immediately before lastInstr,
    // so lastInstr's previous real-instr-or-label appears to be leaveLabel
    // itself here — confirm which instruction this check is intended to probe.
    if (!lastInstr->GetPrevRealInstrOrLabel()->IsLabelInstr())
    {
        IR::LabelInstr *brLabel = IR::LabelInstr::New(Js::OpCode::Label, this->func);
        leaveLabel->InsertBefore(brLabel);

        IR::BranchInstr *brToExit = IR::BranchInstr::New(Js::OpCode::BrOnException, exitLabel, this->func);
        brToExit->m_brFinallyToEarlyExit = true;
        brToExit->SetByteCodeOffset(lastInstr);
        leaveLabel->InsertBefore(brToExit);

        this->AddBlock(brLabel, brToExit, finallyEndBlock->GetNext(), finallyEndBlock /*prevBlock*/);
        brLabel->SetRegion(lastLabel->GetRegion());
        this->AddEdge(finallyEndBlock, brLabel->GetBasicBlock());
    }
    else
    {
        // If the Leave/LeaveNull at the end of finally was preceeded by a Label, we reuse the block inserting BrOnException to early exit in it
        // NOTE(review): this branch adds the edge directly to exitLabel's
        // block, whereas the branch above adds it to the new brLabel block —
        // asymmetry looks deliberate but is worth confirming.
        IR::BranchInstr *brToExit = IR::BranchInstr::New(Js::OpCode::BrOnException, exitLabel, this->func);
        brToExit->SetByteCodeOffset(lastInstr);
        brToExit->m_brFinallyToEarlyExit = true;
        leaveLabel->InsertBefore(brToExit);
        this->AddEdge(finallyEndBlock, exitLabel->GetBasicBlock());
    }

    // In case of throw/non-terminating loop, there maybe no edge to the next block
    if (this->FindEdge(finallyEndBlock, nextBB))
    {
        finallyEndBlock->RemoveSucc(nextBB, this);
    }

    // Record the new trailing block as the end of this finally region.
    this->regToFinallyEndMap->Item(lastLabel->GetRegion(), leaveLabel->GetBasicBlock());
}
// Restore each loop's block list to ascending block-number (lexical) order.
// Break-block relocation can leave the per-loop lists out of order; since the
// lists are "almost sorted" afterwards, an insertion sort is cheap here.
void
FlowGraph::SortLoopLists()
{
    // Sort the blocks in loopList
    for (Loop *loop = this->loopList; loop; loop = loop->next)
    {
        // High-water mark of block numbers seen so far, seeded with the head.
        unsigned int lastBlockNumber = loop->GetHeadBlock()->GetBlockNum();

        // Insertion sort as the blockList is almost sorted in the loop.
        FOREACH_BLOCK_IN_LOOP_EDITING(block, loop, iter)
        {
            if (lastBlockNumber <= block->GetBlockNum())
            {
                // Still ascending; advance the high-water mark.
                lastBlockNumber = block->GetBlockNum();
            }
            else
            {
                // Out of order: unlink this block and reinsert it before the
                // first block whose number exceeds it.
                iter.UnlinkCurrent();
                FOREACH_BLOCK_IN_LOOP_EDITING(insertBlock,loop,newIter)
                {
                    if (insertBlock->GetBlockNum() > block->GetBlockNum())
                    {
                        break;
                    }
                }NEXT_BLOCK_IN_LOOP_EDITING;
                // newIter remains positioned at the insertion point after the
                // inner loop (editing-iterator macro keeps it in scope).
                newIter.InsertBefore(block);
            }
        }NEXT_BLOCK_IN_LOOP_EDITING;
    }
}
// Run branch/compare peephole optimizations over the whole function.
// Skipped entirely for functions with try (EH regions make branch rewriting
// unsafe here) and when the FGPeeps phase is disabled.
//
// Two pieces of state thread through the walk:
//  - instrCm: the most recent Cm* compare whose result may feed a later
//    BrTrue/BrFalse (useless branches can sit between them).
//  - tryUnsignedCmpPeep: set once a ShrU by 0 is seen, enabling the
//    unsigned-compare peep on subsequent compares/branches.
void
FlowGraph::RunPeeps()
{
    if (this->func->HasTry())
    {
        return;
    }

    if (PHASE_OFF(Js::FGPeepsPhase, this->func))
    {
        return;
    }

    IR::Instr * instrCm = nullptr;
    bool tryUnsignedCmpPeep = false;

    FOREACH_INSTR_IN_FUNC_EDITING(instr, instrNext, this->func)
    {
        switch(instr->m_opcode)
        {
        // All plain conditional/unconditional branches: try the unsigned
        // compare peep (if enabled), then the generic branch peep.
        case Js::OpCode::Br:
        case Js::OpCode::BrEq_I4:
        case Js::OpCode::BrGe_I4:
        case Js::OpCode::BrGt_I4:
        case Js::OpCode::BrLt_I4:
        case Js::OpCode::BrLe_I4:
        case Js::OpCode::BrUnGe_I4:
        case Js::OpCode::BrUnGt_I4:
        case Js::OpCode::BrUnLt_I4:
        case Js::OpCode::BrUnLe_I4:
        case Js::OpCode::BrNeq_I4:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrUnGe_A:
        case Js::OpCode::BrUnGt_A:
        case Js::OpCode::BrUnLt_A:
        case Js::OpCode::BrUnLe_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrNotNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrNotNull_A:
        case Js::OpCode::BrNotAddr_A:
        case Js::OpCode::BrAddr_A:
        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrOnHasProperty:
        case Js::OpCode::BrOnNoProperty:
        case Js::OpCode::BrHasSideEffects:
        case Js::OpCode::BrNotHasSideEffects:
        case Js::OpCode::BrFncEqApply:
        case Js::OpCode::BrFncNeqApply:
        case Js::OpCode::BrOnEmpty:
        case Js::OpCode::BrOnNotEmpty:
        case Js::OpCode::BrFncCachedScopeEq:
        case Js::OpCode::BrFncCachedScopeNeq:
        case Js::OpCode::BrOnObject_A:
        case Js::OpCode::BrOnClassConstructor:
        case Js::OpCode::BrOnBaseConstructorKind:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            break;

        case Js::OpCode::MultiBr:
            // TODO: Run peeps on these as well...
            break;

        // Boolean branches: if a pending compare feeds this branch, run the
        // Cm/Br fusion peep; otherwise just peep the branch.
        case Js::OpCode::BrTrue_I4:
        case Js::OpCode::BrFalse_I4:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instrCm)
            {
                if (instrCm->GetDst()->IsInt32())
                {
                    // An int32 compare result can only reach an _I4 branch.
                    Assert(instr->m_opcode == Js::OpCode::BrTrue_I4 || instr->m_opcode == Js::OpCode::BrFalse_I4);
                    instrNext = this->PeepTypedCm(instrCm);
                }
                else
                {
                    instrNext = this->PeepCm(instrCm);
                }
                instrCm = nullptr;

                if (instrNext == nullptr)
                {
                    // Set instrNext back to the current instr.
                    instrNext = instr;
                }
            }
            else
            {
                instrNext = Peeps::PeepBranch(instr->AsBranchInstr());
            }
            break;

        // Signed/typed compares: optionally convert to unsigned form first,
        // then (via fall-through) record the compare for a later BrTrue/BrFalse.
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmSrNeq_A:
            if (tryUnsignedCmpPeep)
            {
                this->UnsignedCmpPeep(instr);
            }
            // Intentional fall-through: all compares below just record instrCm.
        case Js::OpCode::CmUnGe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGe_A:
        case Js::OpCode::CmUnGt_A:
        case Js::OpCode::CmUnLt_A:
        case Js::OpCode::CmUnLe_A:
            // There may be useless branches between the Cm instr and the branch that uses the result.
            // So save the last Cm instr seen, and trigger the peep on the next BrTrue/BrFalse.
            instrCm = instr;
            break;

        case Js::OpCode::Label:
            if (instr->AsLabelInstr()->IsUnreferenced())
            {
                instrNext = Peeps::PeepUnreachableLabel(instr->AsLabelInstr(), false);
            }
            break;

        case Js::OpCode::StatementBoundary:
            // Re-anchor the boundary's byte-code offset on the following real
            // instruction (peeps above may have removed the original anchor).
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(instr->GetNextRealInstrOrLabel());
            break;

        // ShrU by constant 0 marks the value as "known unsigned", enabling
        // the unsigned-compare peep for the rest of the walk.
        case Js::OpCode::ShrU_I4:
        case Js::OpCode::ShrU_A:
            if (tryUnsignedCmpPeep)
            {
                break;
            }
            if (instr->GetDst()->AsRegOpnd()->m_sym->IsSingleDef()
                && instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->IsTaggableIntConst()
                && instr->GetSrc2()->AsRegOpnd()->m_sym->GetIntConstValue() == 0)
            {
                tryUnsignedCmpPeep = true;
            }
            break;

        default:
            // Every branch opcode must be handled in a case above.
            Assert(!instr->IsBranchInstr());
        }
    } NEXT_INSTR_IN_FUNC_EDITING;
}
// Create a landing-pad block immediately before this loop's head block and
// redirect all non-back-edge predecessors to it. Globopt hoists
// loop-invariant instructions into the landing pad.
void
Loop::InsertLandingPad(FlowGraph *fg)
{
    BasicBlock *headBlock = this->GetHeadBlock();

    // Always create a landing pad. This allows globopt to easily hoist instructions
    // and re-optimize the block if needed.
    BasicBlock *landingPad = BasicBlock::New(fg);
    this->landingPad = landingPad;
    IR::Instr * headInstr = headBlock->GetFirstInstr();
    IR::LabelInstr *landingPadLabel = IR::LabelInstr::New(Js::OpCode::Label, headInstr->m_func);
    landingPadLabel->SetByteCodeOffset(headInstr);
    headInstr->InsertBefore(landingPadLabel);

    // The pad inherits the head block's EH region and non-branch-ref flag.
    landingPadLabel->SetBasicBlock(landingPad);
    landingPadLabel->SetRegion(headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion());
    landingPadLabel->m_hasNonBranchRef = headBlock->GetFirstInstr()->AsLabelInstr()->m_hasNonBranchRef;

    landingPad->SetBlockNum(fg->blockCount++);
    landingPad->SetFirstInstr(landingPadLabel);
    landingPad->SetLastInstr(landingPadLabel);

    // Link the pad into the flowgraph block chain just before the head.
    landingPad->prev = headBlock->prev;
    landingPad->prev->next = landingPad;
    landingPad->next = headBlock;
    headBlock->prev = landingPad;

    Loop *parentLoop = this->parent;
    landingPad->loop = parentLoop;

    // We need to add this block to the block list of the parent loops
    while (parentLoop)
    {
        // Find the head block in the block list of the parent loop
        FOREACH_BLOCK_IN_LOOP_EDITING(block, parentLoop, iter)
        {
            if (block == headBlock)
            {
                // Add the landing pad to the block list
                iter.InsertBefore(landingPad);
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        parentLoop = parentLoop->parent;
    }

    // Fix predecessor flow edges
    FOREACH_PREDECESSOR_EDGE_EDITING(edge, headBlock, iter)
    {
        // Make sure it isn't a back-edge
        if (edge->GetPred()->loop != this && !this->IsDescendentOrSelf(edge->GetPred()->loop))
        {
            // Retarget the predecessor's branch (if any) at the pad's label,
            // then move the edge itself from the head to the pad.
            if (edge->GetPred()->GetLastInstr()->IsBranchInstr() && headBlock->GetFirstInstr()->IsLabelInstr())
            {
                IR::BranchInstr *branch = edge->GetPred()->GetLastInstr()->AsBranchInstr();
                branch->ReplaceTarget(headBlock->GetFirstInstr()->AsLabelInstr(), landingPadLabel);
            }
            headBlock->UnlinkPred(edge->GetPred(), false);
            landingPad->AddPred(edge, fg);
            edge->SetSucc(landingPad);
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    fg->AddEdge(landingPad, headBlock);

    // Inside a non-root EH region, keep the pad label alive even if no branch
    // references it (region bookkeeping needs the label).
    if (headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion() && headBlock->GetFirstInstr()->AsLabelInstr()->GetRegion()->GetType() != RegionTypeRoot)
    {
        landingPadLabel->m_hasNonBranchRef = true;
    }
}
// Relocate "break blocks" — blocks lexically inside this loop's span that do
// not belong to the loop (or a nested loop) — to after the loop's tail block,
// so the loop body is contiguous. Returns true if anything was moved.
bool
Loop::RemoveBreakBlocks(FlowGraph *fg)
{
    bool breakBlockRelocated = false;
    if (PHASE_OFF(Js::RemoveBreakBlockPhase, fg->GetFunc()))
    {
        return false;
    }

    // Find the lexically-last block of this loop.
    BasicBlock *loopTailBlock = nullptr;
    FOREACH_BLOCK_IN_LOOP(block, this)
    {
        loopTailBlock = block;
    }NEXT_BLOCK_IN_LOOP;

    AnalysisAssert(loopTailBlock);

    // Walk backward from the tail toward the head looking for blocks that
    // are not part of this loop (or a descendant).
    FOREACH_BLOCK_BACKWARD_IN_RANGE_EDITING(breakBlockEnd, loopTailBlock, this->GetHeadBlock(), blockPrev)
    {
        while (!this->IsDescendentOrSelf(breakBlockEnd->loop))
        {
            // Found at least one break block;
            breakBlockRelocated = true;

#if DBG
            breakBlockEnd->isBreakBlock = true;
#endif
            // Find the first block in this break block sequence.
            BasicBlock *breakBlockStart = breakBlockEnd;
            BasicBlock *breakBlockStartPrev = breakBlockEnd->GetPrev();

            // Walk back the blocks until we find a block which belongs to that block.
            // Note: We don't really care if there are break blocks corresponding to different loops. We move the blocks conservatively to the end of the loop.
            // Algorithm works on one loop at a time.
            while((breakBlockStartPrev->loop == breakBlockEnd->loop) || !this->IsDescendentOrSelf(breakBlockStartPrev->loop))
            {
                breakBlockStart = breakBlockStartPrev;
                breakBlockStartPrev = breakBlockStartPrev->GetPrev();
            }

#if DBG
            breakBlockStart->isBreakBlock = true; // Mark the first block as well.
#endif

            BasicBlock *exitLoopTail = loopTailBlock;
            // Move these break blocks to the tail of the loop.
            fg->MoveBlocksBefore(breakBlockStart, breakBlockEnd, exitLoopTail->next);

#if DBG_DUMP
            fg->Dump(true /*needs verbose flag*/, _u("\n After Each iteration of canonicalization \n"));
#endif
            // Again be conservative, there are edits to the loop graph. Start fresh for this loop.
            breakBlockEnd = loopTailBlock;
            blockPrev = breakBlockEnd->prev;
        }
    } NEXT_BLOCK_BACKWARD_IN_RANGE_EDITING;

    return breakBlockRelocated;
}
// Move the contiguous block range [blockStart, blockEnd] so it sits
// immediately before insertBlock, fixing block linkage, instruction linkage,
// fall-through flow (via compensation blocks), and EH region info.
void
FlowGraph::MoveBlocksBefore(BasicBlock *blockStart, BasicBlock *blockEnd, BasicBlock *insertBlock)
{
    BasicBlock *srcPredBlock = blockStart->prev;
    BasicBlock *srcNextBlock = blockEnd->next;
    BasicBlock *dstPredBlock = insertBlock->prev;
    IR::Instr* dstPredBlockLastInstr = dstPredBlock->GetLastInstr();
    IR::Instr* blockEndLastInstr = blockEnd->GetLastInstr();

    // Fix block linkage
    srcPredBlock->next = srcNextBlock;
    srcNextBlock->prev = srcPredBlock;

    dstPredBlock->next = blockStart;
    insertBlock->prev = blockEnd;

    blockStart->prev = dstPredBlock;
    blockEnd->next = insertBlock;

    // Fix instruction linkage
    IR::Instr::MoveRangeAfter(blockStart->GetFirstInstr(), blockEndLastInstr, dstPredBlockLastInstr);

    // Fix instruction flow
    IR::Instr *srcLastInstr = srcPredBlock->GetLastInstr();
    if (srcLastInstr->IsBranchInstr() && srcLastInstr->AsBranchInstr()->HasFallThrough())
    {
        // There was a fallthrough in the break blocks original position.
        IR::BranchInstr *srcBranch = srcLastInstr->AsBranchInstr();
        IR::Instr *srcBranchNextInstr = srcBranch->GetNextRealInstrOrLabel();

        // Save the target and invert the branch.
        IR::LabelInstr *srcBranchTarget = srcBranch->GetTarget();
        srcPredBlock->InvertBranch(srcBranch);
        IR::LabelInstr *srcLabel = blockStart->GetFirstInstr()->AsLabelInstr();

        // Point the inverted branch to break block.
        srcBranch->SetTarget(srcLabel);

        if (srcBranchNextInstr != srcBranchTarget)
        {
            // The original taken path was not the lexical successor; insert a
            // compensation block at the source to preserve that edge.
            FlowEdge *srcEdge = this->FindEdge(srcPredBlock, srcBranchTarget->GetBasicBlock());
            Assert(srcEdge);

            BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(srcEdge, true /*insert compensation block to loop list*/, false /*At source*/);
            Assert(compensationBlock);
        }
    }

    IR::Instr *dstLastInstr = dstPredBlockLastInstr;
    bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout));
    if (dstLastInstr->IsBranchInstr() && dstLastInstr->AsBranchInstr()->HasFallThrough())
    {
        //There is a fallthrough in the block after which break block is inserted.
        FlowEdge *dstEdge = this->FindEdge(dstPredBlock, blockEnd->GetNext());
        Assert(dstEdge);

        BasicBlock *compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert compensation block to loop list*/, true /*At sink*/);
        Assert(compensationBlock);
    }

    // We have to update region info for blocks whose predecessors changed
    if (assignRegionsBeforeGlobopt)
    {
        UpdateRegionForBlockFromEHPred(dstPredBlock, true);
        UpdateRegionForBlockFromEHPred(blockStart, true);
        UpdateRegionForBlockFromEHPred(srcNextBlock, true);
    }
}
  947. FlowEdge *
  948. FlowGraph::FindEdge(BasicBlock *predBlock, BasicBlock *succBlock)
  949. {
  950. FlowEdge *srcEdge = nullptr;
  951. FOREACH_SUCCESSOR_EDGE(edge, predBlock)
  952. {
  953. if (edge->GetSucc() == succBlock)
  954. {
  955. srcEdge = edge;
  956. break;
  957. }
  958. } NEXT_SUCCESSOR_EDGE;
  959. return srcEdge;
  960. }
  961. void
  962. BasicBlock::InvertBranch(IR::BranchInstr *branch)
  963. {
  964. Assert(this->GetLastInstr() == branch);
  965. Assert(this->GetSuccList()->HasTwo());
  966. branch->Invert();
  967. this->GetSuccList()->Reverse();
  968. }
  969. bool
  970. FlowGraph::CanonicalizeLoops()
  971. {
  972. if (this->func->HasProfileInfo())
  973. {
  974. this->implicitCallFlags = this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags();
  975. for (Loop *loop = this->loopList; loop; loop = loop->next)
  976. {
  977. this->implicitCallFlags = (Js::ImplicitCallFlags)(this->implicitCallFlags | loop->GetImplicitCallFlags());
  978. }
  979. }
  980. #if DBG_DUMP
  981. this->Dump(true, _u("\n Before canonicalizeLoops \n"));
  982. #endif
  983. bool breakBlockRelocated = false;
  984. for (Loop *loop = this->loopList; loop; loop = loop->next)
  985. {
  986. loop->InsertLandingPad(this);
  987. if (!this->func->HasTry() || this->func->DoOptimizeTry())
  988. {
  989. bool relocated = loop->RemoveBreakBlocks(this);
  990. if (!breakBlockRelocated && relocated)
  991. {
  992. breakBlockRelocated = true;
  993. }
  994. }
  995. }
  996. #if DBG_DUMP
  997. this->Dump(true, _u("\n After canonicalizeLoops \n"));
  998. #endif
  999. return breakBlockRelocated;
  1000. }
// Find the loops in this function, build the loop structure, and build a linked
// list of the basic blocks in this loop (including blocks of inner loops). The
// list preserves the reverse post-order of the blocks in the flowgraph block list.
void
FlowGraph::FindLoops()
{
    // hasLoop is set while building the graph; nothing to do otherwise.
    if (!this->hasLoop)
    {
        return;
    }

    Func * func = this->func;

    // Walk blocks bottom-up so inner loops are discovered before outer ones.
    FOREACH_BLOCK_BACKWARD_IN_FUNC(block, func)
    {
        if (block->loop != nullptr)
        {
            // Block already visited
            continue;
        }
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader && succ->loop == nullptr)
            {
                // Found a loop back-edge
                BuildLoop(succ, block);
            }
        } NEXT_SUCCESSOR_BLOCK;
        if (block->isLoopHeader && block->loop == nullptr)
        {
            // We would have built a loop for it if it was a loop...
            // Clear the stale header flag and its label's loop-top mark.
            block->isLoopHeader = false;
            block->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        }
    } NEXT_BLOCK_BACKWARD_IN_FUNC;
}
// Build the Loop structure for the back-edge tailBlock -> headBlock, link it
// into the flowgraph's loop list, walk its member blocks, and seed its
// profile-derived flags. parentLoop (optional) is a candidate enclosing loop.
void
FlowGraph::BuildLoop(BasicBlock *headBlock, BasicBlock *tailBlock, Loop *parentLoop)
{
    // This function is recursive, so when jitting in the foreground, probe the stack
    if(!func->IsBackgroundJIT())
    {
        PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
    }

    if (tailBlock->number < headBlock->number)
    {
        // Not a loop. We didn't see any back-edge.
        headBlock->isLoopHeader = false;
        headBlock->GetFirstInstr()->AsLabelInstr()->m_isLoopTop = false;
        return;
    }

    Assert(headBlock->isLoopHeader);
    Loop *loop = JitAnewZ(this->GetFunc()->m_alloc, Loop, this->GetFunc()->m_alloc, this->GetFunc());
    loop->next = this->loopList;
    this->loopList = loop;
    headBlock->loop = loop;
    loop->headBlock = headBlock;
    loop->int32SymsOnEntry = nullptr;
    loop->lossyInt32SymsOnEntry = nullptr;

    // If parentLoop is a parent of loop, it's headBlock better appear first.
    if (parentLoop && loop->headBlock->number > parentLoop->headBlock->number)
    {
        loop->parent = parentLoop;
        parentLoop->isLeaf = false;
    }
    loop->hasDeadStoreCollectionPass = false;
    loop->hasDeadStorePrepass = false;
    loop->memOpInfo = nullptr;
    loop->doMemOp = true;

    // Temporary allocator for the block-membership bit-vector used below.
    NoRecoverMemoryJitArenaAllocator tempAlloc(_u("BE-LoopBuilder"), this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);

    WalkLoopBlocks(tailBlock, loop, &tempAlloc);

    Assert(loop->GetHeadBlock() == headBlock);

    IR::LabelInstr * firstInstr = loop->GetLoopTopInstr();

    firstInstr->SetLoop(loop);

    if (firstInstr->IsProfiledLabelInstr())
    {
        // Seed the loop's flags from the profiled loop-top label; fall back
        // to the function-wide flags when per-loop info is disabled.
        loop->SetImplicitCallFlags(firstInstr->AsProfiledLabelInstr()->loopImplicitCallFlags);
        if (this->func->HasProfileInfo() && this->func->GetReadOnlyProfileInfo()->IsLoopImplicitCallInfoDisabled())
        {
            loop->SetImplicitCallFlags(this->func->GetReadOnlyProfileInfo()->GetImplicitCallFlags());
        }
        loop->SetLoopFlags(firstInstr->AsProfiledLabelInstr()->loopFlags);
    }
    else
    {
        // Didn't collect profile information, don't do optimizations
        loop->SetImplicitCallFlags(Js::ImplicitCall_All);
    }
}
  1088. Loop::MemCopyCandidate* Loop::MemOpCandidate::AsMemCopy()
  1089. {
  1090. Assert(this->IsMemCopy());
  1091. return (Loop::MemCopyCandidate*)this;
  1092. }
  1093. Loop::MemSetCandidate* Loop::MemOpCandidate::AsMemSet()
  1094. {
  1095. Assert(this->IsMemSet());
  1096. return (Loop::MemSetCandidate*)this;
  1097. }
  1098. void
  1099. Loop::EnsureMemOpVariablesInitialized()
  1100. {
  1101. Assert(this->doMemOp);
  1102. if (this->memOpInfo == nullptr)
  1103. {
  1104. JitArenaAllocator *allocator = this->GetFunc()->GetTopFunc()->m_fg->alloc;
  1105. this->memOpInfo = JitAnewStruct(allocator, Loop::MemOpInfo);
  1106. this->memOpInfo->inductionVariablesUsedAfterLoop = nullptr;
  1107. this->memOpInfo->startIndexOpndCache[0] = nullptr;
  1108. this->memOpInfo->startIndexOpndCache[1] = nullptr;
  1109. this->memOpInfo->startIndexOpndCache[2] = nullptr;
  1110. this->memOpInfo->startIndexOpndCache[3] = nullptr;
  1111. this->memOpInfo->inductionVariableChangeInfoMap = JitAnew(allocator, Loop::InductionVariableChangeInfoMap, allocator);
  1112. this->memOpInfo->inductionVariableOpndPerUnrollMap = JitAnew(allocator, Loop::InductionVariableOpndPerUnrollMap, allocator);
  1113. this->memOpInfo->candidates = JitAnew(allocator, Loop::MemOpList, allocator);
  1114. }
  1115. }
// Walk the basic blocks backwards until we find the loop header.
// Mark basic blocks in the loop by looking at the predecessors
// of blocks known to be in the loop.
// Recurse on inner loops.
//
// block: the tail block of the loop (source of the back-edge).
// loop:  the loop being populated; its headBlock is already set.
// tempAlloc: scratch arena for the membership bit vector.
void
FlowGraph::WalkLoopBlocks(BasicBlock *block, Loop *loop, JitArenaAllocator *tempAlloc)
{
    AnalysisAssert(loop);

    // Bit vector of block numbers known to be inside this loop.
    BVSparse<JitArenaAllocator> *loopBlocksBv = JitAnew(tempAlloc, BVSparse<JitArenaAllocator>, tempAlloc);
    BasicBlock *tailBlock = block;
    BasicBlock *lastBlock;

    // The tail block is in the loop by definition.
    loopBlocksBv->Set(block->GetBlockNum());
    this->AddBlockToLoop(block, loop);

    if (block == loop->headBlock)
    {
        // Single block loop, we're done
        return;
    }

    // Walk lexically backwards from the tail toward the header.
    do
    {
        BOOL isInLoop = loopBlocksBv->Test(block->GetBlockNum());

        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (succ->isLoopHeader)
            {
                // Found a loop back-edge
                if (loop->headBlock == succ)
                {
                    isInLoop = true;
                }
                else if (succ->loop == nullptr || succ->loop->headBlock != succ)
                {
                    // Recurse on inner loop
                    BuildLoop(succ, block, isInLoop ? loop : nullptr);
                }
            }
        } NEXT_SUCCESSOR_BLOCK;

        if (isInLoop)
        {
            // This block is in the loop. All of its predecessors should be contained in the loop as well.
            FOREACH_PREDECESSOR_BLOCK(pred, block)
            {
                // Fix up loop parent if it isn't set already.
                // If pred->loop != loop, we're looking at an inner loop, which was already visited.
                // If pred->loop->parent == nullptr, this is the first time we see this loop from an outer
                // loop, so this must be an immediate child.
                if (pred->loop && pred->loop != loop && loop->headBlock->number < pred->loop->headBlock->number
                    && (pred->loop->parent == nullptr || pred->loop->parent->headBlock->number < loop->headBlock->number))
                {
                    pred->loop->parent = loop;
                    loop->isLeaf = false;
                    // Propagate inner-loop properties (calls, implicit call flags) outward.
                    if (pred->loop->hasCall)
                    {
                        loop->SetHasCall();
                    }
                    loop->SetImplicitCallFlags(pred->loop->GetImplicitCallFlags());
                }
                // Add pred to loop bit vector
                loopBlocksBv->Set(pred->GetBlockNum());
            } NEXT_PREDECESSOR_BLOCK;

            // Only take ownership of the block if its current loop is this loop or
            // one nested inside it, so blocks keep pointing at their innermost loop.
            if (block->loop == nullptr || block->loop->IsDescendentOrSelf(loop))
            {
                block->loop = loop;
            }

            // The tail block was already added before the walk started.
            if (block != tailBlock)
            {
                this->AddBlockToLoop(block, loop);
            }
        }
        lastBlock = block;
        block = block->GetPrev();
    } while (lastBlock != loop->headBlock);
}
  1189. // Add block to this loop, and it's parent loops.
  1190. void
  1191. FlowGraph::AddBlockToLoop(BasicBlock *block, Loop *loop)
  1192. {
  1193. loop->blockList.Prepend(block);
  1194. if (block->hasCall)
  1195. {
  1196. loop->SetHasCall();
  1197. }
  1198. }
///----------------------------------------------------------------------------
///
/// FlowGraph::AddBlock
///
/// Finish processing of a new block: hook up successor arcs, note loops, etc.
///
/// firstInstr/lastInstr: the instruction span of the new block.
/// nextBlock: the lexically following block (fall-through successor).
/// prevBlock: the lexically preceding block, or null to prepend to the list.
/// Returns the (possibly pre-existing) BasicBlock for this span.
///
///----------------------------------------------------------------------------
BasicBlock *
FlowGraph::AddBlock(
    IR::Instr * firstInstr,
    IR::Instr * lastInstr,
    BasicBlock * nextBlock,
    BasicBlock *prevBlock)
{
    BasicBlock * block;
    IR::LabelInstr * labelInstr;

    // Every block starts with a label; synthesize one if the first
    // instruction isn't already a label.
    if (firstInstr->IsLabelInstr())
    {
        labelInstr = firstInstr->AsLabelInstr();
    }
    else
    {
        labelInstr = IR::LabelInstr::New(Js::OpCode::Label, firstInstr->m_func);
        labelInstr->SetByteCodeOffset(firstInstr);
        // The entry instruction must stay first, so the label goes after it;
        // otherwise the label precedes the first instruction.
        if (firstInstr->IsEntryInstr())
        {
            firstInstr->InsertAfter(labelInstr);
        }
        else
        {
            firstInstr->InsertBefore(labelInstr);
        }
        firstInstr = labelInstr;
    }

    // The label may already carry a block (created when it was seen as a
    // branch target); otherwise create a fresh one.
    block = labelInstr->GetBasicBlock();
    if (block == nullptr)
    {
        block = BasicBlock::New(this);
        labelInstr->SetBasicBlock(block);
        // Remember last block in function to target successor of RETs.
        if (!this->tailBlock)
        {
            this->tailBlock = block;
        }
    }

    // Hook up the successor edges
    if (lastInstr->EndsBasicBlock())
    {
        BasicBlock * blockTarget = nullptr;

        if (lastInstr->IsBranchInstr())
        {
            // Hook up a successor edge to the branch target.
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if(branchInstr->IsMultiBranch())
            {
                // Multi-branch: add one edge per unique target label.
                BasicBlock * blockMultiBrTarget;
                IR::MultiBranchInstr * multiBranchInstr = branchInstr->AsMultiBrInstr();
                multiBranchInstr->MapUniqueMultiBrLabels([&](IR::LabelInstr * labelInstr) -> void
                {
                    blockMultiBrTarget = SetBlockTargetAndLoopFlag(labelInstr);
                    this->AddEdge(block, blockMultiBrTarget);
                });
            }
            else
            {
                IR::LabelInstr * targetLabelInstr = branchInstr->GetTarget();
                blockTarget = SetBlockTargetAndLoopFlag(targetLabelInstr);
                if (branchInstr->IsConditional())
                {
                    // A conditional branch also falls through: make sure the
                    // fall-through label (if any) has a block/loop flag too.
                    IR::Instr *instrNext = branchInstr->GetNextRealInstrOrLabel();
                    if (instrNext->IsLabelInstr())
                    {
                        SetBlockTargetAndLoopFlag(instrNext->AsLabelInstr());
                    }
                }
            }
        }
        else if (lastInstr->m_opcode == Js::OpCode::Ret && block != this->tailBlock)
        {
            // RET flows to the function's tail block.
            blockTarget = this->tailBlock;
        }

        if (blockTarget)
        {
            this->AddEdge(block, blockTarget);
        }
    }

    if (lastInstr->HasFallThrough())
    {
        // Add a branch to next instruction so that we don't have to update the flow graph
        // when the glob opt tries to insert instructions.
        // We don't run the globopt with try/catch, don't need to insert branch to next for fall through blocks.
        if (!this->func->HasTry() && !lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * instr = IR::BranchInstr::New(Js::OpCode::Br,
                lastInstr->m_next->AsLabelInstr(), lastInstr->m_func);
            instr->SetByteCodeOffset(lastInstr->m_next);
            lastInstr->InsertAfter(instr);
            lastInstr = instr;
        }
        this->AddEdge(block, nextBlock);
    }

    block->SetBlockNum(this->blockCount++);
    block->SetFirstInstr(firstInstr);
    block->SetLastInstr(lastInstr);

    // Splice the block into the doubly linked block list:
    // prepend when no prevBlock is given, otherwise insert between
    // prevBlock and nextBlock.
    if (!prevBlock)
    {
        if (this->blockList)
        {
            this->blockList->prev = block;
        }
        block->next = this->blockList;
        this->blockList = block;
    }
    else
    {
        prevBlock->next = block;
        block->prev = prevBlock;
        block->next = nextBlock;
        nextBlock->prev = block;
    }

    return block;
}
  1321. BasicBlock *
  1322. FlowGraph::SetBlockTargetAndLoopFlag(IR::LabelInstr * labelInstr)
  1323. {
  1324. BasicBlock * blockTarget = nullptr;
  1325. blockTarget = labelInstr->GetBasicBlock();
  1326. if (blockTarget == nullptr)
  1327. {
  1328. blockTarget = BasicBlock::New(this);
  1329. labelInstr->SetBasicBlock(blockTarget);
  1330. }
  1331. if (labelInstr->m_isLoopTop)
  1332. {
  1333. blockTarget->isLoopHeader = true;
  1334. this->hasLoop = true;
  1335. }
  1336. return blockTarget;
  1337. }
  1338. ///----------------------------------------------------------------------------
  1339. ///
  1340. /// FlowGraph::AddEdge
  1341. ///
  1342. /// Add an edge connecting the two given blocks.
  1343. ///
  1344. ///----------------------------------------------------------------------------
  1345. FlowEdge *
  1346. FlowGraph::AddEdge(BasicBlock * blockPred, BasicBlock * blockSucc)
  1347. {
  1348. FlowEdge * edge = FlowEdge::New(this);
  1349. edge->SetPred(blockPred);
  1350. edge->SetSucc(blockSucc);
  1351. blockPred->AddSucc(edge, this);
  1352. blockSucc->AddPred(edge, this);
  1353. return edge;
  1354. }
///----------------------------------------------------------------------------
///
/// FlowGraph::Destroy
///
/// Remove all references to FG structures from the IR in preparation for freeing
/// the FG. Also: marks loop-top instructions for the register allocator,
/// assigns EH regions when the function has try, strips unreferenced labels
/// and redundant branch-to-next instructions.
///
///----------------------------------------------------------------------------
void
FlowGraph::Destroy(void)
{
    BOOL fHasTry = this->func->HasTry();
    if (fHasTry)
    {
        // Do unreachable code removal up front to avoid problems
        // with unreachable back edges, etc.
        this->RemoveUnreachableBlocks();
    }

    FOREACH_BLOCK_ALL(block, this)
    {
        IR::Instr * firstInstr = block->GetFirstInstr();
        // Deleted-but-not-dead blocks: detach the label and (for non-try
        // functions) remove it entirely, then skip the rest of the teardown.
        if (block->isDeleted && !block->isDead)
        {
            if (firstInstr->IsLabelInstr())
            {
                IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
                labelInstr->UnlinkBasicBlock();
                // Removing the label for non try blocks as we have a deleted block which has the label instruction
                // still not removed; this prevents the assert for cases where the deleted blocks fall through to a helper block,
                // i.e. helper introduced by polymorphic inlining bailout.
                // Skipping Try blocks as we have dependency on blocks to get the last instr(see below in this function)
                if (!fHasTry)
                {
                    if (this->func->GetJITFunctionBody()->IsCoroutine())
                    {
                        // the label could be a yield resume label, in which case we also need to remove it from the YieldOffsetResumeLabels list
                        this->func->MapUntilYieldOffsetResumeLabels([this, &labelInstr](int i, const YieldOffsetResumeLabel& yorl)
                        {
                            if (labelInstr == yorl.Second())
                            {
                                labelInstr->m_hasNonBranchRef = false;
                                this->func->RemoveYieldOffsetResumeLabel(yorl);
                                return true;
                            }
                            return false;
                        });
                    }
                    Assert(labelInstr->IsUnreferenced());
                    labelInstr->Remove();
                }
            }
            continue;
        }

        if (block->isLoopHeader && !block->isDead)
        {
            // Mark the tail block of this loop (the last back-edge). The register allocator
            // uses this to lexically find loops.
            BasicBlock *loopTail = nullptr;
            AssertMsg(firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->m_isLoopTop,
                "Label not marked as loop top...");
            // Find a loop block with an edge back to this header.
            FOREACH_BLOCK_IN_LOOP(loopBlock, block->loop)
            {
                FOREACH_SUCCESSOR_BLOCK(succ, loopBlock)
                {
                    if (succ == block)
                    {
                        loopTail = loopBlock;
                        break;
                    }
                } NEXT_SUCCESSOR_BLOCK;
            } NEXT_BLOCK_IN_LOOP;

            if (loopTail)
            {
                AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
                block->loop->SetLoopTopInstr(block->GetFirstInstr()->AsLabelInstr());
            }
            else
            {
                // This loop doesn't have a back-edge: that is, it is not a loop
                // anymore...
                firstInstr->AsLabelInstr()->m_isLoopTop = FALSE;
            }
        }

        if (fHasTry)
        {
            // Assign/propagate the EH region for this block.
            this->UpdateRegionForBlock(block);
        }

        if (firstInstr->IsLabelInstr())
        {
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->UnlinkBasicBlock();
            if (labelInstr->IsUnreferenced() && !fHasTry)
            {
                // This is an unreferenced label, probably added by FG building.
                // Delete it now to make extended basic blocks visible.
                if (firstInstr == block->GetLastInstr())
                {
                    // Label was the block's only instruction; nothing left to do.
                    labelInstr->Remove();
                    continue;
                }
                else
                {
                    labelInstr->Remove();
                }
            }
        }

        IR::Instr * lastInstr = block->GetLastInstr();
        if (lastInstr->IsBranchInstr())
        {
            IR::BranchInstr * branchInstr = lastInstr->AsBranchInstr();
            if (!branchInstr->IsConditional() && branchInstr->GetTarget() == branchInstr->m_next)
            {
                // Remove branch to next
                branchInstr->Remove();
            }
        }
    }
    NEXT_BLOCK;

#if DBG
    if (fHasTry)
    {
        // Now that all blocks have regions, we should see consistently propagated regions at all
        // block boundaries.
        FOREACH_BLOCK(block, this)
        {
            Region * region = block->GetFirstInstr()->AsLabelInstr()->GetRegion();
            Region * predRegion = nullptr;
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predBlock->GetLastInstr() == nullptr)
                {
                    AssertMsg(region == predRegion, "Bad region propagation through empty block");
                }
                else
                {
                    // Check the pred->block transition against the opcode that
                    // ends the predecessor.
                    switch (predBlock->GetLastInstr()->m_opcode)
                    {
                    case Js::OpCode::TryCatch:
                    case Js::OpCode::TryFinally:
                        AssertMsg(region->GetParent() == predRegion, "Bad region prop on entry to try-catch/finally");
                        if (block->GetFirstInstr() == predBlock->GetLastInstr()->AsBranchInstr()->GetTarget())
                        {
                            if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::TryCatch)
                            {
                                AssertMsg(region->GetType() == RegionTypeCatch, "Bad region type on entry to catch");
                            }
                            else
                            {
                                AssertMsg(region->GetType() == RegionTypeFinally, "Bad region type on entry to finally");
                            }
                        }
                        else
                        {
                            AssertMsg(region->GetType() == RegionTypeTry, "Bad region type on entry to try");
                        }
                        break;
                    case Js::OpCode::Leave:
                        AssertMsg(region == predRegion->GetParent() || (predRegion->GetType() == RegionTypeTry && predRegion->GetMatchingFinallyRegion(false) == region) ||
                            (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;
                    case Js::OpCode::LeaveNull:
                        AssertMsg(region == predRegion->GetParent() || (region == predRegion && this->func->IsLoopBodyInTry()), "Bad region prop on leaving try-catch/finally");
                        break;
                    // If the try region has a branch out of the loop,
                    // - the branch is moved out of the loop as part of break block removal, and
                    // - BrOnException is inverted to BrOnNoException and a Br is inserted after it.
                    // Otherwise,
                    // - FullJit: BrOnException is removed in the forward pass.
                    case Js::OpCode::BrOnException:
                        Assert(!this->func->DoGlobOpt());
                        break;
                    case Js::OpCode::BrOnNoException:
                        Assert(region->GetType() == RegionTypeTry || region->GetType() == RegionTypeCatch || region->GetType() == RegionTypeFinally ||
                            // A BrOnException from finally to early exit can be converted to BrOnNoException and Br
                            // The Br block maybe a common successor block for early exit along with the BrOnNoException block
                            // Region from Br block will be picked up from a predecessor which is not BrOnNoException due to early exit
                            // See test0() in test/EH/tryfinallytests.js
                            (predRegion->GetType() == RegionTypeFinally && predBlock->GetLastInstr()->AsBranchInstr()->m_brFinallyToEarlyExit));
                        break;
                    case Js::OpCode::Br:
                        if (predBlock->GetLastInstr()->AsBranchInstr()->m_leaveConvToBr)
                        {
                            // Leave converted to Br in finally region
                            AssertMsg(region == predRegion->GetParent(), "Bad region prop in finally");
                        }
                        else if (region->GetType() == RegionTypeCatch && region != predRegion)
                        {
                            AssertMsg(predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else if (region->GetType() == RegionTypeFinally && region != predRegion)
                        {
                            // We may be left with edges from finally region to early exit
                            AssertMsg(predRegion->IsNonExceptingFinally() || predRegion->GetType() == RegionTypeTry, "Bad region type for the try");
                        }
                        else
                        {
                            // We may be left with edges from finally region to early exit
                            AssertMsg(predRegion->IsNonExceptingFinally() || region == predRegion, "Bad region propagation through interior block");
                        }
                        break;
                    default:
                        break;
                    }
                }
            }
            NEXT_PREDECESSOR_BLOCK;

            // Sanity-check the region's own matching try/catch/finally links.
            switch (region->GetType())
            {
            case RegionTypeRoot:
                Assert(!region->GetMatchingTryRegion() && !region->GetMatchingCatchRegion() && !region->GetMatchingFinallyRegion(true) && !region->GetMatchingFinallyRegion(false));
                break;
            case RegionTypeTry:
                if ((this->func->IsSimpleJit() && this->func->hasBailout) || !this->func->DoOptimizeTry())
                {
                    Assert((region->GetMatchingCatchRegion() != nullptr) ^ (region->GetMatchingFinallyRegion(true) && !region->GetMatchingFinallyRegion(false)));
                }
                else
                {
                    Assert((region->GetMatchingCatchRegion() != nullptr) ^ (region->GetMatchingFinallyRegion(true) && region->GetMatchingFinallyRegion(false)));
                }
                break;
            case RegionTypeCatch:
            case RegionTypeFinally:
                Assert(region->GetMatchingTryRegion());
                break;
            }
        }
        NEXT_BLOCK;

        // Strip remaining unreferenced labels (DBG + try path only; the
        // non-try path removed them during the main walk above).
        FOREACH_BLOCK_ALL(block, this)
        {
            if (block->GetFirstInstr()->IsLabelInstr())
            {
                IR::LabelInstr *labelInstr = block->GetFirstInstr()->AsLabelInstr();
                if (labelInstr->IsUnreferenced())
                {
                    // This is an unreferenced label, probably added by FG building.
                    // Delete it now to make extended basic blocks visible.
                    labelInstr->Remove();
                }
            }
        } NEXT_BLOCK;
    }
#endif

    this->func->isFlowGraphValid = false;
}
  1601. bool FlowGraph::IsEHTransitionInstr(IR::Instr *instr)
  1602. {
  1603. Js::OpCode op = instr->m_opcode;
  1604. return (op == Js::OpCode::TryCatch || op == Js::OpCode::TryFinally || op == Js::OpCode::Leave || op == Js::OpCode::LeaveNull);
  1605. }
  1606. BasicBlock * FlowGraph::GetPredecessorForRegionPropagation(BasicBlock *block)
  1607. {
  1608. BasicBlock *ehPred = nullptr;
  1609. FOREACH_PREDECESSOR_BLOCK(predBlock, block)
  1610. {
  1611. Region * predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
  1612. if (IsEHTransitionInstr(predBlock->GetLastInstr()) && predRegion)
  1613. {
  1614. // MGTODO : change this to return, once you know there can exist only one eh transitioning pred
  1615. Assert(ehPred == nullptr);
  1616. ehPred = predBlock;
  1617. }
  1618. AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
  1619. }
  1620. NEXT_PREDECESSOR_BLOCK;
  1621. return ehPred;
  1622. }
// Propagate the region forward from the block's predecessor(s), tracking the effect
// of the flow transition. Record the region in the block-to-region map provided
// and on the label at the entry to the block (if any).
// We need to know the end of finally for inserting edge at the end of finally to early exit
// Store it in regToFinallyEndMap as we visit blocks instead of recomputing later while adding early exit edges
void
FlowGraph::UpdateRegionForBlock(BasicBlock * block)
{
    Region *region;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();

    // Already has a region: only legal in the optimized-try / simple-jit-with-
    // bailout configurations; nothing to do.
    if (firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        Assert(this->func->HasTry() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        return;
    }

    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        region = nullptr;
        FOREACH_PREDECESSOR_BLOCK(predBlock, block)
        {
            AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
            predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
            if (predRegion != nullptr)
            {
                region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                break;
            }
        }
        NEXT_PREDECESSOR_BLOCK;

        // A Leave/LeaveNull ending a non-excepting finally marks (a candidate
        // for) the end of that finally region; remember it for early-exit
        // edge insertion.
        if (block->GetLastInstr()->m_opcode == Js::OpCode::LeaveNull || block->GetLastInstr()->m_opcode == Js::OpCode::Leave)
        {
            if (this->regToFinallyEndMap && region->IsNonExceptingFinally())
            {
                BasicBlock *endOfFinally = regToFinallyEndMap->ContainsKey(region) ? regToFinallyEndMap->Item(region) : nullptr;
                if (!endOfFinally)
                {
                    regToFinallyEndMap->Add(region, block);
                }
                else
                {
                    Assert(endOfFinally->GetLastInstr()->m_opcode != Js::OpCode::LeaveNull || block == endOfFinally);
                    regToFinallyEndMap->Item(region, block);
                }
            }
        }
    }

    Assert(region || block->GetPredList()->Count() == 0);
    if (region && !region->ehBailoutData)
    {
        region->AllocateEHBailoutData(this->func, tryInstr);
    }

    Assert(firstInstr->IsLabelInstr());
    if (firstInstr->IsLabelInstr())
    {
        // Record the region on the label and make sure it stays around as a region
        // marker if we're entering a region at this point.
        IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
        labelInstr->SetRegion(region);
        if (region != predRegion)
        {
            labelInstr->m_hasNonBranchRef = true;
        }
        // One of the pred blocks maybe an eh region, in that case it is important to mark this label's m_hasNonBranchRef
        // If not later in codegen, this label can get deleted. And during SccLiveness, region is propagated to newly created labels in lowerer from the previous label's region
        // We can end up assigning an eh region to a label in a non eh region. And if there is a bailout in such a region, bad things will happen in the interpreter :)
        // See test2()/test3() in tryfinallytests.js
        if (!labelInstr->m_hasNonBranchRef)
        {
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != region)
                {
                    labelInstr->m_hasNonBranchRef = true;
                    break;
                }
            }
            NEXT_PREDECESSOR_BLOCK;
        }
    }
}
// Like UpdateRegionForBlock, but used after break-block removal: the region
// must be propagated from the EH-transitioning predecessor (when one exists)
// rather than from an arbitrary predecessor. With reassign, overwrites an
// already-assigned region.
void
FlowGraph::UpdateRegionForBlockFromEHPred(BasicBlock * block, bool reassign)
{
    Region *region = nullptr;
    Region * predRegion = nullptr;
    IR::Instr * tryInstr = nullptr;
    IR::Instr * firstInstr = block->GetFirstInstr();

    // Already assigned and not reassigning: nothing to do.
    if (!reassign && firstInstr->IsLabelInstr() && firstInstr->AsLabelInstr()->GetRegion())
    {
        Assert(this->func->HasTry() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)));
        return;
    }
    if (block->isDead || block->isDeleted)
    {
        // We can end up calling this function with such blocks, return doing nothing
        // See test5() in tryfinallytests.js
        return;
    }

    if (block == this->blockList)
    {
        // Head of the graph: create the root region.
        region = Region::New(RegionTypeRoot, nullptr, this->func);
    }
    else if (block->GetPredList()->Count() == 1)
    {
        // Single predecessor: propagate directly from it.
        BasicBlock *predBlock = block->GetPredList()->Head()->GetPred();
        AssertMsg(predBlock->GetBlockNum() < this->blockCount, "Misnumbered block at teardown time?");
        predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
        Assert(predRegion);
        region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
    }
    else
    {
        // Propagate the region forward by finding a predecessor we've already processed.
        // Since we do break block removal after region propagation, we cannot pick the first predecessor which has an assigned region
        // If there is a eh transitioning pred, we pick that
        // There cannot be more than one eh transitioning pred (?)
        BasicBlock *ehPred = this->GetPredecessorForRegionPropagation(block);
        if (ehPred)
        {
            predRegion = ehPred->GetFirstInstr()->AsLabelInstr()->GetRegion();
            Assert(predRegion != nullptr);
            region = this->PropagateRegionFromPred(block, ehPred, predRegion, tryInstr);
        }
        else
        {
            FOREACH_PREDECESSOR_BLOCK(predBlock, block)
            {
                predRegion = predBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != nullptr)
                {
                    if ((predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnException || predBlock->GetLastInstr()->m_opcode == Js::OpCode::BrOnNoException) &&
                        predBlock->GetLastInstr()->AsBranchInstr()->m_brFinallyToEarlyExit)
                    {
                        Assert(predRegion->IsNonExceptingFinally());
                        // BrOnException from finally region to early exit
                        // Skip this edge
                        continue;
                    }
                    if (predBlock->GetLastInstr()->m_opcode == Js::OpCode::Br &&
                        predBlock->GetLastInstr()->GetPrevRealInstr()->m_opcode == Js::OpCode::BrOnNoException)
                    {
                        Assert(predBlock->GetLastInstr()->GetPrevRealInstr()->AsBranchInstr()->m_brFinallyToEarlyExit);
                        Assert(predRegion->IsNonExceptingFinally());
                        // BrOnException from finally region to early exit changed to BrOnNoException and Br during break block removal
                        continue;
                    }
                    region = this->PropagateRegionFromPred(block, predBlock, predRegion, tryInstr);
                    break;
                }
            }
            NEXT_PREDECESSOR_BLOCK;
        }
    }

    Assert(region || block->GetPredList()->Count() == 0 || block->firstInstr->AsLabelInstr()->GetRegion());
    if (region)
    {
        if (!region->ehBailoutData)
        {
            region->AllocateEHBailoutData(this->func, tryInstr);
        }

        Assert(firstInstr->IsLabelInstr());
        if (firstInstr->IsLabelInstr())
        {
            // Record the region on the label and make sure it stays around as a region
            // marker if we're entering a region at this point.
            IR::LabelInstr * labelInstr = firstInstr->AsLabelInstr();
            labelInstr->SetRegion(region);
            if (region != predRegion)
            {
                labelInstr->m_hasNonBranchRef = true;
            }
        }
    }
}
// Compute the region for 'block' given one predecessor and its region, by
// examining the opcode that ends the predecessor (the flow transition).
// tryInstr (out): set to the TryCatch/TryFinally instruction when the
// transition enters a try or finally, for EH bailout data allocation.
Region *
FlowGraph::PropagateRegionFromPred(BasicBlock * block, BasicBlock * predBlock, Region * predRegion, IR::Instr * &tryInstr)
{
    // Propagate predRegion to region, looking at the flow transition for an opcode
    // that affects the region.
    Region * region = nullptr;
    IR::Instr * predLastInstr = predBlock->GetLastInstr();
    IR::Instr * firstInstr = block->GetFirstInstr();
    if (predLastInstr == nullptr)
    {
        // Empty block: trivially propagate the region.
        region = predRegion;
    }
    else
    {
        Region * tryRegion = nullptr;
        IR::LabelInstr * tryInstrNext = nullptr;
        switch (predLastInstr->m_opcode)
        {
        case Js::OpCode::TryCatch:
            // Entry to a try-catch. See whether we're entering the try or the catch
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target of TryCatch is the catch handler.
                region = Region::New(RegionTypeCatch, predRegion, this->func);
                Assert(tryRegion);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingCatchRegion(region);
            }
            else
            {
                // Fall-through of TryCatch enters the try body.
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::TryFinally:
            // Entry to a try-finally. See whether we're entering the try or the finally
            // by looking for the handler label.
            Assert(predLastInstr->m_next->IsLabelInstr());
            tryInstrNext = predLastInstr->m_next->AsLabelInstr();
            tryRegion = tryInstrNext->GetRegion();

            if (firstInstr == predLastInstr->AsBranchInstr()->GetTarget())
            {
                // Branch target of TryFinally is the (excepting) finally handler.
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region = Region::New(RegionTypeFinally, predRegion, this->func);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, true);
                tryInstr = predLastInstr;
            }
            else
            {
                region = Region::New(RegionTypeTry, predRegion, this->func);
                tryInstr = predLastInstr;
            }
            break;

        case Js::OpCode::Leave:
            // Leave into a Finally opcode: entering the non-excepting finally
            // for the try region we are leaving.
            if (firstInstr->m_next && firstInstr->m_next->m_opcode == Js::OpCode::Finally)
            {
                tryRegion = predRegion;
                Assert(tryRegion->GetMatchingFinallyRegion(true) != nullptr);
                region = Region::New(RegionTypeFinally, predRegion->GetParent(), this->func);
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, false);
                break;
            }
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;

        case Js::OpCode::LeaveNull:
            // Exiting a try or handler. Retrieve the current region's parent.
            region = predRegion->GetParent();
            if (region == nullptr)
            {
                // We found a Leave in the root region- this can only happen when a jitted loop body
                // in a try block has a return statement.
                Assert(this->func->IsLoopBodyInTry());
                predLastInstr->AsBranchInstr()->m_isOrphanedLeave = true;
                region = predRegion;
            }
            break;

        case Js::OpCode::BailOnException:
            // Infinite loop, no edge to non excepting finally
            if (firstInstr->m_next && firstInstr->m_next->m_opcode == Js::OpCode::Finally)
            {
                tryRegion = predRegion->GetMatchingTryRegion();
                region = Region::New(RegionTypeFinally, predRegion->GetParent(), this->func);
                Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry);
                region->SetMatchingTryRegion(tryRegion);
                tryRegion->SetMatchingFinallyRegion(region, false);
            }
            break;

        case Js::OpCode::BrOnException:
            // Infinite loop inside another EH region within finally,
            // We have added edges for all infinite loops inside a finally, identify that and transition to parent
            if (predRegion->GetType() != RegionTypeFinally && firstInstr->GetNextRealInstr()->m_opcode == Js::OpCode::LeaveNull)
            {
                region = predRegion->GetParent();
            }
            else
            {
                region = predRegion;
            }
            break;

        default:
            // Normal (non-EH) transition: just propagate the region.
            region = predRegion;
            break;
        }
    }
    return region;
}
void
FlowGraph::InsertCompBlockToLoopList(Loop *loop, BasicBlock* compBlock, BasicBlock* targetBlock, bool postTarget)
{
    // Insert compBlock into loop's block list, positioned relative to targetBlock:
    // immediately before it when postTarget is false, immediately after it when
    // postTarget is true. Recurses through the parent loops so that every
    // enclosing loop whose list contains targetBlock also picks up compBlock.
    if (loop)
    {
        bool found = false;
        // Locate targetBlock in this loop's block list; 'iter' keeps its
        // position after the macro loop exits so we can insert at that spot.
        FOREACH_BLOCK_IN_LOOP_EDITING(loopBlock, loop, iter)
        {
            if (loopBlock == targetBlock)
            {
                found = true;
                break;
            }
        } NEXT_BLOCK_IN_LOOP_EDITING;
        if (found)
        {
            if (postTarget)
            {
                // Step past targetBlock so the InsertBefore below lands after it.
                iter.Next();
            }
            iter.InsertBefore(compBlock);
        }
        // Keep all ancestor loops' block lists consistent as well.
        InsertCompBlockToLoopList(loop->parent, compBlock, targetBlock, postTarget);
    }
}
// Insert a block on the given edge
BasicBlock *
FlowGraph::InsertAirlockBlock(FlowEdge * edge)
{
    // Split the edge pred->succ with a new "airlock" block. In flow terms the
    // airlock sits between the edge's source and sink; in layout terms it is
    // linked immediately before the sink block. Returns the new block.
    BasicBlock * airlockBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * sinkPrevBlock = sinkBlock->prev;
    IR::Instr * sinkPrevBlockLastInstr = sinkPrevBlock->GetLastInstr();
    IR::Instr * sourceLastInstr = sourceBlock->GetLastInstr();

    airlockBlock->loop = sinkBlock->loop;
    airlockBlock->SetBlockNum(this->blockCount++);
#ifdef DBG
    airlockBlock->isAirLockBlock = true;
#endif
    //
    // Fixup block linkage
    //

    // airlock block is linked into the block list right before sinkBlock
    airlockBlock->prev = sinkBlock->prev;
    sinkBlock->prev = airlockBlock;

    airlockBlock->next = sinkBlock;
    airlockBlock->prev->next = airlockBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> airlockBlock
    this->AddEdge(sourceBlock, airlockBlock);

    // Reuse the original edge as airlockBlock -> sinkBlock
    edge->SetPred(airlockBlock);
    airlockBlock->AddSucc(edge, this);

    // Fixup data use count
    airlockBlock->SetDataUseCount(1);
    sourceBlock->DecrementDataUseCount();

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining: the airlock's label is
    // created in the sink label's func.
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *airlockLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);

    sinkPrevBlockLastInstr->InsertAfter(airlockLabel);

    airlockBlock->SetFirstInstr(airlockLabel);
    airlockLabel->SetBasicBlock(airlockBlock);

    // Add br to sinkBlock from airlock block
    IR::BranchInstr *airlockBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    airlockBr->SetByteCodeOffset(sinkLabel);
    airlockLabel->InsertAfter(airlockBr);
    airlockBlock->SetLastInstr(airlockBr);

    airlockLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock: retarget its branch from the sink label
    // to the airlock label.
    IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
    if (sourceBr->IsMultiBranch())
    {
        const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, airlockLabel);
        Assert(replaced);
    }
    else if (sourceBr->GetTarget() == sinkLabel)
    {
        sourceBr->SetTarget(airlockLabel);
    }

    // The airlock now sits between sinkPrevBlock and sinkBlock. If
    // sinkPrevBlock used to fall through into sinkBlock, that fall-through
    // would now land in the airlock; insert a compensation block to preserve
    // the original sinkPrevBlock -> sinkBlock flow.
    if (!sinkPrevBlockLastInstr->IsBranchInstr() || sinkPrevBlockLastInstr->AsBranchInstr()->HasFallThrough())
    {
        if (!sinkPrevBlock->isDeleted)
        {
            FlowEdge *dstEdge = this->FindEdge(sinkPrevBlock, sinkBlock);
            if (dstEdge) // Possibility that sourceblock may be same as sinkPrevBlock
            {
                BasicBlock* compensationBlock = this->InsertCompensationCodeForBlockMove(dstEdge, true /*insert comp block to loop list*/, true);
                compensationBlock->IncrementDataUseCount();
                // We need to skip airlock compensation block in globopt as its inserted while globopt is iteration over the blocks.
                compensationBlock->isAirLockCompensationBlock = true;
            }
        }
    }

#if DBG_DUMP
    this->Dump(true, _u("\n After insertion of airlock block \n"));
#endif

    return airlockBlock;
}
// Insert a block on the given edge
BasicBlock *
FlowGraph::InsertCompensationCodeForBlockMove(FlowEdge * edge, bool insertToLoopList, bool sinkBlockLoop)
{
    // Split the edge pred->succ with a compensation block that is linked right
    // after the source block, branching to the sink. Optionally registers the
    // new block in the appropriate loop block lists (choosing the sink's or
    // the source's loop per sinkBlockLoop). Returns the new block.
    BasicBlock * compBlock = BasicBlock::New(this);
    BasicBlock * sourceBlock = edge->GetPred();
    BasicBlock * sinkBlock = edge->GetSucc();
    BasicBlock * fallthroughBlock = sourceBlock->next;
    IR::Instr *  sourceLastInstr = sourceBlock->GetLastInstr();

    compBlock->SetBlockNum(this->blockCount++);

    if (insertToLoopList)
    {
        // Keep loop block lists consistent with the flow-graph edit.
        if (sinkBlockLoop)
        {
            if (sinkBlock->loop && sinkBlock->loop->GetHeadBlock() == sinkBlock)
            {
                // BLUE 531255: sinkblock may be the head block of new loop, we shouldn't insert compensation block to that loop
                // Insert it to all the parent loop lists.
                compBlock->loop = sinkBlock->loop->parent;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false);
            }
            else
            {
                compBlock->loop = sinkBlock->loop;
                InsertCompBlockToLoopList(compBlock->loop, compBlock, sinkBlock, false); // sinkBlock or fallthroughBlock?
            }
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSink = true;
#endif
        }
        else
        {
            compBlock->loop = sourceBlock->loop;
            InsertCompBlockToLoopList(compBlock->loop, compBlock, sourceBlock, true);
#ifdef DBG
            compBlock->isBreakCompensationBlockAtSource = true;
#endif
        }
    }

    //
    // Fixup block linkage
    //

    // compensation block is inserted right after sourceBlock
    compBlock->next = fallthroughBlock;
    fallthroughBlock->prev = compBlock;

    compBlock->prev = sourceBlock;
    sourceBlock->next = compBlock;

    //
    // Fixup flow edges
    //
    sourceBlock->RemoveSucc(sinkBlock, this, false);

    // Add sourceBlock -> compBlock
    this->AddEdge(sourceBlock, compBlock);

    // Reuse the original edge as compBlock -> sinkBlock
    edge->SetPred(compBlock);
    compBlock->AddSucc(edge, this);

    //
    // Fixup IR
    //

    // Maintain the instruction region for inlining: the compensation label is
    // created in the sink label's func.
    IR::LabelInstr *sinkLabel = sinkBlock->GetFirstInstr()->AsLabelInstr();
    Func * sinkLabelFunc = sinkLabel->m_func;

    IR::LabelInstr *compLabel = IR::LabelInstr::New(Js::OpCode::Label, sinkLabelFunc);

    sourceLastInstr->InsertAfter(compLabel);
    compBlock->SetFirstInstr(compLabel);
    compLabel->SetBasicBlock(compBlock);

    // Add br to sinkBlock from compensation block
    IR::BranchInstr *compBr = IR::BranchInstr::New(Js::OpCode::Br, sinkLabel, sinkLabelFunc);
    compBr->SetByteCodeOffset(sinkLabel);
    compLabel->InsertAfter(compBr);
    compBlock->SetLastInstr(compBr);

    compLabel->SetByteCodeOffset(sinkLabel);

    // Fixup flow out of sourceBlock: only a multi-branch needs retargeting
    // here; a conditional's fall-through now lands in compBlock naturally.
    if (sourceLastInstr->IsBranchInstr())
    {
        IR::BranchInstr *sourceBr = sourceLastInstr->AsBranchInstr();
        Assert(sourceBr->IsMultiBranch() || sourceBr->IsConditional());
        if (sourceBr->IsMultiBranch())
        {
            const bool replaced = sourceBr->AsMultiBrInstr()->ReplaceTarget(sinkLabel, compLabel);
            Assert(replaced);
        }
    }

    // If EH regions were assigned before globopt, the new block needs one too.
    bool assignRegionsBeforeGlobopt = this->func->HasTry() && (this->func->DoOptimizeTry() ||
        (this->func->IsSimpleJit() && this->func->hasBailout));
    if (assignRegionsBeforeGlobopt)
    {
        UpdateRegionForBlockFromEHPred(compBlock);
    }

    return compBlock;
}
void
FlowGraph::RemoveUnreachableBlocks()
{
    // Mark-and-sweep removal of blocks not reachable from the entry block.
    AnalysisAssert(this->blockList);

    // Clear the visited flag on every block.
    FOREACH_BLOCK(block, this)
    {
        block->isVisited = false;
    }
    NEXT_BLOCK;

    // The head of the block list (the entry block) is always reachable.
    this->blockList->isVisited = true;

    // One forward pass over the (editable) block list: propagate reachability
    // to successors of already-visited blocks, and remove any block that is
    // still unvisited when we reach it. NOTE(review): this relies on forward
    // edges targeting later blocks in list order (back edges target already
    // processed loop headers), so a single pass suffices.
    FOREACH_BLOCK_EDITING(block, this)
    {
        if (block->isVisited)
        {
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                succ->isVisited = true;
            } NEXT_SUCCESSOR_BLOCK;
        }
        else
        {
            this->RemoveBlock(block);
        }
    }
    NEXT_BLOCK_EDITING;
}
  2155. // If block has no predecessor, remove it.
  2156. bool
  2157. FlowGraph::RemoveUnreachableBlock(BasicBlock *block, GlobOpt * globOpt)
  2158. {
  2159. bool isDead = false;
  2160. if ((block->GetPredList() == nullptr || block->GetPredList()->Empty()) && block != this->func->m_fg->blockList)
  2161. {
  2162. isDead = true;
  2163. }
  2164. else if (block->isLoopHeader)
  2165. {
  2166. // A dead loop still has back-edges pointing to it...
  2167. isDead = true;
  2168. FOREACH_PREDECESSOR_BLOCK(pred, block)
  2169. {
  2170. if (!block->loop->IsDescendentOrSelf(pred->loop))
  2171. {
  2172. isDead = false;
  2173. }
  2174. } NEXT_PREDECESSOR_BLOCK;
  2175. }
  2176. if (isDead)
  2177. {
  2178. this->RemoveBlock(block, globOpt);
  2179. return true;
  2180. }
  2181. return false;
  2182. }
IR::Instr *
FlowGraph::PeepTypedCm(IR::Instr *instr)
{
    // Basic pattern, peep:
    //      t1 = CmEq a, b
    //      BrTrue_I4 $L1, t1
    // Into:
    //      t1 = 1
    //      BrEq $L1, a, b
    //      t1 = 0
    //
    // Fuses a typed compare with the branch that consumes it, materializing
    // the compare result on both paths (the stores are likely dead-stores).
    // Returns the rewritten branch on success, nullptr when the pattern
    // does not match.

    IR::Instr * instrNext = instr->GetNextRealInstrOrLabel();

    // find intermediate Lds (up to two copies of the compare result)
    IR::Instr * instrLd = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    IR::Instr * instrLd2 = nullptr;
    if (instrNext->m_opcode == Js::OpCode::Ld_I4)
    {
        instrLd2 = instrNext;
        instrNext = instrNext->GetNextRealInstrOrLabel();
    }

    // Find BrTrue/BrFalse
    IR::Instr *instrBr;
    bool brIsTrue;
    if (instrNext->m_opcode == Js::OpCode::BrTrue_I4)
    {
        instrBr = instrNext;
        brIsTrue = true;
    }
    else if (instrNext->m_opcode == Js::OpCode::BrFalse_I4)
    {
        instrBr = instrNext;
        brIsTrue = false;
    }
    else
    {
        return nullptr;
    }

    AssertMsg(instrLd || (!instrLd && !instrLd2), "Either instrLd is non-null or both null");

    // if we have intermediate Lds, then make sure pattern is:
    //      t1 = CmEq a, b
    //      t2 = Ld_A t1
    //      BrTrue $L1, t2
    if (instrLd && !instrLd->GetSrc1()->IsEqual(instr->GetDst()))
    {
        return nullptr;
    }

    if (instrLd2 && !instrLd2->GetSrc1()->IsEqual(instrLd->GetDst()))
    {
        return nullptr;
    }

    // Make sure we have:
    //      t1 = CmEq a, b
    //      BrTrue/BrFalse t1   (or one of the copies t2/t3)
    if (!(instr->GetDst()->IsEqual(instrBr->GetSrc1()) || (instrLd && instrLd->GetDst()->IsEqual(instrBr->GetSrc1())) || (instrLd2 && instrLd2->GetDst()->IsEqual(instrBr->GetSrc1()))))
    {
        return nullptr;
    }

    IR::Opnd * src1 = instr->UnlinkSrc1();
    IR::Opnd * src2 = instr->UnlinkSrc2();

    IR::Instr * instrNew;
    IR::Opnd * tmpOpnd;

    // If a compare source aliases the compare dst (or one of its copies), the
    // constant store emitted below would clobber the branch operand before the
    // branch reads it; copy that source into a fresh temp first.
    if (instr->GetDst()->IsEqual(src1) || (instrLd && instrLd->GetDst()->IsEqual(src1)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src1)))
    {
        Assert(src1->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src1, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src1 = tmpOpnd;
    }

    if (instr->GetDst()->IsEqual(src2) || (instrLd && instrLd->GetDst()->IsEqual(src2)) || (instrLd2 && instrLd2->GetDst()->IsEqual(src2)))
    {
        Assert(src2->IsInt32());

        tmpOpnd = IR::RegOpnd::New(TyInt32, instr->m_func);
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, tmpOpnd, src2, instr->m_func);
        instrNew->SetByteCodeOffset(instr);
        instr->InsertBefore(instrNew);
        src2 = tmpOpnd;
    }

    // Move the compare's operands onto the branch.
    instrBr->ReplaceSrc1(src1);
    instrBr->SetSrc2(src2);

    // Map compare opcode -> corresponding conditional branch opcode.
    Js::OpCode newOpcode;
    switch (instr->m_opcode)
    {
    case Js::OpCode::CmEq_I4:
        newOpcode = Js::OpCode::BrEq_I4;
        break;
    case Js::OpCode::CmGe_I4:
        newOpcode = Js::OpCode::BrGe_I4;
        break;
    case Js::OpCode::CmGt_I4:
        newOpcode = Js::OpCode::BrGt_I4;
        break;
    case Js::OpCode::CmLt_I4:
        newOpcode = Js::OpCode::BrLt_I4;
        break;
    case Js::OpCode::CmLe_I4:
        newOpcode = Js::OpCode::BrLe_I4;
        break;
    case Js::OpCode::CmUnGe_I4:
        newOpcode = Js::OpCode::BrUnGe_I4;
        break;
    case Js::OpCode::CmUnGt_I4:
        newOpcode = Js::OpCode::BrUnGt_I4;
        break;
    case Js::OpCode::CmUnLt_I4:
        newOpcode = Js::OpCode::BrUnLt_I4;
        break;
    case Js::OpCode::CmUnLe_I4:
        newOpcode = Js::OpCode::BrUnLe_I4;
        break;
    case Js::OpCode::CmNeq_I4:
        newOpcode = Js::OpCode::BrNeq_I4;
        break;
    case Js::OpCode::CmEq_A:
        newOpcode = Js::OpCode::BrEq_A;
        break;
    case Js::OpCode::CmGe_A:
        newOpcode = Js::OpCode::BrGe_A;
        break;
    case Js::OpCode::CmGt_A:
        newOpcode = Js::OpCode::BrGt_A;
        break;
    case Js::OpCode::CmLt_A:
        newOpcode = Js::OpCode::BrLt_A;
        break;
    case Js::OpCode::CmLe_A:
        newOpcode = Js::OpCode::BrLe_A;
        break;
    case Js::OpCode::CmUnGe_A:
        newOpcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::CmUnGt_A:
        newOpcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmUnLt_A:
        newOpcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::CmUnLe_A:
        newOpcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::CmNeq_A:
        newOpcode = Js::OpCode::BrNeq_A;
        break;
    case Js::OpCode::CmSrEq_A:
        newOpcode = Js::OpCode::BrSrEq_A;
        break;
    case Js::OpCode::CmSrNeq_A:
        newOpcode = Js::OpCode::BrSrNeq_A;
        break;
    default:
        newOpcode = Js::OpCode::InvalidOpCode;
        Assume(UNREACHED);
    }

    instrBr->m_opcode = newOpcode;

    if (brIsTrue)
    {
        // Branch taken <=> compare true: store 1 before the branch (taken
        // path) and 0 after it (fall-through path), for the compare dst and
        // each copy.
        instr->SetSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(1, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(0, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }
    else
    {
        // BrFalse: invert the branch so the taken path corresponds to the
        // compare being false; store 0 on the taken path and 1 on fall-through.
        instrBr->AsBranchInstr()->Invert();

        instr->SetSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
        instr->m_opcode = Js::OpCode::Ld_I4;
        instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instr->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
        instrNew->SetByteCodeOffset(instrBr);
        instrBr->InsertAfter(instrNew);
        if (instrLd)
        {
            instrLd->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
            instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
            instrNew->SetByteCodeOffset(instrBr);
            instrBr->InsertAfter(instrNew);

            if (instrLd2)
            {
                instrLd2->ReplaceSrc1(IR::IntConstOpnd::New(0, TyInt8, instr->m_func));
                instrNew = IR::Instr::New(Js::OpCode::Ld_I4, instrLd2->GetDst(), IR::IntConstOpnd::New(1, TyInt8, instr->m_func), instr->m_func);
                instrNew->SetByteCodeOffset(instrBr);
                instrBr->InsertAfter(instrNew);
            }
        }
    }

    return instrBr;
}
  2389. IR::Instr *
  2390. FlowGraph::PeepCm(IR::Instr *instr)
  2391. {
  2392. // Basic pattern, peep:
  2393. // t1 = CmEq a, b
  2394. // t2 = Ld_A t1
  2395. // BrTrue $L1, t2
  2396. // Into:
  2397. // t1 = True
  2398. // t2 = True
  2399. // BrEq $L1, a, b
  2400. // t1 = False
  2401. // t2 = False
  2402. //
  2403. // The true/false Ld_A's will most likely end up being dead-stores...
  2404. // Alternate Pattern
  2405. // t1= CmEq a, b
  2406. // BrTrue $L1, t1
  2407. // Into:
  2408. // BrEq $L1, a, b
  2409. Func *func = instr->m_func;
  2410. // Find Ld_A
  2411. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  2412. IR::Instr *inlineeEndInstr = nullptr;
  2413. IR::Instr *instrNew;
  2414. IR::Instr *instrLd = nullptr, *instrLd2 = nullptr;
  2415. IR::Instr *instrByteCode = instrNext;
  2416. bool ldFound = false;
  2417. IR::Opnd *brSrc = instr->GetDst();
  2418. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(instr->GetDst()))
  2419. {
  2420. ldFound = true;
  2421. instrLd = instrNext;
  2422. brSrc = instrNext->GetDst();
  2423. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  2424. {
  2425. return nullptr;
  2426. }
  2427. instrNext = instrLd->GetNextRealInstrOrLabel();
  2428. // Is there a second Ld_A?
  2429. if (instrNext->m_opcode == Js::OpCode::Ld_A && instrNext->GetSrc1()->IsEqual(brSrc))
  2430. {
  2431. // We have:
  2432. // t1 = Cm
  2433. // t2 = t1 // ldSrc = t1
  2434. // t3 = t2 // ldDst = t3
  2435. // BrTrue/BrFalse t3
  2436. instrLd2 = instrNext;
  2437. brSrc = instrLd2->GetDst();
  2438. instrNext = instrLd2->GetNextRealInstrOrLabel();
  2439. if (brSrc->IsEqual(instr->GetSrc1()) || brSrc->IsEqual(instr->GetSrc2()))
  2440. {
  2441. return nullptr;
  2442. }
  2443. }
  2444. }
  2445. // Skip over InlineeEnd
  2446. if (instrNext->m_opcode == Js::OpCode::InlineeEnd)
  2447. {
  2448. inlineeEndInstr = instrNext;
  2449. instrNext = inlineeEndInstr->GetNextRealInstrOrLabel();
  2450. }
  2451. // Find BrTrue/BrFalse
  2452. bool brIsTrue;
  2453. if (instrNext->m_opcode == Js::OpCode::BrTrue_A)
  2454. {
  2455. brIsTrue = true;
  2456. }
  2457. else if (instrNext->m_opcode == Js::OpCode::BrFalse_A)
  2458. {
  2459. brIsTrue = false;
  2460. }
  2461. else
  2462. {
  2463. return nullptr;
  2464. }
  2465. IR::Instr *instrBr = instrNext;
  2466. // Make sure we have:
  2467. // t1 = Ld_A
  2468. // BrTrue/BrFalse t1
  2469. if (!instr->GetDst()->IsEqual(instrBr->GetSrc1()) && !brSrc->IsEqual(instrBr->GetSrc1()))
  2470. {
  2471. return nullptr;
  2472. }
  2473. //
  2474. // We have a match. Generate the new branch
  2475. //
  2476. // BrTrue/BrFalse t1
  2477. // Keep a copy of the inliner func and the bytecode offset of the original BrTrue/BrFalse if we end up inserting a new branch out of the inlinee,
  2478. // and sym id of t1 for proper restoration on a bailout before the branch.
  2479. Func* origBrFunc = instrBr->m_func;
  2480. uint32 origBrByteCodeOffset = instrBr->GetByteCodeOffset();
  2481. uint32 origBranchSrcSymId = instrBr->GetSrc1()->GetStackSym()->m_id;
  2482. bool origBranchSrcOpndIsJITOpt = instrBr->GetSrc1()->GetIsJITOptimizedReg();
  2483. instrBr->Unlink();
  2484. instr->InsertBefore(instrBr);
  2485. instrBr->ClearByteCodeOffset();
  2486. instrBr->SetByteCodeOffset(instr);
  2487. instrBr->FreeSrc1();
  2488. instrBr->SetSrc1(instr->UnlinkSrc1());
  2489. instrBr->SetSrc2(instr->UnlinkSrc2());
  2490. instrBr->m_func = instr->m_func;
  2491. Js::OpCode newOpcode = Js::OpCode::InvalidOpCode;
  2492. switch(instr->m_opcode)
  2493. {
  2494. case Js::OpCode::CmEq_A:
  2495. newOpcode = Js::OpCode::BrEq_A;
  2496. break;
  2497. case Js::OpCode::CmGe_A:
  2498. newOpcode = Js::OpCode::BrGe_A;
  2499. break;
  2500. case Js::OpCode::CmGt_A:
  2501. newOpcode = Js::OpCode::BrGt_A;
  2502. break;
  2503. case Js::OpCode::CmLt_A:
  2504. newOpcode = Js::OpCode::BrLt_A;
  2505. break;
  2506. case Js::OpCode::CmLe_A:
  2507. newOpcode = Js::OpCode::BrLe_A;
  2508. break;
  2509. case Js::OpCode::CmUnGe_A:
  2510. newOpcode = Js::OpCode::BrUnGe_A;
  2511. break;
  2512. case Js::OpCode::CmUnGt_A:
  2513. newOpcode = Js::OpCode::BrUnGt_A;
  2514. break;
  2515. case Js::OpCode::CmUnLt_A:
  2516. newOpcode = Js::OpCode::BrUnLt_A;
  2517. break;
  2518. case Js::OpCode::CmUnLe_A:
  2519. newOpcode = Js::OpCode::BrUnLe_A;
  2520. break;
  2521. case Js::OpCode::CmNeq_A:
  2522. newOpcode = Js::OpCode::BrNeq_A;
  2523. break;
  2524. case Js::OpCode::CmSrEq_A:
  2525. newOpcode = Js::OpCode::BrSrEq_A;
  2526. break;
  2527. case Js::OpCode::CmSrNeq_A:
  2528. newOpcode = Js::OpCode::BrSrNeq_A;
  2529. break;
  2530. default:
  2531. Assert(UNREACHED);
  2532. __assume(UNREACHED);
  2533. }
  2534. instrBr->m_opcode = newOpcode;
  2535. IR::AddrOpnd* trueOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetTrueAddr(), IR::AddrOpndKindDynamicVar, func, true);
  2536. IR::AddrOpnd* falseOpnd = IR::AddrOpnd::New(func->GetScriptContextInfo()->GetFalseAddr(), IR::AddrOpndKindDynamicVar, func, true);
  2537. trueOpnd->SetValueType(ValueType::Boolean);
  2538. falseOpnd->SetValueType(ValueType::Boolean);
  2539. if (ldFound)
  2540. {
  2541. // Split Ld_A into "Ld_A TRUE"/"Ld_A FALSE"
  2542. if (brIsTrue)
  2543. {
  2544. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  2545. instrNew->SetByteCodeOffset(instrBr);
  2546. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2547. instrBr->InsertBefore(instrNew);
  2548. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), trueOpnd, instrBr->m_func);
  2549. instrNew->SetByteCodeOffset(instrBr);
  2550. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2551. instrBr->InsertBefore(instrNew);
  2552. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrLd->m_func);
  2553. instrLd->InsertBefore(instrNew);
  2554. instrNew->SetByteCodeOffset(instrLd);
  2555. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2556. instrLd->ReplaceSrc1(falseOpnd);
  2557. if (instrLd2)
  2558. {
  2559. instrLd2->ReplaceSrc1(falseOpnd);
  2560. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd2->GetDst(), trueOpnd, instrBr->m_func);
  2561. instrBr->InsertBefore(instrNew);
  2562. instrNew->SetByteCodeOffset(instrBr);
  2563. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2564. }
  2565. }
  2566. else
  2567. {
  2568. instrBr->AsBranchInstr()->Invert();
  2569. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), falseOpnd, instrBr->m_func);
  2570. instrBr->InsertBefore(instrNew);
  2571. instrNew->SetByteCodeOffset(instrBr);
  2572. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2573. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetDst(), falseOpnd, instrBr->m_func);
  2574. instrBr->InsertBefore(instrNew);
  2575. instrNew->SetByteCodeOffset(instrBr);
  2576. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2577. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrLd->m_func);
  2578. instrLd->InsertBefore(instrNew);
  2579. instrNew->SetByteCodeOffset(instrLd);
  2580. instrLd->ReplaceSrc1(trueOpnd);
  2581. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2582. if (instrLd2)
  2583. {
  2584. instrLd2->ReplaceSrc1(trueOpnd);
  2585. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd->GetSrc1(), trueOpnd, instrBr->m_func);
  2586. instrBr->InsertBefore(instrNew);
  2587. instrNew->SetByteCodeOffset(instrBr);
  2588. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2589. }
  2590. }
  2591. }
  2592. // Fix InlineeEnd
  2593. if (inlineeEndInstr)
  2594. {
  2595. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr, instrByteCode , origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
  2596. }
  2597. if (instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
  2598. {
  2599. Assert(!instrBr->AsBranchInstr()->HasByteCodeReg());
  2600. StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;
  2601. instrBr->AsBranchInstr()->SetByteCodeReg(dstSym->GetByteCodeRegSlot());
  2602. }
  2603. brSrc = brSrc->Copy(this->func);
  2604. // We need brSrc later, but instr->Remove() might delete it...
  2605. IR::AutoReuseOpnd brSrcAutoCopy(brSrc, this->func, true);
  2606. instr->Remove();
  2607. //
  2608. // Try optimizing through a second branch.
  2609. // Peep:
  2610. //
  2611. // t2 = True;
  2612. // BrTrue $L1
  2613. // ...
  2614. // L1:
  2615. // t1 = Ld_A t2
  2616. // BrTrue $L2
  2617. //
  2618. // Into:
  2619. // t2 = True;
  2620. // t1 = True;
  2621. // BrTrue $L2 <---
  2622. // ...
  2623. // L1:
  2624. // t1 = Ld_A t2
  2625. // BrTrue $L2
  2626. //
  2627. // This cleanup helps expose second level Cm peeps.
  2628. IR::Instr *instrLd3 = instrBr->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2629. // Skip over branch to branch
  2630. while (instrLd3->m_opcode == Js::OpCode::Br)
  2631. {
  2632. instrLd3 = instrLd3->AsBranchInstr()->GetTarget()->GetNextRealInstrOrLabel();
  2633. }
  2634. // Find Ld_A
  2635. if (instrLd3->m_opcode != Js::OpCode::Ld_A)
  2636. {
  2637. return instrBr;
  2638. }
  2639. IR::Instr *instrBr2 = instrLd3->GetNextRealInstrOrLabel();
  2640. IR::Instr *inlineeEndInstr2 = nullptr;
  2641. // InlineeEnd?
  2642. // REVIEW: Can we handle 2 inlineeEnds?
  2643. if (instrBr2->m_opcode == Js::OpCode::InlineeEnd && !inlineeEndInstr)
  2644. {
  2645. inlineeEndInstr2 = instrBr2;
  2646. instrBr2 = instrBr2->GetNextRealInstrOrLabel();
  2647. }
  2648. // Find branch
  2649. bool brIsTrue2;
  2650. if (instrBr2->m_opcode == Js::OpCode::BrTrue_A)
  2651. {
  2652. brIsTrue2 = true;
  2653. }
  2654. else if (instrBr2->m_opcode == Js::OpCode::BrFalse_A)
  2655. {
  2656. brIsTrue2 = false;
  2657. }
  2658. else
  2659. {
  2660. return nullptr;
  2661. }
  2662. // Make sure Ld_A operates on the right tmps.
  2663. if (!instrLd3->GetDst()->IsEqual(instrBr2->GetSrc1()) || !brSrc->IsEqual(instrLd3->GetSrc1()))
  2664. {
  2665. return nullptr;
  2666. }
  2667. if (instrLd3->GetDst()->IsEqual(instrBr->GetSrc1()) || instrLd3->GetDst()->IsEqual(instrBr->GetSrc2()))
  2668. {
  2669. return nullptr;
  2670. }
  2671. // Make sure that the reg we're assigning to is not live in the intervening instructions (if this is a forward branch).
  2672. if (instrLd3->GetByteCodeOffset() > instrBr->GetByteCodeOffset())
  2673. {
  2674. StackSym *symLd3 = instrLd3->GetDst()->AsRegOpnd()->m_sym;
  2675. if (IR::Instr::HasSymUseInRange(symLd3, instrBr->m_next, instrLd3))
  2676. {
  2677. return nullptr;
  2678. }
  2679. }
  2680. //
  2681. // We have a match!
  2682. //
  2683. if(inlineeEndInstr2)
  2684. {
  2685. origBrFunc = instrBr2->m_func;
  2686. origBrByteCodeOffset = instrBr2->GetByteCodeOffset();
  2687. origBranchSrcSymId = instrBr2->GetSrc1()->GetStackSym()->m_id;
  2688. }
  2689. // Fix Ld_A
  2690. if (brIsTrue)
  2691. {
  2692. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), trueOpnd, instrBr->m_func);
  2693. instrBr->InsertBefore(instrNew);
  2694. instrNew->SetByteCodeOffset(instrBr);
  2695. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2696. }
  2697. else
  2698. {
  2699. instrNew = IR::Instr::New(Js::OpCode::Ld_A, instrLd3->GetDst(), falseOpnd, instrBr->m_func);
  2700. instrBr->InsertBefore(instrNew);
  2701. instrNew->SetByteCodeOffset(instrBr);
  2702. instrNew->GetDst()->AsRegOpnd()->m_fgPeepTmp = true;
  2703. }
  2704. IR::LabelInstr *brTarget2;
  2705. // Retarget branch
  2706. if (brIsTrue2 == brIsTrue)
  2707. {
  2708. brTarget2 = instrBr2->AsBranchInstr()->GetTarget();
  2709. }
  2710. else
  2711. {
  2712. brTarget2 = IR::LabelInstr::New(Js::OpCode::Label, instrBr2->m_func);
  2713. brTarget2->SetByteCodeOffset(instrBr2->m_next);
  2714. instrBr2->InsertAfter(brTarget2);
  2715. }
  2716. instrBr->AsBranchInstr()->SetTarget(brTarget2);
  2717. // InlineeEnd?
  2718. if (inlineeEndInstr2)
  2719. {
  2720. this->InsertInlineeOnFLowEdge(instrBr->AsBranchInstr(), inlineeEndInstr2, instrByteCode, origBrFunc, origBrByteCodeOffset, origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
  2721. }
  2722. return instrBr;
  2723. }
void
FlowGraph::InsertInlineeOnFLowEdge(IR::BranchInstr *instrBr, IR::Instr *inlineeEndInstr, IR::Instr *instrBytecode, Func* origBrFunc, uint32 origByteCodeOffset, bool origBranchSrcOpndIsJITOpt, uint32 origBranchSrcSymId)
{
    // Helper for PeepsCm code.
    //
    // We've skipped some InlineeEnd.  Globopt expects to see these
    // on all flow paths out of the inlinee.  Insert an InlineeEnd
    // on the new path:
    //      BrEq $L1, a, b
    // Becomes:
    //      BrNeq $L2, a, b
    //      InlineeEnd
    //      Br $L1
    //   L2:

    // Invert the branch so the inlinee-exit path becomes the fall-through,
    // then route it through a cloned InlineeEnd and an unconditional jump to
    // the original target.
    instrBr->AsBranchInstr()->Invert();

    IR::BranchInstr *newBr = IR::BranchInstr::New(Js::OpCode::Br, instrBr->AsBranchInstr()->GetTarget(), origBrFunc);
    newBr->SetByteCodeOffset(origByteCodeOffset);
    instrBr->InsertAfter(newBr);

    // New label for the inverted branch's target (the old fall-through path).
    IR::LabelInstr *newLabel = IR::LabelInstr::New(Js::OpCode::Label, instrBr->m_func);
    newLabel->SetByteCodeOffset(instrBytecode);
    newBr->InsertAfter(newLabel);
    instrBr->AsBranchInstr()->SetTarget(newLabel);

    // Clone the InlineeEnd (sharing its srcs) onto the new exit path.
    IR::Instr *newInlineeEnd = IR::Instr::New(Js::OpCode::InlineeEnd, inlineeEndInstr->m_func);
    newInlineeEnd->SetSrc1(inlineeEndInstr->GetSrc1());
    newInlineeEnd->SetSrc2(inlineeEndInstr->GetSrc2());
    newInlineeEnd->SetByteCodeOffset(instrBytecode);
    newInlineeEnd->SetIsCloned(true);  // Mark it as cloned - this is used later by the inlinee args optimization
    newBr->InsertBefore(newInlineeEnd);

    // Record the original branch source sym so it can be restored on a
    // bailout before the branch.
    IR::ByteCodeUsesInstr * useOrigBranchSrcInstr = IR::ByteCodeUsesInstr::New(origBrFunc, origByteCodeOffset);
    useOrigBranchSrcInstr->SetRemovedOpndSymbol(origBranchSrcOpndIsJITOpt, origBranchSrcSymId);
    newBr->InsertBefore(useOrigBranchSrcInstr);

    uint newBrFnNumber = newBr->m_func->GetFunctionNumber();
    Assert(newBrFnNumber == origBrFunc->GetFunctionNumber());

    // The function numbers of the new branch and the inlineeEnd instruction should be different (ensuring that the new branch is not added in the inlinee but in the inliner).
    // Only case when they can be same is recursive calls - inlinee and inliner are the same function
    Assert(newBrFnNumber != inlineeEndInstr->m_func->GetFunctionNumber() ||
        newBrFnNumber == inlineeEndInstr->m_func->GetParentFunc()->GetFunctionNumber());
}
  2762. BasicBlock *
  2763. BasicBlock::New(FlowGraph * graph)
  2764. {
  2765. BasicBlock * block;
  2766. block = JitAnew(graph->alloc, BasicBlock, graph->alloc, graph->GetFunc());
  2767. return block;
  2768. }
void
BasicBlock::AddPred(FlowEdge * edge, FlowGraph * graph)
{
    // Prepend 'edge' to this block's predecessor list (node allocated in the
    // flow graph's arena).
    this->predList.Prepend(graph->alloc, edge);
}
void
BasicBlock::AddSucc(FlowEdge * edge, FlowGraph * graph)
{
    // Prepend 'edge' to this block's successor list (node allocated in the
    // flow graph's arena).
    this->succList.Prepend(graph->alloc, edge);
}
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph)
{
    // Remove the edge from 'block' to this block, freeing it and also
    // cleaning up the matching successor entry on 'block'.
    this->RemovePred(block, graph, true, false);
}
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph)
{
    // Remove the edge from this block to 'block', freeing it and also
    // cleaning up the matching predecessor entry on 'block'.
    this->RemoveSucc(block, graph, true, false);
}
void
BasicBlock::RemoveDeadPred(BasicBlock *block, FlowGraph * graph)
{
    // Like RemovePred, but parks the edge on the dead-pred list instead of
    // freeing it (the globopt still needs dead edges for bailout info).
    this->RemovePred(block, graph, true, true);
}
void
BasicBlock::RemoveDeadSucc(BasicBlock *block, FlowGraph * graph)
{
    // Like RemoveSucc, but parks the edge on the dead-succ list instead of
    // freeing it (the globopt still needs dead edges for bailout info).
    this->RemoveSucc(block, graph, true, true);
}
void
BasicBlock::RemovePred(BasicBlock *block, FlowGraph * graph, bool doCleanSucc, bool moveToDead)
{
    // Remove the flow edge whose predecessor is 'block' from this block's
    // predecessor list.
    //   doCleanSucc - also remove the matching entry from 'block's successor
    //                 list (passed as false on that call to stop the mutual
    //                 recursion).
    //   moveToDead  - move the edge to the dead-pred list instead of freeing it.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            // The edge's successor is this block.
            BasicBlock *blockSucc = edge->GetSucc();
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadPredList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanSucc)
            {
                block->RemoveSucc(this, graph, false, moveToDead);
            }
            // If a loop header is left with just one predecessor after the
            // removal (presumably only the back edge remains - confirm), the
            // loop can no longer be entered: mark it dead.
            if (blockSucc->isLoopHeader && blockSucc->loop && blockSucc->GetPredList()->HasOne())
            {
                Loop *loop = blockSucc->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
void
BasicBlock::RemoveSucc(BasicBlock *block, FlowGraph * graph, bool doCleanPred, bool moveToDead)
{
    // Remove the flow edge whose successor is 'block' from this block's
    // successor list.
    //   doCleanPred - also remove the matching entry from 'block's predecessor
    //                 list (passed as false on that call to stop the mutual
    //                 recursion).
    //   moveToDead  - move the edge to the dead-succ list instead of freeing it.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            if (moveToDead)
            {
                iter.MoveCurrentTo(this->GetDeadSuccList());
            }
            else
            {
                iter.RemoveCurrent(graph->alloc);
            }
            if (doCleanPred)
            {
                block->RemovePred(this, graph, false, moveToDead);
            }
            // If the target is a loop header left with a single predecessor
            // (presumably only the back edge remains - confirm), the loop can
            // no longer be entered: mark it dead.
            if (block->isLoopHeader && block->loop && block->GetPredList()->HasOne())
            {
                Loop *loop = block->loop;
                loop->isDead = true;
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
void
BasicBlock::UnlinkPred(BasicBlock *block)
{
    // Unlink (without freeing) the edge from 'block', also detaching the
    // matching successor entry on 'block'.
    this->UnlinkPred(block, true);
}
void
BasicBlock::UnlinkSucc(BasicBlock *block)
{
    // Unlink (without freeing) the edge to 'block', also detaching the
    // matching predecessor entry on 'block'.
    this->UnlinkSucc(block, true);
}
void
BasicBlock::UnlinkPred(BasicBlock *block, bool doCleanSucc)
{
    // Detach (without freeing) the edge whose predecessor is 'block'.
    // doCleanSucc also unlinks the matching successor entry on 'block'; it is
    // passed as false on that call to stop the mutual recursion.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetPredList(), iter)
    {
        if (edge->GetPred() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanSucc)
            {
                block->UnlinkSucc(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
void
BasicBlock::UnlinkSucc(BasicBlock *block, bool doCleanPred)
{
    // Detach (without freeing) the edge whose successor is 'block'.
    // doCleanPred also unlinks the matching predecessor entry on 'block'; it
    // is passed as false on that call to stop the mutual recursion.
    FOREACH_SLISTBASECOUNTED_ENTRY_EDITING(FlowEdge*, edge, this->GetSuccList(), iter)
    {
        if (edge->GetSucc() == block)
        {
            iter.UnlinkCurrent();
            if (doCleanPred)
            {
                block->UnlinkPred(this, false);
            }
            return;
        }
    } NEXT_SLISTBASECOUNTED_ENTRY_EDITING;
    AssertMsg(UNREACHED, "Edge not found.");
}
  2902. bool
  2903. BasicBlock::IsLandingPad()
  2904. {
  2905. BasicBlock * nextBlock = this->GetNext();
  2906. return nextBlock && nextBlock->loop && nextBlock->isLoopHeader && nextBlock->loop->landingPad == this;
  2907. }
IR::Instr *
FlowGraph::RemoveInstr(IR::Instr *instr, GlobOpt * globOpt)
{
    // Remove 'instr' from the IR stream. Returns the instruction the caller
    // should treat as the last surviving instruction at this position (either
    // a replacement ByteCodeUses instruction, the instruction itself when it
    // is kept, or the previous instruction).
    IR::Instr *instrPrev = instr->m_prev;
    if (globOpt)
    {
        // Removing block during glob opt. Need to maintain the graph so that
        // bailout will record the byte code use in case the dead code is exposed
        // by dyno-pogo optimization (where bailout need the byte code uses from
        // the dead blocks where it may not be dead after bailing out)
        if (instr->IsLabelInstr())
        {
            // Labels are kept (flow structure); just clear the loop-top mark.
            instr->AsLabelInstr()->m_isLoopTop = false;
            return instr;
        }
        else if (instr->IsByteCodeUsesInstr())
        {
            // Already a byte-code-use record; keep it.
            return instr;
        }

        /*
         *   Scope object has to be implicitly live whenever Heap Arguments object is live.
         *       - When we restore HeapArguments object in the bail out path, it expects the scope object also to be restored - if one was created.
         */
        Js::OpCode opcode = instr->m_opcode;
        if (opcode == Js::OpCode::LdElemI_A && instr->DoStackArgsOpt(this->func) &&
            globOpt->CurrentBlockData()->IsArgumentsOpnd(instr->GetSrc1()) && instr->m_func->GetScopeObjSym())
        {
            // Keep the scope object sym alive for bailout restoration.
            IR::ByteCodeUsesInstr * byteCodeUsesInstr = IR::ByteCodeUsesInstr::New(instr);
            byteCodeUsesInstr->SetNonOpndSymbol(instr->m_func->GetScopeObjSym()->m_id);
            instr->InsertAfter(byteCodeUsesInstr);
        }

        IR::ByteCodeUsesInstr * newByteCodeUseInstr = globOpt->ConvertToByteCodeUses(instr);
        if (newByteCodeUseInstr != nullptr)
        {
            // We don't care about property used in these instruction
            // It is only necessary for field copy prop so that we will keep the implicit call
            // up to the copy prop location.
            newByteCodeUseInstr->propertySymUse = nullptr;

            if (opcode == Js::OpCode::Yield)
            {
                // A removed Yield also retires its resume label: find the
                // following Label and drop it from the resume bookkeeping.
                IR::Instr *instrLabel = newByteCodeUseInstr->m_next;
                while (instrLabel->m_opcode != Js::OpCode::Label)
                {
                    instrLabel = instrLabel->m_next;
                }
                func->RemoveDeadYieldOffsetResumeLabel(instrLabel->AsLabelInstr());
                instrLabel->AsLabelInstr()->m_hasNonBranchRef = false;
            }

            // Save the last instruction to update the block with
            return newByteCodeUseInstr;
        }
        else
        {
            return instrPrev;
        }
    }
    else
    {
        // Not in globopt: plain removal, no bailout bookkeeping needed.
        instr->Remove();
        return instrPrev;
    }
}
void
FlowGraph::RemoveBlock(BasicBlock *block, GlobOpt * globOpt, bool tailDuping)
{
    // Remove a block from the flow graph: strip its instructions (preserving
    // bailout info when running under the globopt), detach its edges, and
    // unlink its loop from the loop list if it was a loop header.
    Assert(!block->isDead && !block->isDeleted);
    IR::Instr * lastInstr = nullptr;
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        if (instr->m_opcode == Js::OpCode::FunctionExit)
        {
            // Removing FunctionExit causes problems downstream...
            // We could change the opcode, or have FunctionEpilog/FunctionExit to get
            // rid of the epilog.
            break;
        }
        if (instr == block->GetFirstInstr())
        {
            // Keep the block's leading label but clear its special marks.
            Assert(instr->IsLabelInstr());
            instr->AsLabelInstr()->m_isLoopTop = false;
            instr->AsLabelInstr()->m_hasNonBranchRef = false;
        }
        else
        {
            lastInstr = this->RemoveInstr(instr, globOpt);
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;
    if (lastInstr)
    {
        block->SetLastInstr(lastInstr);
    }
    // Detach the block from its neighbors. Under the globopt the edges are
    // moved to the neighbors' dead lists rather than freed.
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetPredList())
    {
        edge->GetPred()->RemoveSucc(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, block->GetSuccList())
    {
        edge->GetSucc()->RemovePred(block, this, false, globOpt != nullptr);
    } NEXT_SLISTBASECOUNTED_ENTRY;
    if (block->isLoopHeader && this->loopList)
    {
        // If loop graph is built, remove loop from loopList
        Loop **pPrevLoop = &this->loopList;
        while (*pPrevLoop != block->loop)
        {
            pPrevLoop = &((*pPrevLoop)->next);
        }
        *pPrevLoop = (*pPrevLoop)->next;
        this->hasLoop = (this->loopList != nullptr);
    }
    if (globOpt != nullptr)
    {
        // Keep the (now dead) edge lists reachable for bailout bookkeeping.
        block->isDead = true;
        block->GetPredList()->MoveTo(block->GetDeadPredList());
        block->GetSuccList()->MoveTo(block->GetDeadSuccList());
    }
    if (tailDuping)
    {
        block->isDead = true;
    }
    block->isDeleted = true;
    block->SetDataUseCount(0);
}
  3031. void
  3032. BasicBlock::UnlinkInstr(IR::Instr * instr)
  3033. {
  3034. Assert(this->Contains(instr));
  3035. Assert(this->GetFirstInstr() != this->GetLastInstr());
  3036. if (instr == this->GetFirstInstr())
  3037. {
  3038. Assert(!this->GetFirstInstr()->IsLabelInstr());
  3039. this->SetFirstInstr(instr->m_next);
  3040. }
  3041. else if (instr == this->GetLastInstr())
  3042. {
  3043. this->SetLastInstr(instr->m_prev);
  3044. }
  3045. instr->Unlink();
  3046. }
  3047. void
  3048. BasicBlock::RemoveInstr(IR::Instr * instr)
  3049. {
  3050. Assert(this->Contains(instr));
  3051. if (instr == this->GetFirstInstr())
  3052. {
  3053. this->SetFirstInstr(instr->m_next);
  3054. }
  3055. else if (instr == this->GetLastInstr())
  3056. {
  3057. this->SetLastInstr(instr->m_prev);
  3058. }
  3059. instr->Remove();
  3060. }
  3061. void
  3062. BasicBlock::InsertInstrBefore(IR::Instr *newInstr, IR::Instr *beforeThisInstr)
  3063. {
  3064. Assert(this->Contains(beforeThisInstr));
  3065. beforeThisInstr->InsertBefore(newInstr);
  3066. if(this->GetFirstInstr() == beforeThisInstr)
  3067. {
  3068. Assert(!beforeThisInstr->IsLabelInstr());
  3069. this->SetFirstInstr(newInstr);
  3070. }
  3071. }
  3072. void
  3073. BasicBlock::InsertInstrAfter(IR::Instr *newInstr, IR::Instr *afterThisInstr)
  3074. {
  3075. Assert(this->Contains(afterThisInstr));
  3076. afterThisInstr->InsertAfter(newInstr);
  3077. if (this->GetLastInstr() == afterThisInstr)
  3078. {
  3079. Assert(afterThisInstr->HasFallThrough());
  3080. this->SetLastInstr(newInstr);
  3081. }
  3082. }
  3083. void
  3084. BasicBlock::InsertAfter(IR::Instr *newInstr)
  3085. {
  3086. Assert(this->GetLastInstr()->HasFallThrough());
  3087. this->GetLastInstr()->InsertAfter(newInstr);
  3088. this->SetLastInstr(newInstr);
  3089. }
  3090. void
  3091. Loop::SetHasCall()
  3092. {
  3093. Loop * current = this;
  3094. do
  3095. {
  3096. if (current->hasCall)
  3097. {
  3098. #if DBG
  3099. current = current->parent;
  3100. while (current)
  3101. {
  3102. Assert(current->hasCall);
  3103. current = current->parent;
  3104. }
  3105. #endif
  3106. break;
  3107. }
  3108. current->hasCall = true;
  3109. current = current->parent;
  3110. }
  3111. while (current != nullptr);
  3112. }
  3113. void
  3114. Loop::SetImplicitCallFlags(Js::ImplicitCallFlags newFlags)
  3115. {
  3116. Loop * current = this;
  3117. do
  3118. {
  3119. if ((current->implicitCallFlags & newFlags) == newFlags)
  3120. {
  3121. #if DBG
  3122. current = current->parent;
  3123. while (current)
  3124. {
  3125. Assert((current->implicitCallFlags & newFlags) == newFlags);
  3126. current = current->parent;
  3127. }
  3128. #endif
  3129. break;
  3130. }
  3131. newFlags = (Js::ImplicitCallFlags)(implicitCallFlags | newFlags);
  3132. current->implicitCallFlags = newFlags;
  3133. current = current->parent;
  3134. }
  3135. while (current != nullptr);
  3136. }
  3137. Js::ImplicitCallFlags
  3138. Loop::GetImplicitCallFlags()
  3139. {
  3140. if (this->implicitCallFlags == Js::ImplicitCall_HasNoInfo)
  3141. {
  3142. if (this->parent == nullptr)
  3143. {
  3144. // We don't have any information, and we don't have any parent, so just assume that there aren't any implicit calls
  3145. this->implicitCallFlags = Js::ImplicitCall_None;
  3146. }
  3147. else
  3148. {
  3149. // We don't have any information, get it from the parent and hope for the best
  3150. this->implicitCallFlags = this->parent->GetImplicitCallFlags();
  3151. }
  3152. }
  3153. return this->implicitCallFlags;
  3154. }
bool
Loop::CanDoFieldCopyProp()
{
#if DBG_DUMP
    // Trace-only: report loops whose implicit-call flags are purely
    // "external" when HostOpt tracing is enabled.
    if (((this->implicitCallFlags & ~(Js::ImplicitCall_External)) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("fieldcopyprop disabled because external: loop count: %d"), GetLoopNumber());
        GetFunc()->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    // Field copy prop is allowed iff the loop's implicit-call flags permit
    // the optimization.
    return GlobOpt::ImplicitCallFlagsAllowOpts(this);
}
bool
Loop::CanDoFieldHoist()
{
    // We can do field hoist wherever we can do copy prop
    // (both share the same implicit-call constraints).
    return CanDoFieldCopyProp();
}
  3176. bool
  3177. Loop::CanHoistInvariants() const
  3178. {
  3179. Func * func = this->GetHeadBlock()->firstInstr->m_func->GetTopFunc();
  3180. if (PHASE_OFF(Js::InvariantsPhase, func))
  3181. {
  3182. return false;
  3183. }
  3184. return true;
  3185. }
  3186. IR::LabelInstr *
  3187. Loop::GetLoopTopInstr() const
  3188. {
  3189. IR::LabelInstr * instr = nullptr;
  3190. if (this->topFunc->isFlowGraphValid)
  3191. {
  3192. instr = this->GetHeadBlock()->GetFirstInstr()->AsLabelInstr();
  3193. }
  3194. else
  3195. {
  3196. // Flowgraph gets torn down after the globopt, so can't get the loopTop from the head block.
  3197. instr = this->loopTopLabel;
  3198. }
  3199. if (instr)
  3200. {
  3201. Assert(instr->m_isLoopTop);
  3202. }
  3203. return instr;
  3204. }
void
Loop::SetLoopTopInstr(IR::LabelInstr * loopTop)
{
    // Cache the loop-top label so it stays available after the flow graph is
    // torn down.
    this->loopTopLabel = loopTop;
}
  3210. #if DBG_DUMP
  3211. uint
  3212. Loop::GetLoopNumber() const
  3213. {
  3214. IR::LabelInstr * loopTopInstr = this->GetLoopTopInstr();
  3215. if (loopTopInstr->IsProfiledLabelInstr())
  3216. {
  3217. return loopTopInstr->AsProfiledLabelInstr()->loopNum;
  3218. }
  3219. return Js::LoopHeader::NoLoop;
  3220. }
  3221. bool
  3222. BasicBlock::Contains(IR::Instr * instr)
  3223. {
  3224. FOREACH_INSTR_IN_BLOCK(blockInstr, this)
  3225. {
  3226. if (instr == blockInstr)
  3227. {
  3228. return true;
  3229. }
  3230. }
  3231. NEXT_INSTR_IN_BLOCK;
  3232. return false;
  3233. }
  3234. #endif
  3235. FlowEdge *
  3236. FlowEdge::New(FlowGraph * graph)
  3237. {
  3238. FlowEdge * edge;
  3239. edge = JitAnew(graph->alloc, FlowEdge);
  3240. return edge;
  3241. }
  3242. bool
  3243. Loop::IsDescendentOrSelf(Loop const * loop) const
  3244. {
  3245. Loop const * currentLoop = loop;
  3246. while (currentLoop != nullptr)
  3247. {
  3248. if (currentLoop == this)
  3249. {
  3250. return true;
  3251. }
  3252. currentLoop = currentLoop->parent;
  3253. }
  3254. return false;
  3255. }
  3256. void FlowGraph::SafeRemoveInstr(IR::Instr *instr)
  3257. {
  3258. BasicBlock *block;
  3259. if (instr->m_next->IsLabelInstr())
  3260. {
  3261. block = instr->m_next->AsLabelInstr()->GetBasicBlock()->GetPrev();
  3262. block->RemoveInstr(instr);
  3263. }
  3264. else if (instr->IsLabelInstr())
  3265. {
  3266. block = instr->AsLabelInstr()->GetBasicBlock();
  3267. block->RemoveInstr(instr);
  3268. }
  3269. else
  3270. {
  3271. Assert(!instr->EndsBasicBlock() && !instr->StartsBasicBlock());
  3272. instr->Remove();
  3273. }
  3274. }
  3275. bool FlowGraph::IsUnsignedOpnd(IR::Opnd *src, IR::Opnd **pShrSrc1)
  3276. {
  3277. // Look for an unsigned constant, or the result of an unsigned shift by zero
  3278. if (!src->IsRegOpnd())
  3279. {
  3280. return false;
  3281. }
  3282. if (!src->AsRegOpnd()->m_sym->IsSingleDef())
  3283. {
  3284. return false;
  3285. }
  3286. if (src->AsRegOpnd()->m_sym->IsIntConst())
  3287. {
  3288. int32 intConst = src->AsRegOpnd()->m_sym->GetIntConstValue();
  3289. if (intConst >= 0)
  3290. {
  3291. *pShrSrc1 = src;
  3292. return true;
  3293. }
  3294. else
  3295. {
  3296. return false;
  3297. }
  3298. }
  3299. IR::Instr * shrUInstr = src->AsRegOpnd()->m_sym->GetInstrDef();
  3300. if (shrUInstr->m_opcode != Js::OpCode::ShrU_A)
  3301. {
  3302. return false;
  3303. }
  3304. IR::Opnd *shrCnt = shrUInstr->GetSrc2();
  3305. if (!shrCnt->IsRegOpnd() || !shrCnt->AsRegOpnd()->m_sym->IsTaggableIntConst() || shrCnt->AsRegOpnd()->m_sym->GetIntConstValue() != 0)
  3306. {
  3307. return false;
  3308. }
  3309. IR::Opnd *shrSrc = shrUInstr->GetSrc1();
  3310. *pShrSrc1 = shrSrc;
  3311. return true;
  3312. }
bool FlowGraph::UnsignedCmpPeep(IR::Instr *cmpInstr)
{
    IR::Opnd *cmpSrc1 = cmpInstr->GetSrc1();
    IR::Opnd *cmpSrc2 = cmpInstr->GetSrc2();
    IR::Opnd *newSrc1;
    IR::Opnd *newSrc2;

    // Look for something like:
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  BrGt t1, t2, L
    //
    // Peep to:
    //
    //  t1 = ShrU_A x, 0
    //  t2 = 10;
    //  t3 = Or_A x, 0
    //  BrUnGt x, t3, L
    //  ByteCodeUse t1
    //
    // Hopefully dead-store can get rid of the ShrU

    // Only worthwhile when the globopt will int-type-specialize the operands.
    if (!this->func->DoGlobOpt() || !GlobOpt::DoAggressiveIntTypeSpec(this->func) || !GlobOpt::DoLossyIntTypeSpec(this->func))
    {
        return false;
    }

    // Unconditional branches have no comparison to rewrite.
    if (cmpInstr->IsBranchInstr() && !cmpInstr->AsBranchInstr()->IsConditional())
    {
        return false;
    }

    if (!cmpInstr->GetSrc2())
    {
        return false;
    }

    // Both operands must provably hold unsigned values.
    if (!this->IsUnsignedOpnd(cmpSrc1, &newSrc1))
    {
        return false;
    }
    if (!this->IsUnsignedOpnd(cmpSrc2, &newSrc2))
    {
        return false;
    }

    // Map the signed compare/branch opcode to its unsigned counterpart.
    // Equality comparisons are sign-agnostic and are left unchanged.
    switch(cmpInstr->m_opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
        break;
    case Js::OpCode::BrLe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLe_A;
        break;
    case Js::OpCode::BrLt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnLt_A;
        break;
    case Js::OpCode::BrGe_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGe_A;
        break;
    case Js::OpCode::BrGt_A:
        cmpInstr->m_opcode = Js::OpCode::BrUnGt_A;
        break;
    case Js::OpCode::CmLe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLe_A;
        break;
    case Js::OpCode::CmLt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnLt_A;
        break;
    case Js::OpCode::CmGe_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGe_A;
        break;
    case Js::OpCode::CmGt_A:
        cmpInstr->m_opcode = Js::OpCode::CmUnGt_A;
        break;

    default:
        // Opcode not recognized as a rewritable comparison.
        return false;
    }

    // Record the bypassed operands so bailout can still restore them.
    IR::ByteCodeUsesInstr * bytecodeInstr = IR::ByteCodeUsesInstr::New(cmpInstr);
    cmpInstr->InsertAfter(bytecodeInstr);

    if (cmpSrc1 != newSrc1)
    {
        if (cmpSrc1->IsRegOpnd() && !cmpSrc1->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc1);
        }
        // Or with zero gives the unwrapped value its own def feeding the compare.
        IR::RegOpnd * unsignedSrc = IR::RegOpnd::New(newSrc1->GetType(), cmpInstr->m_func);
        IR::Instr * orZero = IR::Instr::New(Js::OpCode::Or_A, unsignedSrc, newSrc1, IR::IntConstOpnd::New(0, TyMachReg, cmpInstr->m_func), cmpInstr->m_func);
        orZero->SetByteCodeOffset(cmpInstr);
        cmpInstr->InsertBefore(orZero);
        cmpInstr->ReplaceSrc1(unsignedSrc);
        if (newSrc1->IsRegOpnd())
        {
            cmpInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
            orZero->GetSrc1()->SetIsJITOptimizedReg(true);
        }
    }

    if (cmpSrc2 != newSrc2)
    {
        if (cmpSrc2->IsRegOpnd() && !cmpSrc2->GetIsJITOptimizedReg())
        {
            bytecodeInstr->Set(cmpSrc2);
        }
        // Same rewrite for the second operand.
        IR::RegOpnd * unsignedSrc = IR::RegOpnd::New(newSrc2->GetType(), cmpInstr->m_func);
        IR::Instr * orZero = IR::Instr::New(Js::OpCode::Or_A, unsignedSrc, newSrc2, IR::IntConstOpnd::New(0, TyMachReg, cmpInstr->m_func), cmpInstr->m_func);
        orZero->SetByteCodeOffset(cmpInstr);
        cmpInstr->InsertBefore(orZero);
        cmpInstr->ReplaceSrc2(unsignedSrc);
        if (newSrc2->IsRegOpnd())
        {
            cmpInstr->GetSrc2()->AsRegOpnd()->SetIsJITOptimizedReg(true);
            orZero->GetSrc1()->SetIsJITOptimizedReg(true);
        }
    }

    return true;
}
  3429. #if DBG
void
FlowGraph::VerifyLoopGraph()
{
    // Debug-only consistency check of the loop structure against the CFG.
    FOREACH_BLOCK(block, this)
    {
        Loop *loop = block->loop;
        FOREACH_SUCCESSOR_BLOCK(succ, block)
        {
            if (loop == succ->loop)
            {
                // Same loop: the only header we may branch to inside our own
                // loop is our own loop's header (a back edge).
                Assert(succ->isLoopHeader == false || loop->GetHeadBlock() == succ);
                continue;
            }
            if (succ->isLoopHeader)
            {
                // Entering a different loop: it must be an immediate child,
                // or a loop unrelated to ours (exit to a sibling/outer loop).
                Assert(succ->loop->parent == loop
                    || (!loop->IsDescendentOrSelf(succ->loop)));
                continue;
            }
            // Leaving our loop to a non-header block: the target's loop (if
            // any) must enclose ours.
            Assert(succ->loop == nullptr || succ->loop->IsDescendentOrSelf(loop));
        } NEXT_SUCCESSOR_BLOCK;

        if (!PHASE_OFF(Js::RemoveBreakBlockPhase, this->GetFunc()))
        {
            // Make sure all break blocks have been removed.
            if (loop && !block->isLoopHeader && !(this->func->HasTry() && !this->func->DoOptimizeTry()))
            {
                Assert(loop->IsDescendentOrSelf(block->GetPrev()->loop));
            }
        }
    } NEXT_BLOCK;
}
  3461. #endif
  3462. #if DBG_DUMP
  3463. void
  3464. FlowGraph::Dump(bool onlyOnVerboseMode, const char16 *form)
  3465. {
  3466. if(PHASE_DUMP(Js::FGBuildPhase, this->GetFunc()))
  3467. {
  3468. if (!onlyOnVerboseMode || Js::Configuration::Global.flags.Verbose)
  3469. {
  3470. if (form)
  3471. {
  3472. Output::Print(form);
  3473. }
  3474. this->Dump();
  3475. }
  3476. }
  3477. }
  3478. void
  3479. FlowGraph::Dump()
  3480. {
  3481. Output::Print(_u("\nFlowGraph\n"));
  3482. FOREACH_BLOCK(block, this)
  3483. {
  3484. Loop * loop = block->loop;
  3485. while (loop)
  3486. {
  3487. Output::Print(_u(" "));
  3488. loop = loop->parent;
  3489. }
  3490. block->DumpHeader(false);
  3491. } NEXT_BLOCK;
  3492. Output::Print(_u("\nLoopGraph\n"));
  3493. for (Loop *loop = this->loopList; loop; loop = loop->next)
  3494. {
  3495. Output::Print(_u("\nLoop\n"));
  3496. FOREACH_BLOCK_IN_LOOP(block, loop)
  3497. {
  3498. block->DumpHeader(false);
  3499. }NEXT_BLOCK_IN_LOOP;
  3500. Output::Print(_u("Loop Ends\n"));
  3501. }
  3502. }
void
BasicBlock::DumpHeader(bool insertCR)
{
    // Dump a one-line summary of this block: number, status flags,
    // live and dead pred/succ lists, and loop membership.
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
    Output::Print(_u("BLOCK %d:"), this->number);

    if (this->isDead)
    {
        Output::Print(_u(" **** DEAD ****"));
    }

    // Compensation/break-block annotations (mutually exclusive except isDead).
    if (this->isBreakBlock)
    {
        Output::Print(_u(" **** Break Block ****"));
    }
    else if (this->isAirLockBlock)
    {
        Output::Print(_u(" **** Air lock Block ****"));
    }
    else if (this->isBreakCompensationBlockAtSource)
    {
        Output::Print(_u(" **** Break Source Compensation Code ****"));
    }
    else if (this->isBreakCompensationBlockAtSink)
    {
        Output::Print(_u(" **** Break Sink Compensation Code ****"));
    }
    else if (this->isAirLockCompensationBlock)
    {
        Output::Print(_u(" **** Airlock block Compensation Code ****"));
    }

    // Live predecessor block numbers.
    if (!this->predList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" In("));
        FOREACH_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Live successor block numbers.
    if (!this->succList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" Out("));
        FOREACH_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Dead predecessor block numbers (edges parked by the globopt).
    if (!this->deadPredList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadIn("));
        FOREACH_DEAD_PREDECESSOR_BLOCK(blockPred, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockPred->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_PREDECESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Dead successor block numbers (edges parked by the globopt).
    if (!this->deadSuccList.Empty())
    {
        BOOL fFirst = TRUE;
        Output::Print(_u(" DeadOut("));
        FOREACH_DEAD_SUCCESSOR_BLOCK(blockSucc, this)
        {
            if (!fFirst)
            {
                Output::Print(_u(", "));
            }
            Output::Print(_u("%d"), blockSucc->GetBlockNum());
            fFirst = FALSE;
        }
        NEXT_DEAD_SUCCESSOR_BLOCK;
        Output::Print(_u(")"));
    }

    // Loop membership; on the header block also show call/implicit-call info.
    if (this->loop)
    {
        Output::Print(_u(" Loop(%d) header: %d"), this->loop->loopNumber, this->loop->GetHeadBlock()->GetBlockNum());

        if (this->loop->parent)
        {
            Output::Print(_u(" parent(%d): %d"), this->loop->parent->loopNumber, this->loop->parent->GetHeadBlock()->GetBlockNum());
        }

        if (this->loop->GetHeadBlock() == this)
        {
            Output::SkipToColumn(50);
            Output::Print(_u("Call Exp/Imp: "));
            if (this->loop->GetHasCall())
            {
                Output::Print(_u("yes/"));
            }
            else
            {
                Output::Print(_u(" no/"));
            }
            Output::Print(Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->loop->GetImplicitCallFlags()));
        }
    }

    Output::Print(_u("\n"));
    if (insertCR)
    {
        Output::Print(_u("\n"));
    }
}
void
BasicBlock::Dump()
{
    // Dumping the first instruction (label) will dump the block header as well.
    FOREACH_INSTR_IN_BLOCK(instr, this)
    {
        instr->Dump();
    }
    NEXT_INSTR_IN_BLOCK;
}
void
AddPropertyCacheBucket::Dump() const
{
    // Debug dump of the initial->final type transition tracked by this bucket.
    Assert(this->initialType != nullptr);
    Assert(this->finalType != nullptr);
    Output::Print(_u(" initial type: 0x%x, final type: 0x%x "), this->initialType->GetAddr(), this->finalType->GetAddr());
}
void
ObjTypeGuardBucket::Dump() const
{
    // Debug dump of the guarded property operations tracked by this bucket.
    Assert(this->guardedPropertyOps != nullptr);
    this->guardedPropertyOps->Dump();
}
void
ObjWriteGuardBucket::Dump() const
{
    // Debug dump of the write guards tracked by this bucket.
    Assert(this->writeGuards != nullptr);
    this->writeGuards->Dump();
}
  3656. #endif
  3657. void
  3658. BasicBlock::CleanUpValueMaps()
  3659. {
  3660. // Don't do cleanup if it's been done recently.
  3661. // Landing pad could get optimized twice...
  3662. // We want the same info out the first and second time. So always cleanup.
  3663. // Increasing the cleanup threshold count for asmjs to 500
  3664. uint cleanupCount = (!this->globOptData.globOpt->GetIsAsmJSFunc()) ? CONFIG_FLAG(GoptCleanupThreshold) : CONFIG_FLAG(AsmGoptCleanupThreshold);
  3665. if (!this->IsLandingPad() && this->globOptData.globOpt->instrCountSinceLastCleanUp < cleanupCount)
  3666. {
  3667. return;
  3668. }
  3669. this->globOptData.globOpt->instrCountSinceLastCleanUp = 0;
  3670. JitArenaAllocator* tempAlloc = this->globOptData.globOpt->tempAlloc;
  3671. GlobHashTable *thisTable = this->globOptData.symToValueMap;
  3672. BVSparse<JitArenaAllocator> deadSymsBv(tempAlloc);
  3673. BVSparse<JitArenaAllocator> keepAliveSymsBv(tempAlloc);
  3674. BVSparse<JitArenaAllocator> availableValueNumbers(tempAlloc);
  3675. availableValueNumbers.Copy(this->globOptData.globOpt->byteCodeConstantValueNumbersBv);
  3676. BVSparse<JitArenaAllocator> *upwardExposedUses = this->upwardExposedUses;
  3677. BVSparse<JitArenaAllocator> *upwardExposedFields = this->upwardExposedFields;
  3678. bool isInLoop = !!this->loop;
  3679. BVSparse<JitArenaAllocator> symsInCallSequence(tempAlloc);
  3680. SListBase<IR::Opnd *> * callSequence = this->globOptData.callSequence;
  3681. if (callSequence && !callSequence->Empty())
  3682. {
  3683. FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, callSequence)
  3684. {
  3685. StackSym * sym = opnd->GetStackSym();
  3686. symsInCallSequence.Set(sym->m_id);
  3687. }
  3688. }
  3689. NEXT_SLISTBASE_ENTRY;
  3690. for (uint i = 0; i < thisTable->tableSize; i++)
  3691. {
  3692. FOREACH_SLISTBASE_ENTRY_EDITING(GlobHashBucket, bucket, &thisTable->table[i], iter)
  3693. {
  3694. bool isSymUpwardExposed = upwardExposedUses->Test(bucket.value->m_id) || upwardExposedFields->Test(bucket.value->m_id);
  3695. if (!isSymUpwardExposed && symsInCallSequence.Test(bucket.value->m_id))
  3696. {
  3697. // Don't remove/shrink sym-value pair if the sym is referenced in callSequence even if the sym is dead according to backward data flow.
  3698. // This is possible in some edge cases that an infinite loop is involved when evaluating parameter for a function (between StartCall and Call),
  3699. // there is no backward data flow into the infinite loop block, but non empty callSequence still populates to it in this (forward) pass
  3700. // which causes error when looking up value for the syms in callSequence (cannot find the value).
  3701. // It would cause error to fill out the bailout information for the loop blocks.
  3702. // Remove dead syms from callSequence has some risk because there are various associated counters which need to be consistent.
  3703. continue;
  3704. }
  3705. // Make sure symbol was created before backward pass.
  3706. // If symbols isn't upward exposed, mark it as dead.
  3707. // If a symbol was copy-prop'd in a loop prepass, the upwardExposedUses info could be wrong. So wait until we are out of the loop before clearing it.
  3708. if ((SymID)bucket.value->m_id <= this->globOptData.globOpt->maxInitialSymID && !isSymUpwardExposed
  3709. && (!isInLoop || !this->globOptData.globOpt->prePassCopyPropSym->Test(bucket.value->m_id)))
  3710. {
  3711. Value *val = bucket.element;
  3712. ValueInfo *valueInfo = val->GetValueInfo();
  3713. Sym * sym = bucket.value;
  3714. Sym *symStore = valueInfo->GetSymStore();
  3715. if (symStore && symStore == bucket.value)
  3716. {
  3717. // Keep constants around, as we don't know if there will be further uses
  3718. if (!bucket.element->GetValueInfo()->IsVarConstant() && !bucket.element->GetValueInfo()->HasIntConstantValue())
  3719. {
  3720. // Symbol may still be a copy-prop candidate. Wait before deleting it.
  3721. deadSymsBv.Set(bucket.value->m_id);
  3722. // Make sure the type sym is added to the dead syms vector as well, because type syms are
  3723. // created in backward pass and so their symIds > maxInitialSymID.
  3724. if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
  3725. {
  3726. deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
  3727. }
  3728. }
  3729. availableValueNumbers.Set(val->GetValueNumber());
  3730. }
  3731. else
  3732. {
  3733. // Make sure the type sym is added to the dead syms vector as well, because type syms are
  3734. // created in backward pass and so their symIds > maxInitialSymID. Perhaps we could remove
  3735. // it explicitly here, but would it work alright with the iterator?
  3736. if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
  3737. {
  3738. deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
  3739. }
  3740. // Not a copy-prop candidate; delete it right away.
  3741. iter.RemoveCurrent(thisTable->alloc);
  3742. this->globOptData.liveInt32Syms->Clear(sym->m_id);
  3743. this->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
  3744. this->globOptData.liveFloat64Syms->Clear(sym->m_id);
  3745. this->globOptData.SetChangedSym(sym);
  3746. }
  3747. }
  3748. else
  3749. {
  3750. Sym * sym = bucket.value;
  3751. if (sym->IsPropertySym() && !this->globOptData.liveFields->Test(sym->m_id))
  3752. {
  3753. // Remove propertySyms which are not live anymore.
  3754. iter.RemoveCurrent(thisTable->alloc);
  3755. this->globOptData.liveInt32Syms->Clear(sym->m_id);
  3756. this->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
  3757. this->globOptData.liveFloat64Syms->Clear(sym->m_id);
  3758. }
  3759. else
  3760. {
  3761. // Look at the copy-prop candidate. We don't want to get rid of the data for a symbol which is
  3762. // a copy-prop candidate.
  3763. Value *val = bucket.element;
  3764. ValueInfo *valueInfo = val->GetValueInfo();
  3765. Sym *symStore = valueInfo->GetSymStore();
  3766. if (symStore && symStore != bucket.value)
  3767. {
  3768. keepAliveSymsBv.Set(symStore->m_id);
  3769. if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
  3770. {
  3771. keepAliveSymsBv.Set(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
  3772. }
  3773. }
  3774. availableValueNumbers.Set(val->GetValueNumber());
  3775. }
  3776. }
  3777. } NEXT_SLISTBASE_ENTRY_EDITING;
  3778. }
  3779. deadSymsBv.Minus(&keepAliveSymsBv);
  3780. // Now cleanup exprToValueMap table
  3781. ExprHashTable *thisExprTable = this->globOptData.exprToValueMap;
  3782. bool oldHasCSECandidatesValue = this->globOptData.hasCSECandidates; // Could be false if none need bailout.
  3783. this->globOptData.hasCSECandidates = false;
  3784. for (uint i = 0; i < thisExprTable->tableSize; i++)
  3785. {
  3786. FOREACH_SLISTBASE_ENTRY_EDITING(ExprHashBucket, bucket, &thisExprTable->table[i], iter)
  3787. {
  3788. ExprHash hash = bucket.value;
  3789. ValueNumber src1ValNum = hash.GetSrc1ValueNumber();
  3790. ValueNumber src2ValNum = hash.GetSrc2ValueNumber();
  3791. // If src1Val or src2Val are not available anymore, no point keeping this CSE candidate
  3792. bool removeCurrent = false;
  3793. if ((src1ValNum && !availableValueNumbers.Test(src1ValNum))
  3794. || (src2ValNum && !availableValueNumbers.Test(src2ValNum)))
  3795. {
  3796. removeCurrent = true;
  3797. }
  3798. else
  3799. {
  3800. // If we are keeping this value, make sure we also keep the symStore in the value table
  3801. removeCurrent = true; // Remove by default, unless it's set to false later below.
  3802. Value *val = bucket.element;
  3803. if (val)
  3804. {
  3805. Sym *symStore = val->GetValueInfo()->GetSymStore();
  3806. if (symStore)
  3807. {
  3808. Value *symStoreVal = this->globOptData.FindValue(symStore);
  3809. if (symStoreVal && symStoreVal->GetValueNumber() == val->GetValueNumber())
  3810. {
  3811. removeCurrent = false;
  3812. deadSymsBv.Clear(symStore->m_id);
  3813. if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
  3814. {
  3815. deadSymsBv.Clear(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
  3816. }
  3817. }
  3818. }
  3819. }
  3820. }
  3821. if(removeCurrent)
  3822. {
  3823. iter.RemoveCurrent(thisExprTable->alloc);
  3824. }
  3825. else
  3826. {
  3827. this->globOptData.hasCSECandidates = oldHasCSECandidatesValue;
  3828. }
  3829. } NEXT_SLISTBASE_ENTRY_EDITING;
  3830. }
  3831. FOREACH_BITSET_IN_SPARSEBV(dead_id, &deadSymsBv)
  3832. {
  3833. thisTable->Clear(dead_id);
  3834. Sym* sym = this->func->m_symTable->Find(dead_id);
  3835. this->globOptData.SetChangedSym(sym);
  3836. }
  3837. NEXT_BITSET_IN_SPARSEBV;
  3838. if (!deadSymsBv.IsEmpty())
  3839. {
  3840. if (this->func->IsJitInDebugMode())
  3841. {
  3842. // Do not remove non-temp local vars from liveVarSyms (i.e. do not let them become dead).
  3843. // We will need to restore all initialized/used so far non-temp local during bail out.
  3844. // (See BackwardPass::ProcessBailOutInfo)
  3845. Assert(this->func->m_nonTempLocalVars);
  3846. BVSparse<JitArenaAllocator> tempBv(tempAlloc);
  3847. tempBv.Minus(&deadSymsBv, this->func->m_nonTempLocalVars);
  3848. this->globOptData.liveVarSyms->Minus(&tempBv);
  3849. #if DBG
  3850. tempBv.And(this->globOptData.liveInt32Syms, this->func->m_nonTempLocalVars);
  3851. AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveInt32Syms?");
  3852. tempBv.And(this->globOptData.liveLossyInt32Syms, this->func->m_nonTempLocalVars);
  3853. AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveLossyInt32Syms?");
  3854. tempBv.And(this->globOptData.liveFloat64Syms, this->func->m_nonTempLocalVars);
  3855. AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveFloat64Syms?");
  3856. #endif
  3857. }
  3858. else
  3859. {
  3860. this->globOptData.liveVarSyms->Minus(&deadSymsBv);
  3861. }
  3862. this->globOptData.liveInt32Syms->Minus(&deadSymsBv);
  3863. this->globOptData.liveLossyInt32Syms->Minus(&deadSymsBv);
  3864. this->globOptData.liveFloat64Syms->Minus(&deadSymsBv);
  3865. }
  3866. JitAdelete(this->globOptData.globOpt->alloc, upwardExposedUses);
  3867. this->upwardExposedUses = nullptr;
  3868. JitAdelete(this->globOptData.globOpt->alloc, upwardExposedFields);
  3869. this->upwardExposedFields = nullptr;
  3870. if (this->cloneStrCandidates)
  3871. {
  3872. JitAdelete(this->globOptData.globOpt->alloc, this->cloneStrCandidates);
  3873. this->cloneStrCandidates = nullptr;
  3874. }
  3875. }
// Merge the GlobOpt value maps of all predecessors into this block's state.
//
// High-level phases (in order):
//   1. Build a temporary GlobOptBlockData by cloning the first usable predecessor
//      and merging each subsequent one (loop back-edges are skipped during the
//      loop prepass, since the first pass over a loop runs optimistically).
//   2. Copy the merged data into this->globOptData; during a loop prepass,
//      capture loop bailout info and return early (no compensation is inserted).
//   3. For loop headers, re-specialize value infos to match the merged
//      int/float (and SIMD, when enabled) type-specialization bit-vectors.
//   4. For each predecessor whose live-type state disagrees with the merged
//      state, insert conversion code — directly in the predecessor when it has
//      a single flow path here, otherwise in a new "airlock" block on the edge.
//   5. Release predecessor block data that is no longer needed, and for loop
//      headers snapshot the on-entry liveness sets for back-edge matching.
void
BasicBlock::MergePredBlocksValueMaps(GlobOpt* globOpt)
{
    Assert(!globOpt->isCallHelper);
    // We keep a local temporary copy for the merge
    GlobOptBlockData blockData(this->globOptData.curFunc);
    if (!globOpt->isRecursiveCallOnLandingPad)
    {
        blockData.NullOutBlockData(globOpt, globOpt->func);
    }
    else
    {
        // If we are going over the landing pad again after field PRE, just start again
        // with the value table where we left off.
        blockData.CopyBlockData(&this->globOptData);
        return;
    }

    // Syms whose merged value info requires per-predecessor compensation (filled by MergeBlockData).
    BVSparse<JitArenaAllocator> symsRequiringCompensation(globOpt->tempAlloc);
    {
        BVSparse<JitArenaAllocator> symsCreatedForMerge(globOpt->tempAlloc);
        bool forceTypeSpecOnLoopHeader = true;
        FOREACH_PREDECESSOR_BLOCK(pred, this)
        {
            // Drop an empty call sequence so it doesn't leak into the merge.
            if (pred->globOptData.callSequence && pred->globOptData.callSequence->Empty())
            {
                JitAdelete(globOpt->alloc, pred->globOptData.callSequence);
                pred->globOptData.callSequence = nullptr;
            }
            if (this->isLoopHeader && globOpt->IsLoopPrePass() && globOpt->prePassLoop == this->loop && this->loop->IsDescendentOrSelf(pred->loop))
            {
                // Loop back-edge.
                // First pass on loop runs optimistically, without doing transforms.
                // Skip this edge for now.
                continue;
            }

            // __edge is provided by the FOREACH_PREDECESSOR_BLOCK macro.
            PathDependentInfo *const pathDependentInfo = __edge->GetPathDependentInfo();
            PathDependentInfoToRestore pathDependentInfoToRestore;
            if (pathDependentInfo)
            {
                pathDependentInfoToRestore = globOpt->UpdatePathDependentInfo(pathDependentInfo);
            }

            Assert(pred->GetDataUseCount());

            // First pred?
            if (blockData.symToValueMap == nullptr)
            {
                // Only one edge?
                if (pred->GetSuccList()->HasOne() && this->GetPredList()->HasOne() && this->loop == nullptr)
                {
                    // Straight-line flow: steal the predecessor's data instead of cloning it.
                    blockData.ReuseBlockData(&pred->globOptData);

                    // Don't need to restore the old value info
                    pathDependentInfoToRestore.Clear();
                }
                else
                {
                    blockData.CloneBlockData(this, pred);
                }
            }
            else
            {
                const bool isLoopPrePass = globOpt->IsLoopPrePass();
                blockData.MergeBlockData(
                    this,
                    pred,
                    isLoopPrePass ? nullptr : &symsRequiringCompensation,
                    isLoopPrePass ? nullptr : &symsCreatedForMerge,
                    forceTypeSpecOnLoopHeader);
                forceTypeSpecOnLoopHeader = false; // can force type-spec on the loop header only for the first back edge.
            }

            // Restore the value for the next edge
            if (pathDependentInfo)
            {
                globOpt->RestorePathDependentInfo(pathDependentInfo, pathDependentInfoToRestore);
                __edge->ClearPathDependentInfo(globOpt->alloc);
            }
        } NEXT_PREDECESSOR_BLOCK;
    }

    // Consider: We can recreate values for hoisted field so it can copy prop out of the loop
    if (blockData.symToValueMap == nullptr)
    {
        // No usable predecessor (e.g. all edges were skipped back-edges): start fresh.
        Assert(blockData.hoistableFields == nullptr);
        blockData.InitBlockData(globOpt, globOpt->func);
    }
    else if (blockData.hoistableFields)
    {
        Assert(globOpt->TrackHoistableFields());
        blockData.hoistableFields->And(this->globOptData.liveFields);
    }

    if (!globOpt->DoObjTypeSpec())
    {
        // Object type specialization is off, but if copy prop is on (e.g., /force:fieldhoist) we're not clearing liveFields,
        // so we may be letting type syms slip through this block.
        globOpt->KillAllObjectTypes(this->globOptData.liveFields);
    }

    // From here on, this->globOptData and blockData hold the same (merged) state.
    this->globOptData.CopyBlockData(&blockData);

    if (globOpt->IsLoopPrePass())
    {
        Assert(this->loop);
        if(globOpt->DoBoundCheckHoist())
        {
            globOpt->SetInductionVariableValueNumbers(&blockData);
        }
        if (this->isLoopHeader && globOpt->rootLoopPrePass == this->loop)
        {
            // Capture bail out info in case we have optimization that needs it
            Assert(this->loop->bailOutInfo == nullptr);
            IR::Instr * firstInstr = this->GetFirstInstr();
            this->loop->bailOutInfo = JitAnew(globOpt->func->m_alloc, BailOutInfo,
                firstInstr->GetByteCodeOffset(), firstInstr->m_func);
            globOpt->FillBailOutInfo(this, this->loop->bailOutInfo);
#if ENABLE_DEBUG_CONFIG_OPTIONS
            this->loop->bailOutInfo->bailOutOpcode = Js::OpCode::LoopBodyStart;
#endif
        }
        // If loop pre-pass, don't insert convert from type-spec to var
        return;
    }

    this->CleanUpValueMaps();
    Sym *symIV = nullptr;

    // Clean up the syms requiring compensation by checking the final value in the merged block to see if the sym still requires
    // compensation. All the while, create a mapping from sym to value info in the merged block. This dictionary helps avoid a
    // value lookup in the merged block per predecessor.
    SymToValueInfoMap symsRequiringCompensationToMergedValueInfoMap(globOpt->tempAlloc);
    if(!symsRequiringCompensation.IsEmpty())
    {
        const SymTable *const symTable = func->m_symTable;
        FOREACH_BITSET_IN_SPARSEBV(id, &symsRequiringCompensation)
        {
            Sym *const sym = symTable->Find(id);
            Assert(sym);
            Value *const value = blockData.FindValue(sym);
            if(!value)
            {
                continue;
            }
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsArrayValueInfo())
            {
                // Only array value infos need compensation here.
                continue;
            }

            // At least one new sym was created while merging and associated with the merged value info, so those syms will
            // require compensation in predecessors. For now, the dead store phase is relied upon to remove compensation that is
            // dead due to no further uses of the new sym.
            symsRequiringCompensationToMergedValueInfoMap.Add(sym, valueInfo);
        } NEXT_BITSET_IN_SPARSEBV;
        symsRequiringCompensation.ClearAll();
    }

    if (this->isLoopHeader)
    {
        // tempBv is a shared scratch vector; it must be empty on entry and is cleared at the end of this section.
        BVSparse<JitArenaAllocator> * tempBv = globOpt->tempBv;

        // Values on the back-edge in the prepass may be conservative for syms defined in the loop, and type specialization in
        // the prepass is not reflective of the value, but rather, is used to determine whether the sym should be specialized
        // around the loop. Additionally, some syms that are used before defined in the loop may be specialized in the loop
        // header despite not being specialized in the landing pad. Now that the type specialization bit-vectors are merged,
        // specialize the corresponding value infos in the loop header too.

        Assert(tempBv->IsEmpty());
        Loop *const loop = this->loop;
        SymTable *const symTable = func->m_symTable;
        JitArenaAllocator *const alloc = globOpt->alloc;

        // Int-specialized syms
        tempBv->Or(loop->likelyIntSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->And(blockData.liveInt32Syms);
        tempBv->Minus(blockData.liveLossyInt32Syms);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);
            Value *const value = blockData.FindValue(varSym);
            Assert(value);
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsInt())
            {
                globOpt->ChangeValueInfo(nullptr, value, valueInfo->SpecializeToInt32(alloc));
            }
        } NEXT_BITSET_IN_SPARSEBV;

        // Float-specialized syms
        tempBv->Or(loop->likelyNumberSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->forceFloat64SymsOnEntry);
        tempBv->And(blockData.liveFloat64Syms);
        GlobOptBlockData &landingPadBlockData = loop->landingPad->globOptData;
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);

            // If the type-spec sym is null or if the sym is not float-specialized in the loop landing pad, the sym may have
            // been merged to float on a loop back-edge when it was live as float on the back-edge, and live as int in the loop
            // header. In this case, compensation inserted in the loop landing pad will use BailOutNumberOnly, and so it is
            // guaranteed that the value will be float. Otherwise, if the type-spec sym exists, its field can be checked to see
            // if it's prevented from being anything but a number.
            StackSym *const typeSpecSym = varSym->GetFloat64EquivSym(nullptr);
            if(!typeSpecSym ||
                typeSpecSym->m_requiresBailOnNotNumber ||
                !landingPadBlockData.IsFloat64TypeSpecialized(varSym))
            {
                Value *const value = blockData.FindValue(varSym);
                if(value)
                {
                    ValueInfo *const valueInfo = value->GetValueInfo();
                    if(!valueInfo->IsNumber())
                    {
                        globOpt->ChangeValueInfo(this, value, valueInfo->SpecializeToFloat64(alloc));
                    }
                }
                else
                {
                    this->globOptData.SetValue(globOpt->NewGenericValue(ValueType::Float), varSym);
                }
            }
        } NEXT_BITSET_IN_SPARSEBV;

#ifdef ENABLE_SIMDJS
        // SIMD_JS
        // Simd128 type-spec syms
        BVSparse<JitArenaAllocator> tempBv2(globOpt->tempAlloc);

        // For syms we made alive in loop header because of hoisting, use-before-def, or def in Loop body, set their valueInfo to definite.
        // Make live on header AND in one of forceSimd128* or likelySimd128* vectors.
        tempBv->Or(loop->likelySimd128F4SymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->likelySimd128I4SymsUsedBeforeDefined);
        tempBv->Or(loop->forceSimd128F4SymsOnEntry);
        tempBv->Or(loop->forceSimd128I4SymsOnEntry);
        tempBv2.Or(blockData.liveSimd128F4Syms, blockData.liveSimd128I4Syms);
        tempBv->And(&tempBv2);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym * typeSpecSym = nullptr;
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);

            if (blockData.liveSimd128F4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128F4EquivSym(nullptr);
                if (!typeSpecSym || !landingPadBlockData.IsSimd128F4TypeSpecialized(varSym))
                {
                    Value *const value = blockData.FindValue(varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Float32x4())
                        {
                            globOpt->ChangeValueInfo(this, value, valueInfo->SpecializeToSimd128F4(alloc));
                        }
                    }
                    else
                    {
                        this->globOptData.SetValue(globOpt->NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Float32x4), varSym), varSym);
                    }
                }
            }
            else if (blockData.liveSimd128I4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128I4EquivSym(nullptr);
                if (!typeSpecSym || !landingPadBlockData.IsSimd128I4TypeSpecialized(varSym))
                {
                    Value *const value = blockData.FindValue(varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Int32x4())
                        {
                            globOpt->ChangeValueInfo(this, value, valueInfo->SpecializeToSimd128I4(alloc));
                        }
                    }
                    else
                    {
                        this->globOptData.SetValue(globOpt->NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Int32x4), varSym), varSym);
                    }
                }
            }
            else
            {
                Assert(UNREACHED);
            }
        } NEXT_BITSET_IN_SPARSEBV;
#endif
        tempBv->ClearAll();
    }

    // We need to handle the case where a symbol is type-spec'd coming from some predecessors,
    // but not from others.
    //
    // We can do this by inserting the right conversion in the predecessor block, but we
    // can only do this if we are the first successor of that block, since the previous successors
    // would have already been processed. Instead, we'll need to break the edge and insert a block
    // (airlock block) to put in the conversion code.
    Assert(globOpt->tempBv->IsEmpty());

    BVSparse<JitArenaAllocator> tempBv2(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv3(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv4(globOpt->tempAlloc);
#ifdef ENABLE_SIMDJS
    // SIMD_JS
    BVSparse<JitArenaAllocator> simd128F4SymsToUnbox(globOpt->tempAlloc);
    BVSparse<JitArenaAllocator> simd128I4SymsToUnbox(globOpt->tempAlloc);
#endif

    FOREACH_PREDECESSOR_EDGE_EDITING(edge, this, iter)
    {
        BasicBlock *pred = edge->GetPred();

        if (pred->loop && pred->loop->GetHeadBlock() == this)
        {
            pred->DecrementDataUseCount();
            // Skip loop back-edges. We will handle these when we get to the exit blocks.
            continue;
        }

        BasicBlock *orgPred = nullptr;
        if (pred->isAirLockCompensationBlock)
        {
            // Look through an existing compensation block to the real predecessor.
            Assert(pred->GetPredList()->HasOne());
            orgPred = pred;
            pred = (pred->GetPredList()->Head())->GetPred();
        }

        // Lossy int in the merged block, and no int in the predecessor - need a lossy conversion to int
        tempBv2.Minus(blockData.liveLossyInt32Syms, pred->globOptData.liveInt32Syms);

        // Lossless int in the merged block, and no lossless int in the predecessor - need a lossless conversion to int
        // NOTE(review): this line reads this->globOptData where siblings read blockData; equivalent here since
        // CopyBlockData(&blockData) already ran above — confirm if refactoring.
        tempBv3.Minus(blockData.liveInt32Syms, this->globOptData.liveLossyInt32Syms);
        globOpt->tempBv->Minus(pred->globOptData.liveInt32Syms, pred->globOptData.liveLossyInt32Syms);
        tempBv3.Minus(globOpt->tempBv);

        // Live as var in the merged block, but not in the predecessor - need a conversion to var.
        globOpt->tempBv->Minus(blockData.liveVarSyms, pred->globOptData.liveVarSyms);
        tempBv4.Minus(blockData.liveFloat64Syms, pred->globOptData.liveFloat64Syms);

        bool symIVNeedsSpecializing = (symIV && !pred->globOptData.liveInt32Syms->Test(symIV->m_id) && !tempBv3.Test(symIV->m_id));

#ifdef ENABLE_SIMDJS
        // SIMD_JS
        simd128F4SymsToUnbox.Minus(blockData.liveSimd128F4Syms, pred->globOptData.liveSimd128F4Syms);
        simd128I4SymsToUnbox.Minus(blockData.liveSimd128I4Syms, pred->globOptData.liveSimd128I4Syms);
#endif

        if (!globOpt->tempBv->IsEmpty() ||
            !tempBv2.IsEmpty() ||
            !tempBv3.IsEmpty() ||
            !tempBv4.IsEmpty() ||
#ifdef ENABLE_SIMDJS
            !simd128F4SymsToUnbox.IsEmpty() ||
            !simd128I4SymsToUnbox.IsEmpty() ||
#endif
            symIVNeedsSpecializing ||
            symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
        {
            // We can't un-specialize a symbol in a predecessor if we've already processed
            // a successor of that block. Instead, insert a new block on the flow edge
            // (an airlock block) and do the un-specialization there.
            //
            // Alternatively, the current block could be an exit block out of this loop, and so the predecessor may exit the
            // loop. In that case, if the predecessor may continue into the loop without exiting, then we need an airlock block
            // to do the appropriate conversions only on the exit path (preferring not to do the conversions inside the loop).
            // If, on the other hand, the predecessor always flows into the current block, then it always exits, so we don't need
            // an airlock block and can just do the conversions in the predecessor.
            if (pred->GetSuccList()->Head()->GetSucc() != this||
                (pred->loop && pred->loop->parent == this->loop && pred->GetSuccList()->Count() > 1))
            {
                BasicBlock *airlockBlock = nullptr;
                if (!orgPred)
                {
                    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !globOpt->IsLoopPrePass())
                    {
                        Output::Print(_u("TRACE: "));
                        Output::Print(_u("Inserting airlock block to convert syms to var between block %d and %d\n"),
                            pred->GetBlockNum(), this->GetBlockNum());
                        Output::Flush();
                    }
                    airlockBlock = globOpt->func->m_fg->InsertAirlockBlock(edge);
                }
                else
                {
                    // Reuse the pre-existing compensation block as the airlock.
                    Assert(orgPred->isAirLockCompensationBlock);
                    airlockBlock = orgPred;
                    pred->DecrementDataUseCount();
                    airlockBlock->isAirLockCompensationBlock = false; // This is airlock block now. So remove the attribute.
                }
                globOpt->CloneBlockData(airlockBlock, pred);
                pred = airlockBlock;
            }
            if (!globOpt->tempBv->IsEmpty())
            {
                globOpt->ToVar(globOpt->tempBv, pred);
            }
            if (!tempBv2.IsEmpty())
            {
                globOpt->ToInt32(&tempBv2, pred, true /* lossy */);
            }
            if (!tempBv3.IsEmpty())
            {
                globOpt->ToInt32(&tempBv3, pred, false /* lossy */);
            }
            if (!tempBv4.IsEmpty())
            {
                globOpt->ToFloat64(&tempBv4, pred);
            }
            if (symIVNeedsSpecializing)
            {
                globOpt->tempBv->ClearAll();
                globOpt->tempBv->Set(symIV->m_id);
                globOpt->ToInt32(globOpt->tempBv, pred, false /* lossy */);
            }
            if(symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
            {
                globOpt->InsertValueCompensation(pred, symsRequiringCompensationToMergedValueInfoMap);
            }

#ifdef ENABLE_SIMDJS
            // SIMD_JS
            if (!simd128F4SymsToUnbox.IsEmpty())
            {
                globOpt->ToTypeSpec(&simd128F4SymsToUnbox, pred, TySimd128F4, IR::BailOutSimd128F4Only);
            }

            if (!simd128I4SymsToUnbox.IsEmpty())
            {
                globOpt->ToTypeSpec(&simd128I4SymsToUnbox, pred, TySimd128I4, IR::BailOutSimd128I4Only);
            }
#endif
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    FOREACH_PREDECESSOR_EDGE(edge, this)
    {
        // Peak Memory optimization:
        // These are in an arena, but putting them on the free list greatly reduces
        // the peak memory used by the global optimizer for complex flow graphs.
        BasicBlock *pred = edge->GetPred();
        if (!this->isLoopHeader || this->loop != pred->loop)
        {
            // Skip airlock compensation block as we are not going to walk this block.
            if (pred->isAirLockCompensationBlock)
            {
                pred->DecrementDataUseCount();
                Assert(pred->GetPredList()->HasOne());
                pred = (pred->GetPredList()->Head())->GetPred();
            }

            if (pred->DecrementDataUseCount() == 0 && (!this->loop || this->loop->landingPad != pred))
            {
                if (!(pred->GetSuccList()->HasOne() && this->GetPredList()->HasOne() && this->loop == nullptr))
                {
                    pred->globOptData.DeleteBlockData();
                }
                else
                {
                    // Single straight-line edge: the data was reused above, so just null it out.
                    pred->globOptData.NullOutBlockData(globOpt, globOpt->func);
                }
            }
        }
    } NEXT_PREDECESSOR_EDGE;

    globOpt->tempBv->ClearAll();
    Assert(!globOpt->IsLoopPrePass());   // We already early return if we are in prepass

    if (this->isLoopHeader)
    {
        Loop *const loop = this->loop;

        // Save values live on loop entry, such that we can adjust the state of the
        // values on the back-edge to match.
        loop->varSymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->varSymsOnEntry->Copy(this->globOptData.liveVarSyms);

        loop->int32SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->int32SymsOnEntry->Copy(this->globOptData.liveInt32Syms);

        loop->lossyInt32SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->lossyInt32SymsOnEntry->Copy(this->globOptData.liveLossyInt32Syms);

        loop->float64SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->float64SymsOnEntry->Copy(this->globOptData.liveFloat64Syms);

#ifdef ENABLE_SIMDJS
        // SIMD_JS
        loop->simd128F4SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->simd128F4SymsOnEntry->Copy(this->globOptData.liveSimd128F4Syms);

        loop->simd128I4SymsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->simd128I4SymsOnEntry->Copy(this->globOptData.liveSimd128I4Syms);
#endif

        loop->liveFieldsOnEntry = JitAnew(globOpt->alloc, BVSparse<JitArenaAllocator>, globOpt->alloc);
        loop->liveFieldsOnEntry->Copy(this->globOptData.liveFields);

        if(globOpt->DoBoundCheckHoist() && loop->inductionVariables)
        {
            globOpt->FinalizeInductionVariables(loop, &blockData);
            if(globOpt->DoLoopCountBasedBoundCheckHoist())
            {
                globOpt->DetermineDominatingLoopCountableBlock(loop, this);
            }
        }
    }
    else if (!this->loop)
    {
        this->SetDataUseCount(this->GetSuccList()->Count());
    }
    else if(this == this->loop->dominatingLoopCountableBlock)
    {
        globOpt->DetermineLoopCount(this->loop);
    }
}
  4349. void GlobOpt::CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock)
  4350. {
  4351. toBlock->globOptData.CloneBlockData(toBlock, fromBlock);
  4352. }
// Clone every Value in toBlock's sym-to-value map so that toBlock owns its own
// Value objects, while preserving value-number sharing: symbols that shared a
// value number before the clone share the same cloned Value afterwards.
// Also rebuilds toData->valuesToKillOnCalls (via TrackMergedValueForKills) and
// finally applies call kills with ProcessValueKills.
// NOTE(review): fromData is not referenced in this body; presumably toData was
// already copied from fromData by the caller — confirm against call sites.
void
GlobOpt::CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    // Fresh kill-set for the cloned block; populated as each value is cloned.
    ValueSet *const valuesToKillOnCalls = JitAnew(this->alloc, ValueSet, this->alloc);
    toData->valuesToKillOnCalls = valuesToKillOnCalls;

    // Values are shared between symbols with the same ValueNumber.
    // Use a dictionary to share the clone values.
    ValueSetByValueNumber *const valuesCreatedForClone = this->valuesCreatedForClone;
    Assert(valuesCreatedForClone);
    Assert(valuesCreatedForClone->Count() == 0);
    DebugOnly(ValueSetByValueNumber originalValues(tempAlloc, 64));

    const uint tableSize = toData->symToValueMap->tableSize;
    SListBase<GlobHashBucket> *const table = toData->symToValueMap->table;
    for (uint i = 0; i < tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &table[i])
        {
            Value *value = bucket.element;
            ValueNumber valueNum = value->GetValueNumber();
#if DBG
            // Ensure that the set of values in fromData contains only one value per value number. Byte-code constant values
            // are reused in multiple blocks without cloning, so exclude those value numbers.
            {
                Value *const previouslyClonedOriginalValue = originalValues.Lookup(valueNum);
                if (previouslyClonedOriginalValue)
                {
                    if (!byteCodeConstantValueNumbersBv->Test(valueNum))
                    {
                        Assert(value == previouslyClonedOriginalValue);
                    }
                }
                else
                {
                    originalValues.Add(value);
                }
            }
#endif

            // Reuse the clone made for this value number, if any; otherwise clone now.
            Value *newValue = valuesCreatedForClone->Lookup(valueNum);
            if (!newValue)
            {
                newValue = CopyValue(value, valueNum);
                TrackMergedValueForKills(newValue, toData, nullptr);
                valuesCreatedForClone->Add(newValue);
            }
            bucket.element = newValue;
        } NEXT_SLISTBASE_ENTRY;
    }

    // valuesCreatedForClone is a reused member; leave it empty for the next clone.
    valuesCreatedForClone->Clear();

    ProcessValueKills(toBlock, toData);
}
  4403. PRECandidatesList * GlobOpt::FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc)
  4404. {
  4405. // Iterate over the value table looking for propertySyms which are candidates to
  4406. // pre-load in the landing pad for field PRE
  4407. GlobHashTable *valueTable = block->globOptData.symToValueMap;
  4408. Loop *loop = block->loop;
  4409. PRECandidatesList *candidates = nullptr;
  4410. for (uint i = 0; i < valueTable->tableSize; i++)
  4411. {
  4412. FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &valueTable->table[i])
  4413. {
  4414. Sym *sym = bucket.value;
  4415. if (!sym->IsPropertySym())
  4416. {
  4417. continue;
  4418. }
  4419. PropertySym *propertySym = sym->AsPropertySym();
  4420. // Field should be live on the back-edge
  4421. if (!block->globOptData.liveFields->Test(propertySym->m_id))
  4422. {
  4423. continue;
  4424. }
  4425. // Field should be live in the landing pad as well
  4426. if (!loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
  4427. {
  4428. continue;
  4429. }
  4430. Value *value = bucket.element;
  4431. Sym *symStore = value->GetValueInfo()->GetSymStore();
  4432. if (!symStore || !symStore->IsStackSym())
  4433. {
  4434. continue;
  4435. }
  4436. // Check upwardExposed in case of:
  4437. // s1 = 0;
  4438. // loop:
  4439. // = o.x;
  4440. // foo();
  4441. // o.x = s1;
  4442. // Can't thrash s1 in loop top.
  4443. if (!symStore->AsStackSym()->IsSingleDef() || loop->GetHeadBlock()->upwardExposedUses->Test(symStore->m_id))
  4444. {
  4445. // If symStore isn't singleDef, we need to make sure it still has the same value.
  4446. // This usually fails if we are not aggressive at transferring values in the prepass.
  4447. Value **pSymStoreFromValue = valueTable->Get(symStore->m_id);
  4448. // Consider: We should be fine if symStore isn't live in landing pad...
  4449. if (!pSymStoreFromValue || (*pSymStoreFromValue)->GetValueNumber() != value->GetValueNumber())
  4450. {
  4451. continue;
  4452. }
  4453. }
  4454. BasicBlock *landingPad = loop->landingPad;
  4455. Value *landingPadValue = landingPad->globOptData.FindValue(propertySym);
  4456. if (!landingPadValue)
  4457. {
  4458. // Value should be added as initial value or already be there.
  4459. return nullptr;
  4460. }
  4461. IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
  4462. if (!ldInstr)
  4463. {
  4464. continue;
  4465. }
  4466. if (!candidates)
  4467. {
  4468. candidates = Anew(alloc, PRECandidatesList, alloc);
  4469. }
  4470. candidates->Prepend(&bucket);
  4471. } NEXT_SLISTBASE_ENTRY;
  4472. }
  4473. return candidates;
  4474. }
  4475. void GlobOpt::InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
  4476. {
  4477. if (toBlock->isLoopHeader // isLoopBackEdge
  4478. && toBlock->cloneStrCandidates
  4479. && !IsLoopPrePass())
  4480. {
  4481. Loop *loop = toBlock->loop;
  4482. BasicBlock *landingPad = loop->landingPad;
  4483. const SymTable *const symTable = func->m_symTable;
  4484. Assert(tempBv->IsEmpty());
  4485. tempBv->And(toBlock->cloneStrCandidates, fromData->isTempSrc);
  4486. FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
  4487. {
  4488. StackSym *const sym = (StackSym *)symTable->Find(id);
  4489. Assert(sym);
  4490. if (!landingPad->globOptData.liveVarSyms->Test(id)
  4491. || !fromData->liveVarSyms->Test(id))
  4492. {
  4493. continue;
  4494. }
  4495. Value * landingPadValue = landingPad->globOptData.FindValue(sym);
  4496. if (landingPadValue == nullptr)
  4497. {
  4498. continue;
  4499. }
  4500. Value * loopValue = fromData->FindValue(sym);
  4501. if (loopValue == nullptr)
  4502. {
  4503. continue;
  4504. }
  4505. ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
  4506. ValueInfo *loopValueInfo = loopValue->GetValueInfo();
  4507. if (landingPadValueInfo->IsLikelyString()
  4508. && loopValueInfo->IsLikelyString())
  4509. {
  4510. IR::Instr *cloneStr = IR::Instr::New(Js::OpCode::CloneStr, this->func);
  4511. IR::RegOpnd *opnd = IR::RegOpnd::New(sym, IRType::TyVar, this->func);
  4512. cloneStr->SetDst(opnd);
  4513. cloneStr->SetSrc1(opnd);
  4514. if (loop->bailOutInfo->bailOutInstr)
  4515. {
  4516. loop->bailOutInfo->bailOutInstr->InsertBefore(cloneStr);
  4517. }
  4518. else
  4519. {
  4520. landingPad->InsertAfter(cloneStr);
  4521. }
  4522. toData->isTempSrc->Set(id);
  4523. }
  4524. }
  4525. NEXT_BITSET_IN_SPARSEBV;
  4526. tempBv->ClearAll();
  4527. }
  4528. }