BackwardPass.cpp 369 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #define INLINEEMETAARG_COUNT 3
// Constructs a BackwardPass over 'func' for one of the three supported phases
// (asserted below). The pass borrows 'globOpt' for allocator access and
// phase-dependent queries; per-run state (tempAlloc, current loop/region/instr
// trackers) starts out null and is set up in Optimize().
BackwardPass::BackwardPass(Func * func, GlobOpt * globOpt, Js::Phase tag)
    : func(func), globOpt(globOpt), tag(tag), currentPrePassLoop(nullptr), tempAlloc(nullptr),
    preOpBailOutInstrToProcess(nullptr),
    considerSymAsRealUseInNoImplicitCallUses(nullptr),
    isCollectionPass(false), currentRegion(nullptr),
    collectionPassSubPhase(CollectionPassSubPhase::None),
    isLoopPrepass(false)
{
    // Those are the only three phases BackwardPass will use currently
    Assert(tag == Js::BackwardPhase || tag == Js::DeadStorePhase || tag == Js::CaptureByteCodeRegUsePhase);
#if DBG
    // The CaptureByteCodeRegUse phase is just a collection phase, no mutations should occur
    this->isCollectionPass = tag == Js::CaptureByteCodeRegUsePhase;
#endif
    // Per-run statistics counters.
    this->implicitCallBailouts = 0;
    this->fieldOpts = 0;
#if DBG_DUMP
    this->numDeadStore = 0;
    this->numMarkTempNumber = 0;
    this->numMarkTempNumberTransferred = 0;
    this->numMarkTempObject = 0;
#endif
}
  30. void
  31. BackwardPass::DoSetDead(IR::Opnd * opnd, bool isDead) const
  32. {
  33. // Note: Dead bit on the Opnd records flow-based liveness.
  34. // This is distinct from isLastUse, which records lexical last-ness.
  35. if (isDead && this->tag == Js::BackwardPhase && !this->IsPrePass())
  36. {
  37. opnd->SetIsDead();
  38. }
  39. else if (this->tag == Js::DeadStorePhase)
  40. {
  41. // Set or reset in DeadStorePhase.
  42. // CSE could make previous dead operand not the last use, so reset it.
  43. opnd->SetIsDead(isDead);
  44. }
  45. }
  46. bool
  47. BackwardPass::DoByteCodeUpwardExposedUsed() const
  48. {
  49. return (
  50. (this->tag == Js::DeadStorePhase && this->func->hasBailout) ||
  51. (this->tag == Js::BackwardPhase && this->func->HasTry() && this->func->DoOptimizeTry())
  52. #if DBG
  53. || tag == Js::CaptureByteCodeRegUsePhase
  54. #endif
  55. );
  56. }
  57. bool BackwardPass::DoCaptureByteCodeUpwardExposedUsed() const
  58. {
  59. #if DBG
  60. return (this->tag == Js::CaptureByteCodeRegUsePhase || this->tag == Js::DeadStorePhase) &&
  61. this->DoByteCodeUpwardExposedUsed() &&
  62. !func->IsJitInDebugMode() &&
  63. !this->func->GetJITFunctionBody()->IsAsmJsMode() &&
  64. this->func->DoGlobOpt();
  65. #else
  66. return false;
  67. #endif
  68. }
  69. bool
  70. BackwardPass::DoMarkTempNumbers() const
  71. {
  72. #if FLOATVAR
  73. return false;
  74. #else
  75. // only mark temp number on the dead store phase
  76. return (tag == Js::DeadStorePhase) && !PHASE_OFF(Js::MarkTempPhase, this->func) &&
  77. !PHASE_OFF(Js::MarkTempNumberPhase, this->func) && func->DoFastPaths() && (!this->func->HasTry());
  78. #endif
  79. }
  80. bool
  81. BackwardPass::SatisfyMarkTempObjectsConditions() const {
  82. return !PHASE_OFF(Js::MarkTempPhase, this->func) &&
  83. !PHASE_OFF(Js::MarkTempObjectPhase, this->func) &&
  84. func->DoGlobOpt() && func->GetHasTempObjectProducingInstr() &&
  85. !func->IsJitInDebugMode() &&
  86. func->DoGlobOptsForGeneratorFunc();
  87. // Why MarkTempObject is disabled under debugger:
  88. // We add 'identified so far dead non-temp locals' to byteCodeUpwardExposedUsed in ProcessBailOutInfo,
  89. // this may cause MarkTempObject to convert some temps back to non-temp when it sees a 'transferred exposed use'
  90. // from a temp to non-temp. That's in general not a supported conversion (while non-temp -> temp is fine).
  91. }
  92. bool
  93. BackwardPass::DoMarkTempObjects() const
  94. {
  95. // only mark temp object on the backward store phase
  96. return (tag == Js::BackwardPhase) && SatisfyMarkTempObjectsConditions();
  97. }
  98. bool
  99. BackwardPass::DoMarkTempNumbersOnTempObjects() const
  100. {
  101. return !PHASE_OFF(Js::MarkTempNumberOnTempObjectPhase, this->func) && DoMarkTempNumbers() && this->func->GetHasMarkTempObjects();
  102. }
  103. #if DBG
  104. bool
  105. BackwardPass::DoMarkTempObjectVerify() const
  106. {
  107. // only mark temp object on the backward store phase
  108. return (tag == Js::DeadStorePhase) && SatisfyMarkTempObjectsConditions();
  109. }
  110. #endif
  111. // static
  112. bool
  113. BackwardPass::DoDeadStore(Func* func)
  114. {
  115. return
  116. !PHASE_OFF(Js::DeadStorePhase, func) &&
  117. (!func->HasTry() || func->DoOptimizeTry());
  118. }
  119. bool
  120. BackwardPass::DoDeadStore() const
  121. {
  122. return
  123. this->tag == Js::DeadStorePhase &&
  124. DoDeadStore(this->func);
  125. }
  126. bool
  127. BackwardPass::DoDeadStoreSlots() const
  128. {
  129. // only dead store fields if glob opt is on to generate the trackable fields bitvector
  130. return (tag == Js::DeadStorePhase && this->func->DoGlobOpt()
  131. && (!this->func->HasTry()));
  132. }
  133. // Whether dead store is enabled for given func and sym.
  134. // static
  135. bool
  136. BackwardPass::DoDeadStore(Func* func, StackSym* sym)
  137. {
  138. // Dead store is disabled under debugger for non-temp local vars.
  139. return
  140. DoDeadStore(func) &&
  141. !(func->IsJitInDebugMode() && sym->HasByteCodeRegSlot() && func->IsNonTempLocalVar(sym->GetByteCodeRegSlot())) &&
  142. func->DoGlobOptsForGeneratorFunc();
  143. }
  144. bool
  145. BackwardPass::DoTrackNegativeZero() const
  146. {
  147. return
  148. !PHASE_OFF(Js::TrackIntUsagePhase, func) &&
  149. !PHASE_OFF(Js::TrackNegativeZeroPhase, func) &&
  150. func->DoGlobOpt() &&
  151. !IsPrePass() &&
  152. !func->IsJitInDebugMode() &&
  153. func->DoGlobOptsForGeneratorFunc();
  154. }
  155. bool
  156. BackwardPass::DoTrackBitOpsOrNumber() const
  157. {
  158. #if defined(_WIN32) && defined(TARGET_64)
  159. return
  160. !PHASE_OFF1(Js::TypedArrayVirtualPhase) &&
  161. tag == Js::BackwardPhase &&
  162. func->DoGlobOpt() &&
  163. !IsPrePass() &&
  164. !func->IsJitInDebugMode() &&
  165. func->DoGlobOptsForGeneratorFunc();
  166. #else
  167. return false;
  168. #endif
  169. }
  170. bool
  171. BackwardPass::DoTrackIntOverflow() const
  172. {
  173. return
  174. !PHASE_OFF(Js::TrackIntUsagePhase, func) &&
  175. !PHASE_OFF(Js::TrackIntOverflowPhase, func) &&
  176. tag == Js::BackwardPhase &&
  177. !IsPrePass() &&
  178. globOpt->DoLossyIntTypeSpec() &&
  179. !func->IsJitInDebugMode() &&
  180. func->DoGlobOptsForGeneratorFunc();
  181. }
  182. bool
  183. BackwardPass::DoTrackCompoundedIntOverflow() const
  184. {
  185. return
  186. !PHASE_OFF(Js::TrackCompoundedIntOverflowPhase, func) &&
  187. DoTrackIntOverflow() && !func->IsTrackCompoundedIntOverflowDisabled();
  188. }
  189. bool
  190. BackwardPass::DoTrackNon32BitOverflow() const
  191. {
  192. // enabled only for IA
  193. #if defined(_M_IX86) || defined(_M_X64)
  194. return true;
  195. #else
  196. return false;
  197. #endif
  198. }
  199. void
  200. BackwardPass::CleanupBackwardPassInfoInFlowGraph()
  201. {
  202. if (!this->func->m_fg->hasBackwardPassInfo)
  203. {
  204. // No information to clean up
  205. return;
  206. }
  207. // The backward pass temp arena has already been deleted, we can just reset the data
  208. FOREACH_BLOCK_IN_FUNC_DEAD_OR_ALIVE(block, this->func)
  209. {
  210. block->upwardExposedUses = nullptr;
  211. block->upwardExposedFields = nullptr;
  212. block->typesNeedingKnownObjectLayout = nullptr;
  213. block->slotDeadStoreCandidates = nullptr;
  214. block->byteCodeUpwardExposedUsed = nullptr;
  215. block->liveFixedFields = nullptr;
  216. #if DBG
  217. block->byteCodeRestoreSyms = nullptr;
  218. block->excludeByteCodeUpwardExposedTracking = nullptr;
  219. #endif
  220. block->tempNumberTracker = nullptr;
  221. block->tempObjectTracker = nullptr;
  222. #if DBG
  223. block->tempObjectVerifyTracker = nullptr;
  224. #endif
  225. block->stackSymToFinalType = nullptr;
  226. block->stackSymToGuardedProperties = nullptr;
  227. block->stackSymToWriteGuardsMap = nullptr;
  228. block->cloneStrCandidates = nullptr;
  229. block->noImplicitCallUses = nullptr;
  230. block->noImplicitCallNoMissingValuesUses = nullptr;
  231. block->noImplicitCallNativeArrayUses = nullptr;
  232. block->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  233. block->noImplicitCallArrayLengthSymUses = nullptr;
  234. block->couldRemoveNegZeroBailoutForDef = nullptr;
  235. if (block->loop != nullptr)
  236. {
  237. block->loop->hasDeadStoreCollectionPass = false;
  238. block->loop->hasDeadStorePrepass = false;
  239. }
  240. }
  241. NEXT_BLOCK_IN_FUNC_DEAD_OR_ALIVE;
  242. }
  243. /*
  244. * We Insert ArgIns at the start of the function for all the formals.
  245. * Unused formals will be deadstored during the deadstore pass.
  246. * We need ArgIns only for the outermost function(inliner).
  247. */
  248. void
  249. BackwardPass::InsertArgInsForFormals()
  250. {
  251. if (func->IsStackArgsEnabled() && !func->GetJITFunctionBody()->HasImplicitArgIns())
  252. {
  253. IR::Instr * insertAfterInstr = func->m_headInstr->m_next;
  254. AssertMsg(insertAfterInstr->IsLabelInstr(), "First Instr of the first block should always have a label");
  255. Js::ArgSlot paramsCount = insertAfterInstr->m_func->GetJITFunctionBody()->GetInParamsCount() - 1;
  256. IR::Instr * argInInstr = nullptr;
  257. for (Js::ArgSlot argumentIndex = 1; argumentIndex <= paramsCount; argumentIndex++)
  258. {
  259. IR::SymOpnd * srcOpnd;
  260. StackSym * symSrc = StackSym::NewParamSlotSym(argumentIndex + 1, func);
  261. StackSym * symDst = StackSym::New(func);
  262. IR::RegOpnd * dstOpnd = IR::RegOpnd::New(symDst, TyVar, func);
  263. func->SetArgOffset(symSrc, (argumentIndex + LowererMD::GetFormalParamOffset()) * MachPtr);
  264. srcOpnd = IR::SymOpnd::New(symSrc, TyVar, func);
  265. argInInstr = IR::Instr::New(Js::OpCode::ArgIn_A, dstOpnd, srcOpnd, func);
  266. insertAfterInstr->InsertAfter(argInInstr);
  267. insertAfterInstr = argInInstr;
  268. AssertMsg(!func->HasStackSymForFormal(argumentIndex - 1), "Already has a stack sym for this formal?");
  269. this->func->TrackStackSymForFormalIndex(argumentIndex - 1, symDst);
  270. }
  271. if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase) && paramsCount > 0)
  272. {
  273. Output::Print(_u("StackArgFormals : %s (%d) :Inserting ArgIn_A for LdSlot (formals) in the start of Deadstore pass. \n"), func->GetJITFunctionBody()->GetDisplayName(), func->GetFunctionNumber());
  274. Output::Flush();
  275. }
  276. }
  277. }
  278. void
  279. BackwardPass::MarkScopeObjSymUseForStackArgOpt()
  280. {
  281. IR::Instr * instr = this->currentInstr;
  282. BasicBlock *block = this->currentBlock;
  283. if (tag == Js::DeadStorePhase)
  284. {
  285. if (instr->DoStackArgsOpt() && !block->IsLandingPad() && instr->m_func->GetScopeObjSym() != nullptr && this->DoByteCodeUpwardExposedUsed())
  286. {
  287. this->currentBlock->byteCodeUpwardExposedUsed->Set(instr->m_func->GetScopeObjSym()->m_id);
  288. }
  289. }
  290. }
  291. void
  292. BackwardPass::ProcessBailOnStackArgsOutOfActualsRange()
  293. {
  294. IR::Instr * instr = this->currentInstr;
  295. if (tag == Js::DeadStorePhase &&
  296. (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
  297. instr->HasBailOutInfo() && !IsPrePass())
  298. {
  299. if (instr->DoStackArgsOpt())
  300. {
  301. AssertMsg(instr->GetBailOutKind() & IR::BailOnStackArgsOutOfActualsRange, "Stack args bail out is not set when the optimization is turned on? ");
  302. if (instr->GetBailOutKind() & ~IR::BailOnStackArgsOutOfActualsRange)
  303. {
  304. //Make sure that in absence of potential LazyBailOut and BailOutOnImplicitCallsPreOp, we only have BailOnStackArgsOutOfActualsRange bit set
  305. Assert((BailOutInfo::WithoutLazyBailOut(instr->GetBailOutKind() & ~IR::BailOutOnImplicitCallsPreOp)) == IR::BailOnStackArgsOutOfActualsRange);
  306. //We are sure at this point, that we will not have any implicit calls as we wouldn't have done this optimization in the first place.
  307. instr->SetBailOutKind(IR::BailOnStackArgsOutOfActualsRange);
  308. }
  309. }
  310. else if (instr->GetBailOutKind() & IR::BailOnStackArgsOutOfActualsRange)
  311. {
  312. //If we don't decide to do StackArgs, then remove the bail out at this point.
  313. //We would have optimistically set the bailout in the forward pass, and by the end of forward pass - we
  314. //turned off stack args for some reason. So we are removing it in the deadstore pass.
  315. IR::BailOutKind bailOutKind = instr->GetBailOutKind() & ~IR::BailOnStackArgsOutOfActualsRange;
  316. if (bailOutKind == IR::BailOutInvalid)
  317. {
  318. instr->ClearBailOutInfo();
  319. }
  320. else
  321. {
  322. instr->SetBailOutKind(bailOutKind);
  323. }
  324. }
  325. }
  326. }
// Driver for the whole pass: sets up the temp arena and the per-run bit
// vectors / maps, walks every block backwards via OptBlock, then performs
// phase-specific post-processing (memop loop removal, scratch cleanup, stats).
void
BackwardPass::Optimize()
{
    if (tag == Js::BackwardPhase && PHASE_OFF(tag, this->func))
    {
        return;
    }
    if (tag == Js::CaptureByteCodeRegUsePhase && (!PHASE_ENABLED(CaptureByteCodeRegUsePhase, this->func) || !DoCaptureByteCodeUpwardExposedUsed()))
    {
        return;
    }
    if (tag == Js::DeadStorePhase)
    {
        if (!this->func->DoLoopFastPaths() || !this->func->DoFastPaths())
        {
            // arguments[] access is similar to the array fast path, hence disable it when the array fast path is disabled.
            // loopFastPath is always true unless explicitly disabled;
            // defaultDoFastPath can be false when the source code size is huge.
            func->SetHasStackArgs(false);
        }
        InsertArgInsForFormals();
    }
    // Arena for all pass-local data; freed automatically when this function returns.
    NoRecoverMemoryJitArenaAllocator localAlloc(tag == Js::BackwardPhase? _u("BE-Backward") : _u("BE-DeadStore"),
        this->func->m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
    this->tempAlloc = &localAlloc;
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        this->func->DumpHeader();
    }
#endif
    this->CleanupBackwardPassInfoInFlowGraph();
    // Info about whether a sym is used in a way in which -0 differs from +0, or whether the sym is used in a way in which an
    // int32 overflow when generating the value of the sym matters, in the current block. The info is transferred to
    // instructions that define the sym in the current block as they are encountered. The info in these bit vectors is discarded
    // after optimizing each block, so the only info that remains for GlobOpt is that which is transferred to instructions.
    // NOTE: these members point at stack-local objects; they are only valid for the
    // duration of this call.
    BVSparse<JitArenaAllocator> localNegativeZeroDoesNotMatterBySymId(tempAlloc);
    negativeZeroDoesNotMatterBySymId = &localNegativeZeroDoesNotMatterBySymId;
    BVSparse<JitArenaAllocator> localSymUsedOnlyForBitOpsBySymId(tempAlloc);
    symUsedOnlyForBitOpsBySymId = &localSymUsedOnlyForBitOpsBySymId;
    BVSparse<JitArenaAllocator> localSymUsedOnlyForNumberBySymId(tempAlloc);
    symUsedOnlyForNumberBySymId = &localSymUsedOnlyForNumberBySymId;
    BVSparse<JitArenaAllocator> localIntOverflowDoesNotMatterBySymId(tempAlloc);
    intOverflowDoesNotMatterBySymId = &localIntOverflowDoesNotMatterBySymId;
    BVSparse<JitArenaAllocator> localIntOverflowDoesNotMatterInRangeBySymId(tempAlloc);
    intOverflowDoesNotMatterInRangeBySymId = &localIntOverflowDoesNotMatterInRangeBySymId;
    BVSparse<JitArenaAllocator> localCandidateSymsRequiredToBeInt(tempAlloc);
    candidateSymsRequiredToBeInt = &localCandidateSymsRequiredToBeInt;
    BVSparse<JitArenaAllocator> localCandidateSymsRequiredToBeLossyInt(tempAlloc);
    candidateSymsRequiredToBeLossyInt = &localCandidateSymsRequiredToBeLossyInt;
    intOverflowCurrentlyMattersInRange = true;
    FloatSymEquivalenceMap localFloatSymEquivalenceMap(tempAlloc);
    floatSymEquivalenceMap = &localFloatSymEquivalenceMap;
    NumberTempRepresentativePropertySymMap localNumberTempRepresentativePropertySym(tempAlloc);
    numberTempRepresentativePropertySym = &localNumberTempRepresentativePropertySym;
    // The core walk: visit every block (including dead ones) in reverse order.
    FOREACH_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE(block, this->func)
    {
        this->OptBlock(block);
    }
    NEXT_BLOCK_BACKWARD_IN_FUNC_DEAD_OR_ALIVE;
    if (this->tag == Js::DeadStorePhase && !PHASE_OFF(Js::MemOpPhase, this->func))
    {
        this->RemoveEmptyLoops();
    }
    this->func->m_fg->hasBackwardPassInfo = true;
    if(DoTrackCompoundedIntOverflow())
    {
        // Tracking int overflow makes use of a scratch field in stack syms, which needs to be cleared
        func->m_symTable->ClearStackSymScratch();
    }
#if DBG_DUMP
    if (PHASE_STATS(this->tag, this->func))
    {
        this->func->DumpHeader();
        Output::Print(this->tag == Js::BackwardPhase? _u("Backward Phase Stats:\n") : _u("Deadstore Phase Stats:\n"));
        if (this->DoDeadStore())
        {
            Output::Print(_u(" Deadstore : %3d\n"), this->numDeadStore);
        }
        if (this->DoMarkTempNumbers())
        {
            Output::Print(_u(" Temp Number : %3d\n"), this->numMarkTempNumber);
            Output::Print(_u(" Transferred Temp Number: %3d\n"), this->numMarkTempNumberTransferred);
        }
        if (this->DoMarkTempObjects())
        {
            Output::Print(_u(" Temp Object : %3d\n"), this->numMarkTempObject);
        }
    }
#endif
}
  418. void
  419. BackwardPass::MergeSuccBlocksInfo(BasicBlock * block)
  420. {
  421. // Can't reuse the bv in the current block, because its successor can be itself.
  422. TempNumberTracker * tempNumberTracker = nullptr;
  423. TempObjectTracker * tempObjectTracker = nullptr;
  424. #if DBG
  425. TempObjectVerifyTracker * tempObjectVerifyTracker = nullptr;
  426. #endif
  427. HashTable<AddPropertyCacheBucket> * stackSymToFinalType = nullptr;
  428. HashTable<ObjTypeGuardBucket> * stackSymToGuardedProperties = nullptr;
  429. HashTable<ObjWriteGuardBucket> * stackSymToWriteGuardsMap = nullptr;
  430. BVSparse<JitArenaAllocator> * cloneStrCandidates = nullptr;
  431. BVSparse<JitArenaAllocator> * noImplicitCallUses = nullptr;
  432. BVSparse<JitArenaAllocator> * noImplicitCallNoMissingValuesUses = nullptr;
  433. BVSparse<JitArenaAllocator> * noImplicitCallNativeArrayUses = nullptr;
  434. BVSparse<JitArenaAllocator> * noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  435. BVSparse<JitArenaAllocator> * noImplicitCallArrayLengthSymUses = nullptr;
  436. BVSparse<JitArenaAllocator> * upwardExposedUses = nullptr;
  437. BVSparse<JitArenaAllocator> * upwardExposedFields = nullptr;
  438. BVSparse<JitArenaAllocator> * typesNeedingKnownObjectLayout = nullptr;
  439. BVSparse<JitArenaAllocator> * slotDeadStoreCandidates = nullptr;
  440. BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = nullptr;
  441. BVSparse<JitArenaAllocator> * couldRemoveNegZeroBailoutForDef = nullptr;
  442. BVSparse<JitArenaAllocator> * liveFixedFields = nullptr;
  443. #if DBG
  444. uint byteCodeLocalsCount = func->GetJITFunctionBody()->GetLocalsCount();
  445. StackSym ** byteCodeRestoreSyms = nullptr;
  446. BVSparse<JitArenaAllocator> * excludeByteCodeUpwardExposedTracking = nullptr;
  447. #endif
  448. Assert(!block->isDead || block->GetSuccList()->Empty());
  449. if (this->DoByteCodeUpwardExposedUsed())
  450. {
  451. byteCodeUpwardExposedUsed = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  452. #if DBG
  453. byteCodeRestoreSyms = JitAnewArrayZ(this->tempAlloc, StackSym *, byteCodeLocalsCount);
  454. excludeByteCodeUpwardExposedTracking = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  455. #endif
  456. }
  457. #if DBG
  458. if (!IsCollectionPass() && this->DoMarkTempObjectVerify())
  459. {
  460. tempObjectVerifyTracker = JitAnew(this->tempAlloc, TempObjectVerifyTracker, this->tempAlloc, block->loop != nullptr);
  461. }
  462. #endif
  463. if (!block->isDead)
  464. {
  465. bool keepUpwardExposed = (this->tag == Js::BackwardPhase);
  466. JitArenaAllocator *upwardExposedArena = nullptr;
  467. if(!IsCollectionPass())
  468. {
  469. upwardExposedArena = keepUpwardExposed ? this->globOpt->alloc : this->tempAlloc;
  470. upwardExposedUses = JitAnew(upwardExposedArena, BVSparse<JitArenaAllocator>, upwardExposedArena);
  471. upwardExposedFields = JitAnew(upwardExposedArena, BVSparse<JitArenaAllocator>, upwardExposedArena);
  472. if (this->tag == Js::DeadStorePhase)
  473. {
  474. liveFixedFields = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  475. typesNeedingKnownObjectLayout = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  476. }
  477. if (this->DoDeadStoreSlots())
  478. {
  479. slotDeadStoreCandidates = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  480. }
  481. if (this->DoMarkTempNumbers())
  482. {
  483. tempNumberTracker = JitAnew(this->tempAlloc, TempNumberTracker, this->tempAlloc, block->loop != nullptr);
  484. }
  485. if (this->DoMarkTempObjects())
  486. {
  487. tempObjectTracker = JitAnew(this->tempAlloc, TempObjectTracker, this->tempAlloc, block->loop != nullptr);
  488. }
  489. noImplicitCallUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  490. noImplicitCallNoMissingValuesUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  491. noImplicitCallNativeArrayUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  492. noImplicitCallJsArrayHeadSegmentSymUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  493. noImplicitCallArrayLengthSymUses = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  494. if (this->tag == Js::BackwardPhase)
  495. {
  496. cloneStrCandidates = JitAnew(this->globOpt->alloc, BVSparse<JitArenaAllocator>, this->globOpt->alloc);
  497. }
  498. else
  499. {
  500. couldRemoveNegZeroBailoutForDef = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  501. }
  502. }
  503. bool firstSucc = true;
  504. FOREACH_SUCCESSOR_BLOCK(blockSucc, block)
  505. {
  506. #if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
  507. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  508. #endif
  509. // save the byteCodeUpwardExposedUsed from deleting for the block right after the memop loop
  510. if (this->tag == Js::DeadStorePhase && !this->IsPrePass() && globOpt->HasMemOp(block->loop) && blockSucc->loop != block->loop)
  511. {
  512. Assert(block->loop->memOpInfo->inductionVariablesUsedAfterLoop == nullptr);
  513. block->loop->memOpInfo->inductionVariablesUsedAfterLoop = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  514. block->loop->memOpInfo->inductionVariablesUsedAfterLoop->Or(blockSucc->byteCodeUpwardExposedUsed);
  515. block->loop->memOpInfo->inductionVariablesUsedAfterLoop->Or(blockSucc->upwardExposedUses);
  516. }
  517. bool deleteData = false;
  518. if (!blockSucc->isLoopHeader && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
  519. {
  520. Assert(blockSucc->GetDataUseCount() != 0);
  521. deleteData = (blockSucc->DecrementDataUseCount() == 0);
  522. if (blockSucc->GetFirstInstr()->m_next->m_opcode == Js::OpCode::SpeculatedLoadFence)
  523. {
  524. // We hold on to data for these blocks until the arena gets cleared due to unusual data lifetimes.
  525. deleteData = false;
  526. blockSucc->IncrementDataUseCount();
  527. }
  528. }
  529. #if DBG
  530. if (excludeByteCodeUpwardExposedTracking && blockSucc->excludeByteCodeUpwardExposedTracking)
  531. {
  532. excludeByteCodeUpwardExposedTracking->Or(blockSucc->excludeByteCodeUpwardExposedTracking);
  533. }
  534. #endif
  535. Assert((byteCodeUpwardExposedUsed == nullptr) == !this->DoByteCodeUpwardExposedUsed());
  536. if (byteCodeUpwardExposedUsed && blockSucc->byteCodeUpwardExposedUsed)
  537. {
  538. byteCodeUpwardExposedUsed->Or(blockSucc->byteCodeUpwardExposedUsed);
  539. if (this->tag == Js::DeadStorePhase)
  540. {
  541. #if DBG
  542. for (uint i = 0; i < byteCodeLocalsCount; i++)
  543. {
  544. if (byteCodeRestoreSyms[i] == nullptr)
  545. {
  546. byteCodeRestoreSyms[i] = blockSucc->byteCodeRestoreSyms[i];
  547. }
  548. else
  549. {
  550. Assert(blockSucc->byteCodeRestoreSyms[i] == nullptr
  551. || byteCodeRestoreSyms[i] == blockSucc->byteCodeRestoreSyms[i]);
  552. }
  553. }
  554. #endif
  555. if (deleteData)
  556. {
  557. // byteCodeUpwardExposedUsed is required to populate the writeThroughSymbolsSet for the try region. So, don't delete it in the backwards pass.
  558. JitAdelete(this->tempAlloc, blockSucc->byteCodeUpwardExposedUsed);
  559. blockSucc->byteCodeUpwardExposedUsed = nullptr;
  560. }
  561. }
  562. #if DBG
  563. if (deleteData)
  564. {
  565. JitAdeleteArray(this->tempAlloc, byteCodeLocalsCount, blockSucc->byteCodeRestoreSyms);
  566. blockSucc->byteCodeRestoreSyms = nullptr;
  567. JitAdelete(this->tempAlloc, blockSucc->excludeByteCodeUpwardExposedTracking);
  568. blockSucc->excludeByteCodeUpwardExposedTracking = nullptr;
  569. }
  570. #endif
  571. }
  572. else
  573. {
  574. Assert(blockSucc->byteCodeUpwardExposedUsed == nullptr);
  575. Assert(blockSucc->byteCodeRestoreSyms == nullptr);
  576. Assert(blockSucc->excludeByteCodeUpwardExposedTracking == nullptr);
  577. }
  578. if(IsCollectionPass())
  579. {
  580. continue;
  581. }
  582. Assert((blockSucc->upwardExposedUses != nullptr)
  583. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop))));
  584. Assert((blockSucc->upwardExposedFields != nullptr)
  585. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop))));
  586. Assert((blockSucc->typesNeedingKnownObjectLayout != nullptr)
  587. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  588. || this->tag != Js::DeadStorePhase);
  589. Assert((blockSucc->slotDeadStoreCandidates != nullptr)
  590. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  591. || !this->DoDeadStoreSlots());
  592. Assert((blockSucc->tempNumberTracker != nullptr)
  593. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  594. || !this->DoMarkTempNumbers());
  595. Assert((blockSucc->tempObjectTracker != nullptr)
  596. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  597. || !this->DoMarkTempObjects());
  598. Assert((blockSucc->tempObjectVerifyTracker != nullptr)
  599. || (blockSucc->isLoopHeader && (this->IsPrePass() || blockSucc->loop->IsDescendentOrSelf(block->loop)))
  600. || !this->DoMarkTempObjectVerify());
  601. if (this->tag == Js::DeadStorePhase && blockSucc->liveFixedFields != nullptr)
  602. {
  603. liveFixedFields->Or(blockSucc->liveFixedFields);
  604. JitAdelete(this->tempAlloc, blockSucc->liveFixedFields);
  605. blockSucc->liveFixedFields = nullptr;
  606. }
  607. if (blockSucc->upwardExposedUses != nullptr)
  608. {
  609. upwardExposedUses->Or(blockSucc->upwardExposedUses);
  610. if (deleteData && (!keepUpwardExposed
  611. || (this->IsPrePass() && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)))
  612. {
  613. JitAdelete(upwardExposedArena, blockSucc->upwardExposedUses);
  614. blockSucc->upwardExposedUses = nullptr;
  615. }
  616. }
  617. if (blockSucc->upwardExposedFields != nullptr)
  618. {
  619. upwardExposedFields->Or(blockSucc->upwardExposedFields);
  620. if (deleteData && (!keepUpwardExposed
  621. || (this->IsPrePass() && blockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)))
  622. {
  623. JitAdelete(upwardExposedArena, blockSucc->upwardExposedFields);
  624. blockSucc->upwardExposedFields = nullptr;
  625. }
  626. }
  627. if (blockSucc->typesNeedingKnownObjectLayout != nullptr)
  628. {
  629. typesNeedingKnownObjectLayout->Or(blockSucc->typesNeedingKnownObjectLayout);
  630. if (deleteData)
  631. {
  632. JitAdelete(this->tempAlloc, blockSucc->typesNeedingKnownObjectLayout);
  633. blockSucc->typesNeedingKnownObjectLayout = nullptr;
  634. }
  635. }
  636. if (blockSucc->slotDeadStoreCandidates != nullptr)
  637. {
  638. slotDeadStoreCandidates->And(blockSucc->slotDeadStoreCandidates);
  639. if (deleteData)
  640. {
  641. JitAdelete(this->tempAlloc, blockSucc->slotDeadStoreCandidates);
  642. blockSucc->slotDeadStoreCandidates = nullptr;
  643. }
  644. }
  645. if (blockSucc->tempNumberTracker != nullptr)
  646. {
  647. Assert((blockSucc->loop != nullptr) == blockSucc->tempNumberTracker->HasTempTransferDependencies());
  648. tempNumberTracker->MergeData(blockSucc->tempNumberTracker, deleteData);
  649. if (deleteData)
  650. {
  651. blockSucc->tempNumberTracker = nullptr;
  652. }
  653. }
  654. if (blockSucc->tempObjectTracker != nullptr)
  655. {
  656. Assert((blockSucc->loop != nullptr) == blockSucc->tempObjectTracker->HasTempTransferDependencies());
  657. tempObjectTracker->MergeData(blockSucc->tempObjectTracker, deleteData);
  658. if (deleteData)
  659. {
  660. blockSucc->tempObjectTracker = nullptr;
  661. }
  662. }
  663. #if DBG
  664. if (blockSucc->tempObjectVerifyTracker != nullptr)
  665. {
  666. Assert((blockSucc->loop != nullptr) == blockSucc->tempObjectVerifyTracker->HasTempTransferDependencies());
  667. tempObjectVerifyTracker->MergeData(blockSucc->tempObjectVerifyTracker, deleteData);
  668. if (deleteData)
  669. {
  670. blockSucc->tempObjectVerifyTracker = nullptr;
  671. }
  672. }
  673. #endif
  674. PHASE_PRINT_TRACE(Js::ObjTypeSpecStorePhase, this->func,
  675. _u("ObjTypeSpecStore: func %s, edge %d => %d: "),
  676. this->func->GetDebugNumberSet(debugStringBuffer),
  677. block->GetBlockNum(), blockSucc->GetBlockNum());
  678. auto fixupFrom = [block, blockSucc, upwardExposedUses, this](Bucket<AddPropertyCacheBucket> &bucket)
  679. {
  680. AddPropertyCacheBucket *fromData = &bucket.element;
  681. if (fromData->GetInitialType() == nullptr ||
  682. fromData->GetFinalType() == fromData->GetInitialType())
  683. {
  684. return;
  685. }
  686. this->InsertTypeTransitionsAtPriorSuccessors(block, blockSucc, bucket.value, fromData, upwardExposedUses);
  687. };
  688. auto fixupTo = [blockSucc, upwardExposedUses, this](Bucket<AddPropertyCacheBucket> &bucket)
  689. {
  690. AddPropertyCacheBucket *toData = &bucket.element;
  691. if (toData->GetInitialType() == nullptr ||
  692. toData->GetFinalType() == toData->GetInitialType())
  693. {
  694. return;
  695. }
  696. this->InsertTypeTransitionAtBlock(blockSucc, bucket.value, toData, upwardExposedUses);
  697. };
  698. if (blockSucc->stackSymToFinalType != nullptr)
  699. {
  700. #if DBG_DUMP
  701. if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
  702. {
  703. blockSucc->stackSymToFinalType->Dump();
  704. }
  705. #endif
  706. if (firstSucc)
  707. {
  708. stackSymToFinalType = blockSucc->stackSymToFinalType->Copy();
  709. }
  710. else if (stackSymToFinalType != nullptr)
  711. {
  712. if (this->IsPrePass())
  713. {
  714. stackSymToFinalType->And(blockSucc->stackSymToFinalType);
  715. }
  716. else
  717. {
  718. // Insert any type transitions that can't be merged past this point.
  719. stackSymToFinalType->AndWithFixup(blockSucc->stackSymToFinalType, fixupFrom, fixupTo);
  720. }
  721. }
  722. else if (!this->IsPrePass())
  723. {
  724. FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, blockSucc->stackSymToFinalType)
  725. {
  726. fixupTo(bucket);
  727. }
  728. NEXT_HASHTABLE_ENTRY;
  729. }
  730. if (deleteData)
  731. {
  732. blockSucc->stackSymToFinalType->Delete();
  733. blockSucc->stackSymToFinalType = nullptr;
  734. }
  735. }
  736. else
  737. {
  738. PHASE_PRINT_TRACE(Js::ObjTypeSpecStorePhase, this->func, _u("null\n"));
  739. if (stackSymToFinalType)
  740. {
  741. if (!this->IsPrePass())
  742. {
  743. FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, stackSymToFinalType)
  744. {
  745. fixupFrom(bucket);
  746. }
  747. NEXT_HASHTABLE_ENTRY;
  748. }
  749. stackSymToFinalType->Delete();
  750. stackSymToFinalType = nullptr;
  751. }
  752. }
  753. if (tag == Js::BackwardPhase)
  754. {
  755. if (blockSucc->cloneStrCandidates != nullptr)
  756. {
  757. Assert(cloneStrCandidates != nullptr);
  758. cloneStrCandidates->Or(blockSucc->cloneStrCandidates);
  759. if (deleteData)
  760. {
  761. JitAdelete(this->globOpt->alloc, blockSucc->cloneStrCandidates);
  762. blockSucc->cloneStrCandidates = nullptr;
  763. }
  764. }
  765. #if DBG_DUMP
  766. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  767. {
  768. char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  769. Output::Print(_u("ObjTypeSpec: top function %s (%s), function %s (%s), write guard symbols on edge %d => %d: "),
  770. this->func->GetTopFunc()->GetJITFunctionBody()->GetDisplayName(),
  771. this->func->GetTopFunc()->GetDebugNumberSet(debugStringBuffer),
  772. this->func->GetJITFunctionBody()->GetDisplayName(),
  773. this->func->GetDebugNumberSet(debugStringBuffer2), block->GetBlockNum(),
  774. blockSucc->GetBlockNum());
  775. }
  776. #endif
  777. if (blockSucc->stackSymToWriteGuardsMap != nullptr)
  778. {
  779. #if DBG_DUMP
  780. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  781. {
  782. Output::Print(_u("\n"));
  783. blockSucc->stackSymToWriteGuardsMap->Dump();
  784. }
  785. #endif
  786. if (stackSymToWriteGuardsMap == nullptr)
  787. {
  788. stackSymToWriteGuardsMap = blockSucc->stackSymToWriteGuardsMap->Copy();
  789. }
  790. else
  791. {
  792. stackSymToWriteGuardsMap->Or(
  793. blockSucc->stackSymToWriteGuardsMap, &BackwardPass::MergeWriteGuards);
  794. }
  795. if (deleteData)
  796. {
  797. blockSucc->stackSymToWriteGuardsMap->Delete();
  798. blockSucc->stackSymToWriteGuardsMap = nullptr;
  799. }
  800. }
  801. else
  802. {
  803. #if DBG_DUMP
  804. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  805. {
  806. Output::Print(_u("null\n"));
  807. }
  808. #endif
  809. }
  810. }
  811. else
  812. {
  813. #if DBG_DUMP
  814. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  815. {
  816. char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  817. Output::Print(_u("ObjTypeSpec: top function %s (%s), function %s (%s), guarded property operations on edge %d => %d: \n"),
  818. this->func->GetTopFunc()->GetJITFunctionBody()->GetDisplayName(),
  819. this->func->GetTopFunc()->GetDebugNumberSet(debugStringBuffer),
  820. this->func->GetJITFunctionBody()->GetDisplayName(),
  821. this->func->GetDebugNumberSet(debugStringBuffer2),
  822. block->GetBlockNum(), blockSucc->GetBlockNum());
  823. }
  824. #endif
  825. if (blockSucc->stackSymToGuardedProperties != nullptr)
  826. {
  827. #if DBG_DUMP
  828. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  829. {
  830. blockSucc->stackSymToGuardedProperties->Dump();
  831. Output::Print(_u("\n"));
  832. }
  833. #endif
  834. if (stackSymToGuardedProperties == nullptr)
  835. {
  836. stackSymToGuardedProperties = blockSucc->stackSymToGuardedProperties->Copy();
  837. }
  838. else
  839. {
  840. stackSymToGuardedProperties->Or(
  841. blockSucc->stackSymToGuardedProperties, &BackwardPass::MergeGuardedProperties);
  842. }
  843. if (deleteData)
  844. {
  845. blockSucc->stackSymToGuardedProperties->Delete();
  846. blockSucc->stackSymToGuardedProperties = nullptr;
  847. }
  848. }
  849. else
  850. {
  851. #if DBG_DUMP
  852. if (PHASE_VERBOSE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  853. {
  854. Output::Print(_u("null\n"));
  855. }
  856. #endif
  857. }
  858. if (blockSucc->couldRemoveNegZeroBailoutForDef != nullptr)
  859. {
  860. couldRemoveNegZeroBailoutForDef->And(blockSucc->couldRemoveNegZeroBailoutForDef);
  861. if (deleteData)
  862. {
  863. JitAdelete(this->tempAlloc, blockSucc->couldRemoveNegZeroBailoutForDef);
  864. blockSucc->couldRemoveNegZeroBailoutForDef = nullptr;
  865. }
  866. }
  867. }
  868. if (blockSucc->noImplicitCallUses != nullptr)
  869. {
  870. noImplicitCallUses->Or(blockSucc->noImplicitCallUses);
  871. if (deleteData)
  872. {
  873. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallUses);
  874. blockSucc->noImplicitCallUses = nullptr;
  875. }
  876. }
  877. if (blockSucc->noImplicitCallNoMissingValuesUses != nullptr)
  878. {
  879. noImplicitCallNoMissingValuesUses->Or(blockSucc->noImplicitCallNoMissingValuesUses);
  880. if (deleteData)
  881. {
  882. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallNoMissingValuesUses);
  883. blockSucc->noImplicitCallNoMissingValuesUses = nullptr;
  884. }
  885. }
  886. if (blockSucc->noImplicitCallNativeArrayUses != nullptr)
  887. {
  888. noImplicitCallNativeArrayUses->Or(blockSucc->noImplicitCallNativeArrayUses);
  889. if (deleteData)
  890. {
  891. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallNativeArrayUses);
  892. blockSucc->noImplicitCallNativeArrayUses = nullptr;
  893. }
  894. }
  895. if (blockSucc->noImplicitCallJsArrayHeadSegmentSymUses != nullptr)
  896. {
  897. noImplicitCallJsArrayHeadSegmentSymUses->Or(blockSucc->noImplicitCallJsArrayHeadSegmentSymUses);
  898. if (deleteData)
  899. {
  900. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallJsArrayHeadSegmentSymUses);
  901. blockSucc->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  902. }
  903. }
  904. if (blockSucc->noImplicitCallArrayLengthSymUses != nullptr)
  905. {
  906. noImplicitCallArrayLengthSymUses->Or(blockSucc->noImplicitCallArrayLengthSymUses);
  907. if (deleteData)
  908. {
  909. JitAdelete(this->tempAlloc, blockSucc->noImplicitCallArrayLengthSymUses);
  910. blockSucc->noImplicitCallArrayLengthSymUses = nullptr;
  911. }
  912. }
  913. firstSucc = false;
  914. }
  915. NEXT_SUCCESSOR_BLOCK;
  916. #if DBG_DUMP
  917. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  918. if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
  919. {
  920. Output::Print(_u("ObjTypeSpecStore: func %s, block %d: "),
  921. this->func->GetDebugNumberSet(debugStringBuffer),
  922. block->GetBlockNum());
  923. if (stackSymToFinalType)
  924. {
  925. stackSymToFinalType->Dump();
  926. }
  927. else
  928. {
  929. Output::Print(_u("null\n"));
  930. }
  931. }
  932. if (PHASE_TRACE(Js::TraceObjTypeSpecTypeGuardsPhase, this->func))
  933. {
  934. Output::Print(_u("ObjTypeSpec: func %s, block %d, guarded properties:\n"),
  935. this->func->GetDebugNumberSet(debugStringBuffer), block->GetBlockNum());
  936. if (stackSymToGuardedProperties)
  937. {
  938. stackSymToGuardedProperties->Dump();
  939. Output::Print(_u("\n"));
  940. }
  941. else
  942. {
  943. Output::Print(_u("null\n"));
  944. }
  945. }
  946. if (PHASE_TRACE(Js::TraceObjTypeSpecWriteGuardsPhase, this->func))
  947. {
  948. Output::Print(_u("ObjTypeSpec: func %s, block %d, write guards: "),
  949. this->func->GetDebugNumberSet(debugStringBuffer), block->GetBlockNum());
  950. if (stackSymToWriteGuardsMap)
  951. {
  952. Output::Print(_u("\n"));
  953. stackSymToWriteGuardsMap->Dump();
  954. Output::Print(_u("\n"));
  955. }
  956. else
  957. {
  958. Output::Print(_u("null\n"));
  959. }
  960. }
  961. #endif
  962. }
  963. #if DBG
  964. if (tempObjectVerifyTracker)
  965. {
  966. FOREACH_DEAD_SUCCESSOR_BLOCK(deadBlockSucc, block)
  967. {
  968. Assert(deadBlockSucc->tempObjectVerifyTracker || deadBlockSucc->isLoopHeader);
  969. if (deadBlockSucc->tempObjectVerifyTracker != nullptr)
  970. {
  971. Assert((deadBlockSucc->loop != nullptr) == deadBlockSucc->tempObjectVerifyTracker->HasTempTransferDependencies());
  972. // Dead block don't effect non temp use, we only need to carry the removed use bit vector forward
  973. // and put all the upward exposed use to the set that we might found out to be mark temp
  974. // after globopt
  975. tempObjectVerifyTracker->MergeDeadData(deadBlockSucc);
  976. }
  977. if (!byteCodeUpwardExposedUsed)
  978. {
  979. if (!deadBlockSucc->isLoopHeader && deadBlockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
  980. {
  981. Assert(deadBlockSucc->GetDataUseCount() != 0);
  982. if (deadBlockSucc->DecrementDataUseCount() == 0)
  983. {
  984. this->DeleteBlockData(deadBlockSucc);
  985. }
  986. }
  987. }
  988. }
  989. NEXT_DEAD_SUCCESSOR_BLOCK;
  990. }
  991. #endif
  992. if (byteCodeUpwardExposedUsed)
  993. {
  994. FOREACH_DEAD_SUCCESSOR_BLOCK(deadBlockSucc, block)
  995. {
  996. #if DBG
  997. if (excludeByteCodeUpwardExposedTracking && deadBlockSucc->excludeByteCodeUpwardExposedTracking)
  998. {
  999. excludeByteCodeUpwardExposedTracking->Or(deadBlockSucc->excludeByteCodeUpwardExposedTracking);
  1000. }
  1001. #endif
  1002. Assert(deadBlockSucc->byteCodeUpwardExposedUsed || deadBlockSucc->isLoopHeader);
  1003. if (deadBlockSucc->byteCodeUpwardExposedUsed)
  1004. {
  1005. byteCodeUpwardExposedUsed->Or(deadBlockSucc->byteCodeUpwardExposedUsed);
  1006. if (this->tag == Js::DeadStorePhase)
  1007. {
  1008. #if DBG
  1009. for (uint i = 0; i < byteCodeLocalsCount; i++)
  1010. {
  1011. if (byteCodeRestoreSyms[i] == nullptr)
  1012. {
  1013. byteCodeRestoreSyms[i] = deadBlockSucc->byteCodeRestoreSyms[i];
  1014. }
  1015. else
  1016. {
  1017. Assert(deadBlockSucc->byteCodeRestoreSyms[i] == nullptr
  1018. || byteCodeRestoreSyms[i] == deadBlockSucc->byteCodeRestoreSyms[i]);
  1019. }
  1020. }
  1021. #endif
  1022. }
  1023. }
  1024. if (!deadBlockSucc->isLoopHeader && deadBlockSucc->backwardPassCurrentLoop == this->currentPrePassLoop)
  1025. {
  1026. Assert(deadBlockSucc->GetDataUseCount() != 0);
  1027. if (deadBlockSucc->DecrementDataUseCount() == 0)
  1028. {
  1029. this->DeleteBlockData(deadBlockSucc);
  1030. }
  1031. }
  1032. }
  1033. NEXT_DEAD_SUCCESSOR_BLOCK;
  1034. }
  1035. if (block->isLoopHeader)
  1036. {
  1037. this->DeleteBlockData(block);
  1038. }
  1039. else
  1040. {
  1041. if(block->GetDataUseCount() == 0)
  1042. {
  1043. Assert(block->slotDeadStoreCandidates == nullptr);
  1044. Assert(block->tempNumberTracker == nullptr);
  1045. Assert(block->tempObjectTracker == nullptr);
  1046. Assert(block->tempObjectVerifyTracker == nullptr);
  1047. Assert(block->upwardExposedUses == nullptr);
  1048. Assert(block->upwardExposedFields == nullptr);
  1049. Assert(block->typesNeedingKnownObjectLayout == nullptr);
  1050. // byteCodeUpwardExposedUsed is required to populate the writeThroughSymbolsSet for the try region in the backwards pass
  1051. Assert(block->byteCodeUpwardExposedUsed == nullptr || (this->DoByteCodeUpwardExposedUsed()));
  1052. Assert(block->byteCodeRestoreSyms == nullptr);
  1053. Assert(block->excludeByteCodeUpwardExposedTracking == nullptr || (this->DoByteCodeUpwardExposedUsed()));
  1054. Assert(block->stackSymToFinalType == nullptr);
  1055. Assert(block->stackSymToGuardedProperties == nullptr);
  1056. Assert(block->stackSymToWriteGuardsMap == nullptr);
  1057. Assert(block->cloneStrCandidates == nullptr);
  1058. Assert(block->noImplicitCallUses == nullptr);
  1059. Assert(block->noImplicitCallNoMissingValuesUses == nullptr);
  1060. Assert(block->noImplicitCallNativeArrayUses == nullptr);
  1061. Assert(block->noImplicitCallJsArrayHeadSegmentSymUses == nullptr);
  1062. Assert(block->noImplicitCallArrayLengthSymUses == nullptr);
  1063. Assert(block->couldRemoveNegZeroBailoutForDef == nullptr);
  1064. }
  1065. else
  1066. {
  1067. // The collection pass sometimes does not know whether it can delete a successor block's data, so it may leave some
  1068. // blocks with data intact. Delete the block data now.
  1069. Assert(block->backwardPassCurrentLoop);
  1070. Assert(block->backwardPassCurrentLoop->hasDeadStoreCollectionPass);
  1071. // The two situations where we might be keeping data around are either before we do
  1072. // the prepass, or when we're storing the data because we have a speculation-cancel
  1073. // block, which has longer lifetimes for its data.
  1074. Assert(!block->backwardPassCurrentLoop->hasDeadStorePrepass || block->GetFirstInstr()->m_next->m_opcode == Js::OpCode::SpeculatedLoadFence);
  1075. DeleteBlockData(block);
  1076. }
  1077. block->backwardPassCurrentLoop = this->currentPrePassLoop;
  1078. if (this->DoByteCodeUpwardExposedUsed()
  1079. #if DBG
  1080. || this->DoMarkTempObjectVerify()
  1081. #endif
  1082. )
  1083. {
  1084. block->SetDataUseCount(block->GetPredList()->Count() + block->GetDeadPredList()->Count());
  1085. }
  1086. else
  1087. {
  1088. block->SetDataUseCount(block->GetPredList()->Count());
  1089. }
  1090. }
  1091. block->upwardExposedUses = upwardExposedUses;
  1092. block->upwardExposedFields = upwardExposedFields;
  1093. block->typesNeedingKnownObjectLayout = typesNeedingKnownObjectLayout;
  1094. block->byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed;
  1095. #if DBG
  1096. block->byteCodeRestoreSyms = byteCodeRestoreSyms;
  1097. block->excludeByteCodeUpwardExposedTracking = excludeByteCodeUpwardExposedTracking;
  1098. #endif
  1099. block->slotDeadStoreCandidates = slotDeadStoreCandidates;
  1100. block->tempNumberTracker = tempNumberTracker;
  1101. block->tempObjectTracker = tempObjectTracker;
  1102. #if DBG
  1103. block->tempObjectVerifyTracker = tempObjectVerifyTracker;
  1104. #endif
  1105. block->stackSymToFinalType = stackSymToFinalType;
  1106. block->stackSymToGuardedProperties = stackSymToGuardedProperties;
  1107. block->stackSymToWriteGuardsMap = stackSymToWriteGuardsMap;
  1108. block->cloneStrCandidates = cloneStrCandidates;
  1109. block->noImplicitCallUses = noImplicitCallUses;
  1110. block->noImplicitCallNoMissingValuesUses = noImplicitCallNoMissingValuesUses;
  1111. block->noImplicitCallNativeArrayUses = noImplicitCallNativeArrayUses;
  1112. block->noImplicitCallJsArrayHeadSegmentSymUses = noImplicitCallJsArrayHeadSegmentSymUses;
  1113. block->noImplicitCallArrayLengthSymUses = noImplicitCallArrayLengthSymUses;
  1114. block->couldRemoveNegZeroBailoutForDef = couldRemoveNegZeroBailoutForDef;
  1115. block->liveFixedFields = liveFixedFields;
  1116. }
  1117. ObjTypeGuardBucket
  1118. BackwardPass::MergeGuardedProperties(ObjTypeGuardBucket bucket1, ObjTypeGuardBucket bucket2)
  1119. {
  1120. BVSparse<JitArenaAllocator> *guardedPropertyOps1 = bucket1.GetGuardedPropertyOps();
  1121. BVSparse<JitArenaAllocator> *guardedPropertyOps2 = bucket2.GetGuardedPropertyOps();
  1122. Assert(guardedPropertyOps1 || guardedPropertyOps2);
  1123. BVSparse<JitArenaAllocator> *mergedPropertyOps;
  1124. if (guardedPropertyOps1)
  1125. {
  1126. mergedPropertyOps = guardedPropertyOps1->CopyNew();
  1127. if (guardedPropertyOps2)
  1128. {
  1129. mergedPropertyOps->Or(guardedPropertyOps2);
  1130. }
  1131. }
  1132. else
  1133. {
  1134. mergedPropertyOps = guardedPropertyOps2->CopyNew();
  1135. }
  1136. ObjTypeGuardBucket bucket;
  1137. bucket.SetGuardedPropertyOps(mergedPropertyOps);
  1138. JITTypeHolder monoGuardType = bucket1.GetMonoGuardType();
  1139. if (monoGuardType != nullptr)
  1140. {
  1141. Assert(!bucket2.NeedsMonoCheck() || monoGuardType == bucket2.GetMonoGuardType());
  1142. }
  1143. else
  1144. {
  1145. monoGuardType = bucket2.GetMonoGuardType();
  1146. }
  1147. bucket.SetMonoGuardType(monoGuardType);
  1148. return bucket;
  1149. }
  1150. ObjWriteGuardBucket
  1151. BackwardPass::MergeWriteGuards(ObjWriteGuardBucket bucket1, ObjWriteGuardBucket bucket2)
  1152. {
  1153. BVSparse<JitArenaAllocator> *writeGuards1 = bucket1.GetWriteGuards();
  1154. BVSparse<JitArenaAllocator> *writeGuards2 = bucket2.GetWriteGuards();
  1155. Assert(writeGuards1 || writeGuards2);
  1156. BVSparse<JitArenaAllocator> *mergedWriteGuards;
  1157. if (writeGuards1)
  1158. {
  1159. mergedWriteGuards = writeGuards1->CopyNew();
  1160. if (writeGuards2)
  1161. {
  1162. mergedWriteGuards->Or(writeGuards2);
  1163. }
  1164. }
  1165. else
  1166. {
  1167. mergedWriteGuards = writeGuards2->CopyNew();
  1168. }
  1169. ObjWriteGuardBucket bucket;
  1170. bucket.SetWriteGuards(mergedWriteGuards);
  1171. return bucket;
  1172. }
  1173. void
  1174. BackwardPass::DeleteBlockData(BasicBlock * block)
  1175. {
  1176. if (block->slotDeadStoreCandidates != nullptr)
  1177. {
  1178. JitAdelete(this->tempAlloc, block->slotDeadStoreCandidates);
  1179. block->slotDeadStoreCandidates = nullptr;
  1180. }
  1181. if (block->tempNumberTracker != nullptr)
  1182. {
  1183. JitAdelete(this->tempAlloc, block->tempNumberTracker);
  1184. block->tempNumberTracker = nullptr;
  1185. }
  1186. if (block->tempObjectTracker != nullptr)
  1187. {
  1188. JitAdelete(this->tempAlloc, block->tempObjectTracker);
  1189. block->tempObjectTracker = nullptr;
  1190. }
  1191. #if DBG
  1192. if (block->tempObjectVerifyTracker != nullptr)
  1193. {
  1194. JitAdelete(this->tempAlloc, block->tempObjectVerifyTracker);
  1195. block->tempObjectVerifyTracker = nullptr;
  1196. }
  1197. #endif
  1198. if (block->stackSymToFinalType != nullptr)
  1199. {
  1200. block->stackSymToFinalType->Delete();
  1201. block->stackSymToFinalType = nullptr;
  1202. }
  1203. if (block->stackSymToGuardedProperties != nullptr)
  1204. {
  1205. block->stackSymToGuardedProperties->Delete();
  1206. block->stackSymToGuardedProperties = nullptr;
  1207. }
  1208. if (block->stackSymToWriteGuardsMap != nullptr)
  1209. {
  1210. block->stackSymToWriteGuardsMap->Delete();
  1211. block->stackSymToWriteGuardsMap = nullptr;
  1212. }
  1213. if (block->cloneStrCandidates != nullptr)
  1214. {
  1215. Assert(this->tag == Js::BackwardPhase);
  1216. JitAdelete(this->globOpt->alloc, block->cloneStrCandidates);
  1217. block->cloneStrCandidates = nullptr;
  1218. }
  1219. if (block->noImplicitCallUses != nullptr)
  1220. {
  1221. JitAdelete(this->tempAlloc, block->noImplicitCallUses);
  1222. block->noImplicitCallUses = nullptr;
  1223. }
  1224. if (block->noImplicitCallNoMissingValuesUses != nullptr)
  1225. {
  1226. JitAdelete(this->tempAlloc, block->noImplicitCallNoMissingValuesUses);
  1227. block->noImplicitCallNoMissingValuesUses = nullptr;
  1228. }
  1229. if (block->noImplicitCallNativeArrayUses != nullptr)
  1230. {
  1231. JitAdelete(this->tempAlloc, block->noImplicitCallNativeArrayUses);
  1232. block->noImplicitCallNativeArrayUses = nullptr;
  1233. }
  1234. if (block->noImplicitCallJsArrayHeadSegmentSymUses != nullptr)
  1235. {
  1236. JitAdelete(this->tempAlloc, block->noImplicitCallJsArrayHeadSegmentSymUses);
  1237. block->noImplicitCallJsArrayHeadSegmentSymUses = nullptr;
  1238. }
  1239. if (block->noImplicitCallArrayLengthSymUses != nullptr)
  1240. {
  1241. JitAdelete(this->tempAlloc, block->noImplicitCallArrayLengthSymUses);
  1242. block->noImplicitCallArrayLengthSymUses = nullptr;
  1243. }
  1244. if (block->liveFixedFields != nullptr)
  1245. {
  1246. JitArenaAllocator *liveFixedFieldsArena = this->tempAlloc;
  1247. JitAdelete(liveFixedFieldsArena, block->liveFixedFields);
  1248. block->liveFixedFields = nullptr;
  1249. }
  1250. if (block->upwardExposedUses != nullptr)
  1251. {
  1252. JitArenaAllocator *upwardExposedArena = (this->tag == Js::BackwardPhase) ? this->globOpt->alloc : this->tempAlloc;
  1253. JitAdelete(upwardExposedArena, block->upwardExposedUses);
  1254. block->upwardExposedUses = nullptr;
  1255. }
  1256. if (block->upwardExposedFields != nullptr)
  1257. {
  1258. JitArenaAllocator *upwardExposedArena = (this->tag == Js::BackwardPhase) ? this->globOpt->alloc : this->tempAlloc;
  1259. JitAdelete(upwardExposedArena, block->upwardExposedFields);
  1260. block->upwardExposedFields = nullptr;
  1261. }
  1262. if (block->typesNeedingKnownObjectLayout != nullptr)
  1263. {
  1264. JitAdelete(this->tempAlloc, block->typesNeedingKnownObjectLayout);
  1265. block->typesNeedingKnownObjectLayout = nullptr;
  1266. }
  1267. if (block->byteCodeUpwardExposedUsed != nullptr)
  1268. {
  1269. JitAdelete(this->tempAlloc, block->byteCodeUpwardExposedUsed);
  1270. block->byteCodeUpwardExposedUsed = nullptr;
  1271. #if DBG
  1272. JitAdeleteArray(this->tempAlloc, func->GetJITFunctionBody()->GetLocalsCount(), block->byteCodeRestoreSyms);
  1273. block->byteCodeRestoreSyms = nullptr;
  1274. JitAdelete(this->tempAlloc, block->excludeByteCodeUpwardExposedTracking);
  1275. block->excludeByteCodeUpwardExposedTracking = nullptr;
  1276. #endif
  1277. }
  1278. if (block->couldRemoveNegZeroBailoutForDef != nullptr)
  1279. {
  1280. JitAdelete(this->tempAlloc, block->couldRemoveNegZeroBailoutForDef);
  1281. block->couldRemoveNegZeroBailoutForDef = nullptr;
  1282. }
  1283. }
  1284. void
  1285. BackwardPass::ProcessLoopCollectionPass(BasicBlock *const lastBlock)
  1286. {
  1287. // The collection pass is done before the prepass, to collect and propagate a minimal amount of information into nested
  1288. // loops, for cases where the information is needed to make appropriate decisions on changing other state. For instance,
  1289. // bailouts in nested loops need to be able to see all byte-code uses that are exposed to the bailout so that the
  1290. // appropriate syms can be made upwards-exposed during the prepass. Byte-code uses that occur before the bailout in the
  1291. // flow, or byte-code uses after the current loop, are not seen by bailouts inside the loop. The collection pass collects
  1292. // byte-code uses and propagates them at least into each loop's header such that when bailouts are processed in the prepass,
  1293. // they will have full visibility of byte-code upwards-exposed uses.
  1294. //
  1295. // For the collection pass, one pass is needed to collect all byte-code uses of a loop to the loop header. If the loop has
  1296. // inner loops, another pass is needed to propagate byte-code uses in the outer loop into the inner loop's header, since
  1297. // some byte-code uses may occur before the inner loop in the flow. The process continues recursively for inner loops. The
  1298. // second pass only needs to walk as far as the first inner loop's header, since the purpose of that pass is only to
  1299. // propagate collected information into the inner loops' headers.
  1300. //
  1301. // Consider the following case:
  1302. // (Block 1, Loop 1 header)
  1303. // ByteCodeUses s1
  1304. // (Block 2, Loop 2 header)
  1305. // (Block 3, Loop 3 header)
  1306. // (Block 4)
  1307. // BailOut
  1308. // (Block 5, Loop 3 back-edge)
  1309. // (Block 6, Loop 2 back-edge)
  1310. // (Block 7, Loop 1 back-edge)
  1311. //
  1312. // Assume that the exit branch in each of these loops is in the loop's header block, like a 'while' loop. For the byte-code
  1313. // use of 's1' to become visible to the bailout in the innermost loop, we need to walk the following blocks:
  1314. // - Collection pass
  1315. // - 7, 6, 5, 4, 3, 2, 1, 7 - block 1 is the first block in loop 1 that sees 's1', and since block 7 has block 1 as its
  1316. // successor, block 7 sees 's1' now as well
  1317. // - 6, 5, 4, 3, 2, 6 - block 2 is the first block in loop 2 that sees 's1', and since block 6 has block 2 as its
  1318. // successor, block 6 sees 's1' now as well
  1319. // - 5, 4, 3 - block 3 is the first block in loop 3 that sees 's1'
  1320. // - The collection pass does not have to do another pass through the innermost loop because it does not have any inner
  1321. // loops of its own. It's sufficient to propagate the byte-code uses up to the loop header of each loop, as the
  1322. // prepass will do the remaining propagation.
  1323. // - Prepass
  1324. // - 7, 6, 5, 4, ... - since block 5 has block 3 as its successor, block 5 sees 's1', and so does block 4. So, the bailout
  1325. // finally sees 's1' as a byte-code upwards-exposed use.
  1326. //
  1327. // The collection pass walks as described above, and consists of one pass, followed by another pass if there are inner
  1328. // loops. The second pass only walks up to the first inner loop's header block, and during this pass upon reaching an inner
  1329. // loop, the algorithm goes recursively for that inner loop, and once it returns, the second pass continues from above that
  1330. // inner loop. Each bullet of the walk in the example above is a recursive call to ProcessLoopCollectionPass, except the
  1331. // first line, which is the initial call.
  1332. //
  1333. // Imagine the whole example above is inside another loop, and at the bottom of that loop there is an assignment to 's1'. If
  1334. // the bailout is the only use of 's1', then it needs to register 's1' as a use in the prepass to prevent treating the
  1335. // assignment to 's1' as a dead store.
  1336. Assert(tag == Js::DeadStorePhase);
  1337. Assert(IsCollectionPass());
  1338. Assert(lastBlock);
  1339. Loop *const collectionPassLoop = lastBlock->loop;
  1340. Assert(collectionPassLoop);
  1341. Assert(!collectionPassLoop->hasDeadStoreCollectionPass);
  1342. collectionPassLoop->hasDeadStoreCollectionPass = true;
  1343. Loop *const previousPrepassLoop = currentPrePassLoop;
  1344. currentPrePassLoop = collectionPassLoop;
  1345. Assert(IsPrePass());
  1346. // This is also the location where we do the additional step of tracking what opnds
  1347. // are used inside the loop in memory dereferences, and thus need masking for cache
  1348. // attacks (Spectre). This is a fairly conservative approach, where we just track a
  1349. // set of symbols which are determined by each other inside the loop. This lets the
  1350. // second pass later on determine if a particular operation generating a symbol can
  1351. // avoid the Spectre masking overhead, since a symbol not dereferenced in the loops
  1352. // can be masked on the out-edge of the loop, which should be significantly cheaper
  1353. // than masking it every iteration.
  1354. AssertMsg(collectionPassLoop->symClusterList == nullptr, "clusterList should not have been initialized yet!");
  1355. // This is needed to work around tokenization issues with preprocessor macros which
  1356. // present themselves when using multiple template parameters.
  1357. #ifndef _M_ARM
  1358. typedef SegmentClusterList<SymID, JitArenaAllocator> symClusterListType;
  1359. collectionPassLoop->symClusterList = JitAnew(this->func->m_fg->alloc, symClusterListType, this->func->m_fg->alloc, 256);
  1360. collectionPassLoop->internallyDereferencedSyms = JitAnew(this->func->m_fg->alloc, BVSparse<JitArenaAllocator>, this->func->m_fg->alloc);
  1361. #endif
  1362. // First pass
  1363. BasicBlock *firstInnerLoopHeader = nullptr;
  1364. {
  1365. #if DBG_DUMP
  1366. if(IsTraceEnabled())
  1367. {
  1368. Output::Print(_u("******* COLLECTION PASS 1 START: Loop %u ********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
  1369. }
  1370. #endif
  1371. // We want to be able to disambiguate this in ProcessBlock
  1372. CollectionPassSubPhase prevCollectionPassSubPhase = this->collectionPassSubPhase;
  1373. this->collectionPassSubPhase = CollectionPassSubPhase::FirstPass;
  1374. FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, nullptr)
  1375. {
  1376. ProcessBlock(block);
  1377. if(block->isLoopHeader)
  1378. {
  1379. if(block->loop == collectionPassLoop)
  1380. {
  1381. break;
  1382. }
  1383. // Keep track of the first inner loop's header for the second pass, which need only walk up to that block
  1384. firstInnerLoopHeader = block;
  1385. }
  1386. } NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;
  1387. this->collectionPassSubPhase = prevCollectionPassSubPhase;
  1388. #if DBG_DUMP
  1389. if(IsTraceEnabled())
  1390. {
  1391. Output::Print(_u("******** COLLECTION PASS 1 END: Loop %u *********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
  1392. }
  1393. #endif
  1394. }
  1395. #ifndef _M_ARM
  1396. // Since we generated the base data structures for the spectre handling, we can now
  1397. // cross-reference them to get the full set of what may be dereferenced in the loop
  1398. // and what is safe in speculation.
  1399. #if DBG_DUMP
  1400. if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
  1401. {
  1402. Output::Print(_u("Analysis Results for loop %u:\n"), collectionPassLoop->GetLoopNumber());
  1403. Output::Print(_u("ClusterList pre-consolidation: "));
  1404. collectionPassLoop->symClusterList->Dump();
  1405. }
  1406. #endif // DBG_DUMP
  1407. collectionPassLoop->symClusterList->Consolidate();
  1408. #if DBG_DUMP
  1409. if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
  1410. {
  1411. Output::Print(_u("ClusterList post-consolidation: "));
  1412. collectionPassLoop->symClusterList->Dump();
  1413. Output::Print(_u("Internally dereferenced syms pre-propagation: "));
  1414. collectionPassLoop->internallyDereferencedSyms->Dump();
  1415. }
  1416. #endif // DBG_DUMP
  1417. collectionPassLoop->symClusterList->Map<BVSparse<JitArenaAllocator>*, true>([](SymID index, SymID containingSetRoot, BVSparse<JitArenaAllocator>* bv){
  1418. if (bv->Test(index))
  1419. {
  1420. bv->Set(containingSetRoot);
  1421. }
  1422. }, collectionPassLoop->internallyDereferencedSyms);
  1423. collectionPassLoop->symClusterList->Map<BVSparse<JitArenaAllocator>*, true>([](SymID index, SymID containingSetRoot, BVSparse<JitArenaAllocator>* bv){
  1424. if (bv->Test(containingSetRoot))
  1425. {
  1426. bv->Set(index);
  1427. }
  1428. }, collectionPassLoop->internallyDereferencedSyms);
  1429. #if DBG_DUMP
  1430. if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
  1431. {
  1432. Output::Print(_u("Internally dereferenced syms post-propagation: "));
  1433. collectionPassLoop->internallyDereferencedSyms->Dump();
  1434. }
  1435. #endif // DBG_DUMP
#endif // !defined(_M_ARM)
  1437. // Second pass, only needs to run if there are any inner loops, to propagate collected information into those loops
  1438. if(firstInnerLoopHeader)
  1439. {
  1440. #if DBG_DUMP
  1441. if(IsTraceEnabled())
  1442. {
  1443. Output::Print(_u("******* COLLECTION PASS 2 START: Loop %u ********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
  1444. }
  1445. #endif
  1446. // We want to be able to disambiguate this in ProcessBlock
  1447. CollectionPassSubPhase prevCollectionPassSubPhase = this->collectionPassSubPhase;
  1448. this->collectionPassSubPhase = CollectionPassSubPhase::SecondPass;
  1449. FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, firstInnerLoopHeader)
  1450. {
  1451. Loop *const loop = block->loop;
  1452. if(loop && loop != collectionPassLoop && !loop->hasDeadStoreCollectionPass)
  1453. {
  1454. // About to make a recursive call, so when jitting in the foreground, probe the stack
  1455. if(!func->IsBackgroundJIT())
  1456. {
  1457. PROBE_STACK_NO_DISPOSE(func->GetScriptContext(), Js::Constants::MinStackDefault);
  1458. }
  1459. ProcessLoopCollectionPass(block);
  1460. // The inner loop's collection pass would have propagated collected information to its header block. Skip to the
  1461. // inner loop's header block and continue from the block before it.
  1462. block = loop->GetHeadBlock();
  1463. Assert(block->isLoopHeader);
  1464. continue;
  1465. }
  1466. ProcessBlock(block);
  1467. } NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;
  1468. this->collectionPassSubPhase = prevCollectionPassSubPhase;
  1469. #if DBG_DUMP
  1470. if(IsTraceEnabled())
  1471. {
  1472. Output::Print(_u("******** COLLECTION PASS 2 END: Loop %u *********\n"), collectionPassLoop->GetLoopTopInstr()->m_id);
  1473. }
  1474. #endif
  1475. }
  1476. currentPrePassLoop = previousPrepassLoop;
  1477. }
// Runs the backward/dead-store prepass over one loop, walking its blocks from the
// lexically last block back up to the loop header. On the DeadStore phase this first
// triggers the loop's collection pass (once per loop); on the Backward phase it
// allocates the per-loop tracking bit-vectors the walk will populate.
void
BackwardPass::ProcessLoop(BasicBlock * lastBlock)
{
#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("******* PREPASS START ********\n"));
    }
#endif

    Loop *loop = lastBlock->loop;

    // Save/restore: ProcessLoop can be reached while already inside a prepass.
    bool prevIsLoopPrepass = this->isLoopPrepass;
    this->isLoopPrepass = true;

    // This code doesn't work quite as intended. It is meant to capture fields that are live out of a loop to limit the
    // number of implicit call bailouts the forward pass must create (only compiler throughput optimization, no impact
    // on emitted code), but because it looks only at the lexically last block in the loop, it does the right thing only
    // for do-while loops. For other loops (for and while) the last block does not exit the loop. Even for do-while loops
    // this tracking can have the adverse effect of killing fields that should stay live after copy prop. Disabled by default.
    // Left in under a flag, in case we find compiler throughput issues and want to do additional experiments.
    if (PHASE_ON(Js::LiveOutFieldsPhase, this->func))
    {
        if (this->globOpt->DoFieldOpts(loop) || this->globOpt->DoFieldRefOpts(loop))
        {
            // Get the live-out set at the loop bottom.
            // This may not be the only loop exit, but all loop exits either leave the function or pass through here.
            // In the forward pass, we'll use this set to trim the live fields on exit from the loop
            // in order to limit the number of bailout points following the loop.
            BVSparse<JitArenaAllocator> *bv = JitAnew(this->func->m_fg->alloc, BVSparse<JitArenaAllocator>, this->func->m_fg->alloc);
            FOREACH_SUCCESSOR_BLOCK(blockSucc, lastBlock)
            {
                if (blockSucc->loop != loop)
                {
                    // Would like to assert this, but in strange exprgen cases involving "break LABEL" in nested
                    // loops the loop graph seems to get confused.
                    //Assert(!blockSucc->loop || blockSucc->loop->IsDescendentOrSelf(loop));
                    Assert(!blockSucc->loop || blockSucc->loop->hasDeadStorePrepass);

                    bv->Or(blockSucc->upwardExposedFields);
                }
            }
            NEXT_SUCCESSOR_BLOCK;
            lastBlock->loop->liveOutFields = bv;
        }
    }

    // First time the dead-store phase sees this loop: run its collection pass before
    // the prepass proper. The collection pass sets hasDeadStoreCollectionPass.
    if (tag == Js::DeadStorePhase && !loop->hasDeadStoreCollectionPass)
    {
        Assert(!IsCollectionPass());
        Assert(!IsPrePass());
        isCollectionPass = true;
        ProcessLoopCollectionPass(lastBlock);
        isCollectionPass = false;
    }

    Assert(!this->IsPrePass());
    this->currentPrePassLoop = loop;

    if (tag == Js::BackwardPhase)
    {
        // Allocate the sets the backward phase accumulates while walking this loop.
        Assert(loop->symsAssignedToInLoop == nullptr);
        loop->symsAssignedToInLoop = JitAnew(this->globOpt->alloc, BVSparse<JitArenaAllocator>, this->globOpt->alloc);

        Assert(loop->preservesNumberValue == nullptr);
        loop->preservesNumberValue = JitAnew(this->globOpt->alloc, BVSparse<JitArenaAllocator>, this->globOpt->alloc);
    }

    // Walk backward from the loop's last block and stop once this loop's own header
    // has been processed (inner-loop headers don't terminate the walk).
    FOREACH_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE(block, lastBlock, nullptr)
    {
        this->ProcessBlock(block);

        if (block->isLoopHeader && block->loop == lastBlock->loop)
        {
            break;
        }
    }
    NEXT_BLOCK_BACKWARD_IN_RANGE_DEAD_OR_ALIVE;

    this->currentPrePassLoop = nullptr;
    Assert(lastBlock);
    __analysis_assume(lastBlock);
    lastBlock->loop->hasDeadStorePrepass = true;

    this->isLoopPrepass = prevIsLoopPrepass;

#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("******** PREPASS END *********\n"));
    }
#endif
}
  1558. void
  1559. BackwardPass::OptBlock(BasicBlock * block)
  1560. {
  1561. this->func->ThrowIfScriptClosed();
  1562. if (block->loop && !block->loop->hasDeadStorePrepass)
  1563. {
  1564. ProcessLoop(block);
  1565. }
  1566. this->ProcessBlock(block);
  1567. if(DoTrackNegativeZero())
  1568. {
  1569. negativeZeroDoesNotMatterBySymId->ClearAll();
  1570. }
  1571. if (DoTrackBitOpsOrNumber())
  1572. {
  1573. symUsedOnlyForBitOpsBySymId->ClearAll();
  1574. symUsedOnlyForNumberBySymId->ClearAll();
  1575. }
  1576. if(DoTrackIntOverflow())
  1577. {
  1578. intOverflowDoesNotMatterBySymId->ClearAll();
  1579. if(DoTrackCompoundedIntOverflow())
  1580. {
  1581. intOverflowDoesNotMatterInRangeBySymId->ClearAll();
  1582. }
  1583. }
  1584. }
  1585. void
  1586. BackwardPass::ProcessBailOutArgObj(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed)
  1587. {
  1588. Assert(this->tag != Js::BackwardPhase);
  1589. if (this->globOpt->TrackArgumentsObject() && bailOutInfo->capturedValues->argObjSyms)
  1590. {
  1591. FOREACH_BITSET_IN_SPARSEBV(symId, bailOutInfo->capturedValues->argObjSyms)
  1592. {
  1593. if (byteCodeUpwardExposedUsed->TestAndClear(symId))
  1594. {
  1595. if (bailOutInfo->usedCapturedValues->argObjSyms == nullptr)
  1596. {
  1597. bailOutInfo->usedCapturedValues->argObjSyms = JitAnew(this->func->m_alloc,
  1598. BVSparse<JitArenaAllocator>, this->func->m_alloc);
  1599. }
  1600. bailOutInfo->usedCapturedValues->argObjSyms->Set(symId);
  1601. }
  1602. }
  1603. NEXT_BITSET_IN_SPARSEBV;
  1604. }
  1605. if (bailOutInfo->usedCapturedValues->argObjSyms)
  1606. {
  1607. byteCodeUpwardExposedUsed->Minus(bailOutInfo->usedCapturedValues->argObjSyms);
  1608. }
  1609. }
// Determines which captured constant values must be restored at this bailout point.
// Constants already on the used (restore) list are cleared from the tracking
// bit-vectors; remaining captured constants that are still byte-code upward-exposed
// (or referenced as bailout arg syms) are moved onto the restore list, and the rest
// are discarded (outside the prepass).
void
BackwardPass::ProcessBailOutConstants(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv)
{
    Assert(this->tag != Js::BackwardPhase);

    // Remove constants that we are already going to restore
    SListBase<ConstantStackSymValue> * usedConstantValues = &bailOutInfo->usedCapturedValues->constantValues;
    FOREACH_SLISTBASE_ENTRY(ConstantStackSymValue, value, usedConstantValues)
    {
        byteCodeUpwardExposedUsed->Clear(value.Key()->m_id);
        bailoutReferencedArgSymsBv->Clear(value.Key()->m_id);
    }
    NEXT_SLISTBASE_ENTRY;

    // Find other constants that we need to restore
    FOREACH_SLISTBASE_ENTRY_EDITING(ConstantStackSymValue, value, &bailOutInfo->capturedValues->constantValues, iter)
    {
        if (byteCodeUpwardExposedUsed->TestAndClear(value.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(value.Key()->m_id))
        {
            // Constant needs to be restored; move it to the restore list
            iter.MoveCurrentTo(usedConstantValues);
        }
        else if (!this->IsPrePass())
        {
            // Constant doesn't need to be restored, delete.
            // (Kept during the prepass since the real pass will revisit this list.)
            iter.RemoveCurrent(this->func->m_alloc);
        }
    }
    NEXT_SLISTBASE_ENTRY_EDITING;
}
// Determines which copy-propped syms must be restored at this bailout point and
// which sym (original, copy-prop, or a type-specialized equivalent) should be used
// to perform each restore. Chosen restore syms are moved to the bailout's used
// list and marked upward-exposed so their defs stay alive; unneeded entries are
// discarded (outside the prepass). See the large comment below for the heuristic.
void
BackwardPass::ProcessBailOutCopyProps(BailOutInfo * bailOutInfo, BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed, BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv)
{
    Assert(this->tag != Js::BackwardPhase);
    Assert(!this->func->GetJITFunctionBody()->IsAsmJsMode());

    // Remove copy prop that we were already going to restore
    SListBase<CopyPropSyms> * usedCopyPropSyms = &bailOutInfo->usedCapturedValues->copyPropSyms;
    FOREACH_SLISTBASE_ENTRY(CopyPropSyms, copyPropSyms, usedCopyPropSyms)
    {
        byteCodeUpwardExposedUsed->Clear(copyPropSyms.Key()->m_id);
        this->currentBlock->upwardExposedUses->Set(copyPropSyms.Value()->m_id);
    }
    NEXT_SLISTBASE_ENTRY;

    JitArenaAllocator * allocator = this->func->m_alloc;
    BasicBlock * block = this->currentBlock;
    BVSparse<JitArenaAllocator> * upwardExposedUses = block->upwardExposedUses;

    // Find other copy prop that we need to restore
    FOREACH_SLISTBASE_ENTRY_EDITING(CopyPropSyms, copyPropSyms, &bailOutInfo->capturedValues->copyPropSyms, iter)
    {
        // Copy prop syms should be vars
        Assert(!copyPropSyms.Key()->IsTypeSpec());
        Assert(!copyPropSyms.Value()->IsTypeSpec());
        if (byteCodeUpwardExposedUsed->TestAndClear(copyPropSyms.Key()->m_id) || bailoutReferencedArgSymsBv->TestAndClear(copyPropSyms.Key()->m_id))
        {
            // This copy-prop sym needs to be restored; add it to the restore list.

            /*
            - copyPropSyms.Key() - original sym that is byte-code upwards-exposed, its corresponding byte-code register needs
            to be restored
            - copyPropSyms.Value() - copy-prop sym whose value the original sym has at the point of this instruction

            Heuristic:
            - By default, use the copy-prop sym to restore its corresponding byte code register
            - This is typically better because that allows the value of the original sym, if it's not used after the copy-prop
            sym is changed, to be discarded and we only have one lifetime (the copy-prop sym's lifetime) in to deal with for
            register allocation
            - Additionally, if the transferring store, which caused the original sym to have the same value as the copy-prop
            sym, becomes a dead store, the original sym won't actually attain the value of the copy-prop sym. In that case,
            the copy-prop sym must be used to restore the byte code register corresponding to original sym.

            Special case for functional correctness:
            - Consider that we always use the copy-prop sym to restore, and consider the following case:
            b = a
            a = c * d <Pre-op bail-out>
            = b
            - This is rewritten by the lowerer as follows:
            b = a
            a = c
            a = a * d <Pre-op bail-out> (to make dst and src1 the same)
            = b
            - The problem here is that at the point of the bail-out instruction, 'a' would be used to restore the value of 'b',
            but the value of 'a' has changed before the bail-out (at 'a = c').
            - In this case, we need to use 'b' (the original sym) to restore the value of 'b'. Because 'b' is upwards-exposed,
            'b = a' cannot be a dead store, therefore making it valid to use 'b' to restore.
            - Use the original sym to restore when all of the following are true:
            - The bailout is a pre-op bailout, and the bailout check is done after overwriting the destination
            - It's an int-specialized unary or binary operation that produces a value
            - The copy-prop sym is the destination of this instruction
            - None of the sources are the copy-prop sym. Otherwise, the value of the copy-prop sym will be saved as
            necessary by the bailout code.
            */
            StackSym * stackSym = copyPropSyms.Key(); // assume that we'll use the original sym to restore
            SymID symId = stackSym->m_id;

            // Prefer to restore from type-specialized versions of the sym, as that will reduce the need for potentially
            // expensive ToVars that can more easily be eliminated due to being dead stores
            StackSym * int32StackSym = nullptr;
            StackSym * float64StackSym = nullptr;
            StackSym * simd128StackSym = nullptr;

            // If the sym is type specialized, we need to check for upward exposed uses of the specialized sym and not the equivalent var sym. If there are no
            // uses and we use the copy prop sym to restore, we'll need to find the type specialize sym for that sym as well.
            StackSym * typeSpecSym = nullptr;
            // Mutates int32StackSym/float64StackSym/typeSpecSym based on which
            // specialized version of the current (stackSym, symId) pair is live.
            auto findTypeSpecSym = [&]()
            {
                if (bailOutInfo->liveLosslessInt32Syms->Test(symId))
                {
                    // Var version of the sym is not live, use the int32 version
                    int32StackSym = stackSym->GetInt32EquivSym(nullptr);
                    typeSpecSym = int32StackSym;
                    Assert(int32StackSym);
                }
                else if(bailOutInfo->liveFloat64Syms->Test(symId))
                {
                    // Var/int32 version of the sym is not live, use the float64 version
                    float64StackSym = stackSym->GetFloat64EquivSym(nullptr);
                    typeSpecSym = float64StackSym;
                    Assert(float64StackSym);
                }
                else
                {
                    Assert(bailOutInfo->liveVarSyms->Test(symId));
                    typeSpecSym = stackSym;
                }
            };

            findTypeSpecSym();
            Assert(typeSpecSym != nullptr);

            IR::Instr *const instr = bailOutInfo->bailOutInstr;
            StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
            // The special case described above: pre-op bailout where the lowerer will
            // overwrite the destination (== the copy-prop sym) before the bailout check.
            if(instr->GetBailOutKind() & IR::BailOutOnResultConditions &&
                instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
                bailOutInfo->bailOutOffset <= instr->GetByteCodeOffset() &&
                dstSym &&
                dstSym->IsInt32() &&
                dstSym->IsTypeSpec() &&
                dstSym->GetVarEquivSym(nullptr) == copyPropSyms.Value() &&
                instr->GetSrc1() &&
                !instr->GetDst()->IsEqual(instr->GetSrc1()) &&
                !(instr->GetSrc2() && instr->GetDst()->IsEqual(instr->GetSrc2())))
            {
                Assert(bailOutInfo->bailOutOffset == instr->GetByteCodeOffset());

                // Need to use the original sym to restore. The original sym is byte-code upwards-exposed, which is why it needs
                // to be restored. Because the original sym needs to be restored and the copy-prop sym is changing here, the
                // original sym must be live in some fashion at the point of this instruction, that will be verified below. The
                // original sym will also be made upwards-exposed from here, so the aforementioned transferring store of the
                // copy-prop sym to the original sym will not be a dead store.
            }
            else if (block->upwardExposedUses->Test(typeSpecSym->m_id) && !block->upwardExposedUses->Test(copyPropSyms.Value()->m_id))
            {
                // Don't use the copy prop sym if it is not used and the orig sym still has uses.
                // No point in extending the lifetime of the copy prop sym unnecessarily.
            }
            else
            {
                // Need to use the copy-prop sym to restore
                stackSym = copyPropSyms.Value();
                symId = stackSym->m_id;
                int32StackSym = nullptr;
                float64StackSym = nullptr;
                simd128StackSym = nullptr;
                findTypeSpecSym();
            }

            // We did not end up using the copy prop sym. Let's make sure the use of the original sym by the bailout is captured.
            if (stackSym != copyPropSyms.Value() && stackSym->HasArgSlotNum())
            {
                bailoutReferencedArgSymsBv->Set(stackSym->m_id);
            }

            // Record the chosen restore sym (preferring the specialized version) on the
            // used list, drop the captured entry, and keep the restore sym's def alive.
            if (int32StackSym != nullptr)
            {
                Assert(float64StackSym == nullptr);
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), int32StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(int32StackSym->m_id);
            }
            else if (float64StackSym != nullptr)
            {
                // This float-specialized sym is going to be used to restore the corresponding byte-code register. Need to
                // ensure that the float value can be precisely coerced back to the original Var value by requiring that it is
                // specialized using BailOutNumberOnly.
                float64StackSym->m_requiresBailOnNotNumber = true;

                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), float64StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(float64StackSym->m_id);
            }
            // SIMD_JS
            else if (simd128StackSym != nullptr)
            {
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), simd128StackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(simd128StackSym->m_id);
            }
            else
            {
                usedCopyPropSyms->PrependNode(allocator, copyPropSyms.Key(), stackSym);
                iter.RemoveCurrent(allocator);
                upwardExposedUses->Set(symId);
            }
        }
        else if (!this->IsPrePass())
        {
            // Copy prop sym doesn't need to be restored, delete.
            iter.RemoveCurrent(allocator);
        }
    }
    NEXT_SLISTBASE_ENTRY_EDITING;
}
  1809. StackSym*
  1810. BackwardPass::ProcessByteCodeUsesDst(IR::ByteCodeUsesInstr * byteCodeUsesInstr)
  1811. {
  1812. Assert(this->DoByteCodeUpwardExposedUsed());
  1813. IR::Opnd * dst = byteCodeUsesInstr->GetDst();
  1814. if (dst)
  1815. {
  1816. IR::RegOpnd * dstRegOpnd = dst->AsRegOpnd();
  1817. StackSym * dstStackSym = dstRegOpnd->m_sym->AsStackSym();
  1818. Assert(!dstRegOpnd->GetIsJITOptimizedReg());
  1819. Assert(dstStackSym->GetByteCodeRegSlot() != Js::Constants::NoRegister);
  1820. if (dstStackSym->GetType() != TyVar)
  1821. {
  1822. dstStackSym = dstStackSym->GetVarEquivSym(nullptr);
  1823. }
  1824. // If the current region is a Try, symbols in its write-through set shouldn't be cleared.
  1825. // Otherwise, symbols in the write-through set of the first try ancestor shouldn't be cleared.
  1826. if (!this->currentRegion ||
  1827. !this->CheckWriteThroughSymInRegion(this->currentRegion, dstStackSym))
  1828. {
  1829. this->currentBlock->byteCodeUpwardExposedUsed->Clear(dstStackSym->m_id);
  1830. return dstStackSym;
  1831. }
  1832. }
  1833. return nullptr;
  1834. }
  1835. const BVSparse<JitArenaAllocator>*
  1836. BackwardPass::ProcessByteCodeUsesSrcs(IR::ByteCodeUsesInstr * byteCodeUsesInstr)
  1837. {
  1838. Assert(this->DoByteCodeUpwardExposedUsed() || tag == Js::BackwardPhase);
  1839. const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = byteCodeUsesInstr->GetByteCodeUpwardExposedUsed();
  1840. if (byteCodeUpwardExposedUsed && this->DoByteCodeUpwardExposedUsed())
  1841. {
  1842. this->currentBlock->byteCodeUpwardExposedUsed->Or(byteCodeUpwardExposedUsed);
  1843. }
  1844. return byteCodeUpwardExposedUsed;
  1845. }
// Processes a ByteCodeUses pseudo-instruction for the current phase. Returns true
// when the instruction is a ByteCodeUses instr and has been fully handled here
// (possibly removed from the block), so the caller skips its normal processing;
// returns false for all other instructions.
bool
BackwardPass::ProcessByteCodeUsesInstr(IR::Instr * instr)
{
    if (!instr->IsByteCodeUsesInstr())
    {
        return false;
    }

    IR::ByteCodeUsesInstr * byteCodeUsesInstr = instr->AsByteCodeUsesInstr();
    if (this->tag == Js::BackwardPhase)
    {
        // FGPeeps inserts bytecodeuses instrs with srcs. We need to look at them to set the proper
        // UpwardExposedUsed info and keep the defs alive.
        // The inliner inserts bytecodeuses instrs with dsts, but we don't want to look at them for upwardExposedUsed
        // as it would cause real defs to look dead. We use these for bytecodeUpwardExposedUsed info only, which is needed
        // in the dead-store pass only.
        //
        // Handle the source side.
        const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = ProcessByteCodeUsesSrcs(byteCodeUsesInstr);
        if (byteCodeUpwardExposedUsed != nullptr)
        {
            this->currentBlock->upwardExposedUses->Or(byteCodeUpwardExposedUsed);
        }
    }
#if DBG
    else if (tag == Js::CaptureByteCodeRegUsePhase)
    {
        // Debug-only phase: record dst/src byte-code register usage without
        // touching upwardExposedUses or removing the instruction.
        ProcessByteCodeUsesDst(byteCodeUsesInstr);
        ProcessByteCodeUsesSrcs(byteCodeUsesInstr);
    }
#endif
    else
    {
        Assert(tag == Js::DeadStorePhase);
        Assert(instr->m_opcode == Js::OpCode::ByteCodeUses);
#if DBG
        if (this->DoMarkTempObjectVerify() && (this->currentBlock->isDead || !this->func->hasBailout))
        {
            if (IsCollectionPass())
            {
                if (!this->func->hasBailout)
                {
                    // Prevent byte code uses from being removed on collection pass for mark temp object verify
                    // if we don't have any bailout
                    return true;
                }
            }
            else
            {
                this->currentBlock->tempObjectVerifyTracker->NotifyDeadByteCodeUses(instr);
            }
        }
#endif

        if (this->func->hasBailout)
        {
            // Just collect the byte code uses, and remove the instruction
            // We are going backward, process the dst first and then the src
            StackSym *dstStackSym = ProcessByteCodeUsesDst(byteCodeUsesInstr);
#if DBG
            // We can only track first level function stack syms right now
            if (dstStackSym && dstStackSym->GetByteCodeFunc() == this->func)
            {
                this->currentBlock->byteCodeRestoreSyms[dstStackSym->GetByteCodeRegSlot()] = nullptr;
            }
#endif
            const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = ProcessByteCodeUsesSrcs(byteCodeUsesInstr);
#if DBG
            if (byteCodeUpwardExposedUsed)
            {
                // Debug bookkeeping: remember which stack sym restores each byte
                // code register so overlapping lifetimes can be asserted on.
                FOREACH_BITSET_IN_SPARSEBV(symId, byteCodeUpwardExposedUsed)
                {
                    StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                    Assert(!stackSym->IsTypeSpec());
                    // We can only track first level function stack syms right now
                    if (stackSym->GetByteCodeFunc() == this->func)
                    {
                        Js::RegSlot byteCodeRegSlot = stackSym->GetByteCodeRegSlot();
                        Assert(byteCodeRegSlot != Js::Constants::NoRegister);
                        if (this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] != stackSym)
                        {
                            AssertMsg(this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                                "Can't have two active lifetime for the same byte code register");
                            this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] = stackSym;
                        }
                    }
                }
                NEXT_BITSET_IN_SPARSEBV;
            }
#endif

            if (IsCollectionPass())
            {
                return true;
            }

            PropertySym *propertySymUse = byteCodeUsesInstr->propertySymUse;
            if (propertySymUse && !this->currentBlock->isDead)
            {
                this->currentBlock->upwardExposedFields->Set(propertySymUse->m_id);
            }

            if (this->IsPrePass())
            {
                // Don't remove the instruction yet if we are in the prepass
                // But tell the caller we don't need to process the instruction any more
                return true;
            }
        }

        this->currentBlock->RemoveInstr(instr);
    }
    return true;
}
// Dispatches bailout-info processing for an instruction during the DeadStore phase.
// Post-op bailouts (which bail out to a later byte-code offset) are processed
// immediately; pre-op bailouts are deferred via preOpBailOutInstrToProcess until
// the instruction and its ByteCodeUses instrs have been walked. Always returns
// false: the caller continues processing the instruction normally.
bool
BackwardPass::ProcessBailOutInfo(IR::Instr * instr)
{
    Assert(!instr->IsByteCodeUsesInstr());
    if (this->tag == Js::BackwardPhase)
    {
        // We don't need to fill in the bailout instruction in backward pass
        Assert(this->func->hasBailout || !instr->HasBailOutInfo());
        Assert(!instr->HasBailOutInfo() || instr->GetBailOutInfo()->byteCodeUpwardExposedUsed == nullptr || (this->func->HasTry() && this->func->DoOptimizeTry()));
        return false;
    }
    if(IsCollectionPass())
    {
        return false;
    }

    Assert(tag == Js::DeadStorePhase);
    if (instr->HasBailOutInfo())
    {
        Assert(this->func->hasBailout);
        Assert(this->DoByteCodeUpwardExposedUsed());

        BailOutInfo * bailOutInfo = instr->GetBailOutInfo();

        // Only process the bailout info if this is the main bailout point (instead of shared)
        if (bailOutInfo->bailOutInstr == instr)
        {
            if(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset ||
                bailOutInfo->bailOutOffset > instr->GetByteCodeOffset())
            {
                // Currently, we only have post-op bailout with BailOutOnImplicitCalls,
                // LazyBailOut, or JIT inserted operation (which has no byte code offsets).
                // If there are other bailouts that we want to bailout after the operation,
                // we have to make sure that it still doesn't do the implicit call
                // if it is done on the stack object.
                // Otherwise, the stack object will be passed to the implicit call functions.
                Assert(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset
                    || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCalls
                    || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::LazyBailOut
                    || (instr->GetBailOutKind() & ~IR::BailOutKindBits) == IR::BailOutInvalid);

                // This instruction bails out to a later byte-code instruction, so process the bailout info now
                this->ProcessBailOutInfo(instr, bailOutInfo);
                if (instr->HasLazyBailOut())
                {
                    this->ClearDstUseForPostOpLazyBailOut(instr);
                }
            }
            else
            {
                // This instruction bails out to the equivalent byte code instruction. This instruction and ByteCodeUses
                // instructions relevant to this instruction need to be processed before the bailout info for this instruction
                // can be processed, so that it can be determined what byte code registers are used by the equivalent byte code
                // instruction and need to be restored. Save the instruction for bailout info processing later.
                Assert(bailOutInfo->bailOutOffset == instr->GetByteCodeOffset());
                Assert(!preOpBailOutInstrToProcess);
                preOpBailOutInstrToProcess = instr;
            }
        }
    }
    return false;
}
  2012. bool
  2013. BackwardPass::IsLazyBailOutCurrentlyNeeeded(IR::Instr * instr) const
  2014. {
  2015. if (!this->func->ShouldDoLazyBailOut())
  2016. {
  2017. return false;
  2018. }
  2019. Assert(this->tag == Js::DeadStorePhase);
  2020. // We insert potential lazy bailout points in the forward pass, so if the instruction doesn't
  2021. // have bailout info at this point, we know for sure lazy bailout is not needed.
  2022. if (!instr->HasLazyBailOut() || this->currentBlock->isDead)
  2023. {
  2024. return false;
  2025. }
  2026. AssertMsg(
  2027. this->currentBlock->liveFixedFields != nullptr,
  2028. "liveFixedField is null, MergeSuccBlocksInfo might have not initialized it?"
  2029. );
  2030. if (instr->IsStFldVariant())
  2031. {
  2032. Assert(instr->GetDst());
  2033. Js::PropertyId id = instr->GetDst()->GetSym()->AsPropertySym()->m_propertyId;
  2034. // We only need to protect against SetFld if it is setting to one of the live fixed fields
  2035. return this->currentBlock->liveFixedFields->Test(id);
  2036. }
  2037. return !this->currentBlock->liveFixedFields->IsEmpty();
  2038. }
  2039. bool
  2040. BackwardPass::IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool needLazyBailOut, bool hasLiveFields)
  2041. {
  2042. return this->globOpt->IsImplicitCallBailOutCurrentlyNeeded(
  2043. instr, nullptr /* src1Val */, nullptr /* src2Val */,
  2044. this->currentBlock, hasLiveFields, mayNeedImplicitCallBailOut, false /* isForwardPass */, needLazyBailOut
  2045. ) ||
  2046. this->NeedBailOutOnImplicitCallsForTypedArrayStore(instr);
  2047. }
// Re-evaluates a type-check bailout installed by the forward (glob opt) pass now that the
// dead store pass knows whether the checked type is actually live. Depending on what is
// discovered, the bailout kind is retargeted, the bailout is removed entirely, or the
// instruction is demoted to a per-instruction implicit-call bailout.
void
BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr)
{
    // Good news: There are cases where the forward pass installs BailOutFailedTypeCheck, but the dead store pass
    // discovers that the checked type is dead.
    // Bad news: We may still need implicit call bailout, and it's up to the dead store pass to figure this out.
    // Worse news: BailOutFailedTypeCheck is pre-op, and BailOutOnImplicitCall is post-op. We'll use a special
    // bailout kind to indicate implicit call bailout that targets its own instruction. The lowerer will emit
    // code to disable/re-enable implicit calls around the operation.
    Assert(this->tag == Js::DeadStorePhase);

    // Skip in the pre-pass (liveness not final yet) and when there is no bailout to adjust.
    if (this->IsPrePass() || !instr->HasBailOutInfo())
    {
        return;
    }

    // By default, do not do this for stores, as it makes the presence of type checks unpredictable in the forward pass.
    // For instance, we can't predict which stores may cause reallocation of aux slots.
    if (!PHASE_ON(Js::DeadStoreTypeChecksOnStoresPhase, this->func) && instr->GetDst() && instr->GetDst()->IsSymOpnd())
    {
        return;
    }

    const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
    if (!IR::IsTypeCheckBailOutKind(oldBailOutKind))
    {
        return;
    }

    // Either src1 or dst must be a property sym operand
    Assert((instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd()) ||
        (instr->GetDst() && instr->GetDst()->IsSymOpnd() && instr->GetDst()->AsSymOpnd()->IsPropertySymOpnd()));

    IR::PropertySymOpnd *propertySymOpnd =
        (instr->GetDst() && instr->GetDst()->IsSymOpnd()) ? instr->GetDst()->AsPropertySymOpnd() : instr->GetSrc1()->AsPropertySymOpnd();

    // If the type check on this operand is still required, leave the bailout untouched.
    if (propertySymOpnd->TypeCheckRequired())
    {
        return;
    }

    bool isTypeCheckProtected = false;
    IR::BailOutKind bailOutKind;
    if (GlobOpt::NeedsTypeCheckBailOut(instr, propertySymOpnd, propertySymOpnd == instr->GetDst(), &isTypeCheckProtected, &bailOutKind))
    {
        // If we installed a failed type check bailout in the forward pass, but we are now discovering that the checked
        // type is dead, we may still need a bailout on failed fixed field type check. These type checks are required
        // regardless of whether the checked type is dead. Hence, the bailout kind may change here.
        Assert((oldBailOutKind & ~IR::BailOutKindBits) == bailOutKind ||
            bailOutKind == IR::BailOutFailedFixedFieldTypeCheck || bailOutKind == IR::BailOutFailedEquivalentFixedFieldTypeCheck);
        instr->SetBailOutKind(bailOutKind);
        return;
    }
    else if (isTypeCheckProtected)
    {
        // An upstream type check already protects this operation, so drop the bailout here.
        instr->ClearBailOutInfo();
        // Don't leave a dangling pointer to the bailout we just removed.
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
        return;
    }

    Assert(!propertySymOpnd->IsTypeCheckProtected());

    // If all we're doing here is checking the type (e.g. because we've hoisted a field load or store out of the loop, but needed
    // the type check to remain in the loop), and now it turns out we don't need the type checked, we can simply turn this into
    // a NOP and remove the bailout.
    if (instr->m_opcode == Js::OpCode::CheckObjType)
    {
        Assert(instr->GetDst() == nullptr && instr->GetSrc1() != nullptr && instr->GetSrc2() == nullptr);
        instr->m_opcode = Js::OpCode::Nop;
        instr->FreeSrc1();
        instr->ClearBailOutInfo();
        if (this->preOpBailOutInstrToProcess == instr)
        {
            this->preOpBailOutInstrToProcess = nullptr;
        }
        return;
    }

    // We don't need BailOutFailedTypeCheck but may need BailOutOnImplicitCall.
    // Consider: are we in the loop landing pad? If so, no bailout, since implicit calls will be checked at
    // the end of the block.
    if (this->currentBlock->IsLandingPad())
    {
        // We're in the landing pad.
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
        // NOTE(review): Unlink (not Clear) detaches but keeps the BailOutInfo object itself —
        // presumably it is still referenced elsewhere; confirm ownership before changing.
        instr->UnlinkBailOutInfo();
        return;
    }

    // If bailOutKind is equivTypeCheck then leave alone the bailout
    if (bailOutKind == IR::BailOutFailedEquivalentTypeCheck ||
        bailOutKind == IR::BailOutFailedEquivalentFixedFieldTypeCheck)
    {
        return;
    }

    // We're not checking for polymorphism, so don't let the bailout indicate that we
    // detected polymorphism.
    instr->GetBailOutInfo()->polymorphicCacheIndex = (uint)-1;

    // Keep the mark temp object bit if it is there so that we will not remove the implicit call check
    IR::BailOutKind newBailOutKind = IR::BailOutOnImplicitCallsPreOp | (oldBailOutKind & IR::BailOutMarkTempObject);

    // Preserve the lazy bailout bit if the original kind carried one.
    if (BailOutInfo::HasLazyBailOut(oldBailOutKind))
    {
        instr->SetBailOutKind(BailOutInfo::WithLazyBailOut(newBailOutKind));
    }
    else
    {
        instr->SetBailOutKind(newBailOutKind);
    }
}
  2152. void
  2153. BackwardPass::DeadStoreLazyBailOut(IR::Instr * instr, bool needsLazyBailOut)
  2154. {
  2155. if (!this->IsPrePass() && !needsLazyBailOut && instr->HasLazyBailOut())
  2156. {
  2157. instr->ClearLazyBailOut();
  2158. if (!instr->HasBailOutInfo())
  2159. {
  2160. if (this->preOpBailOutInstrToProcess == instr)
  2161. {
  2162. this->preOpBailOutInstrToProcess = nullptr;
  2163. }
  2164. }
  2165. }
  2166. }
// Decides, with full forward+backward liveness information, whether an implicit-call
// bailout installed earlier is still required; removes or adjusts it accordingly.
// Also installs an implicit-call pre-op for mark-temp-object cases and strips the
// mark-temp-object bit where it is no longer needed.
void
BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields, bool needsLazyBailOut)
{
    Assert(this->tag == Js::DeadStorePhase);

    if (this->IsPrePass() || !instr->HasBailOutInfo())
    {
        // Don't do this in the pre-pass, because, for instance, we don't have live-on-back-edge fields yet.
        return;
    }

    if (OpCodeAttr::BailOutRec(instr->m_opcode))
    {
        // This is something like OpCode::BailOutOnNotEqual. Assume it needs what it's got.
        return;
    }

    UpdateArrayBailOutKind(instr);

    // Install the implicit call PreOp for mark temp object if we need one.
    if ((instr->GetBailOutKind() & IR::BailOutMarkTempObject) != 0 && instr->GetBailOutKindNoBits() != IR::BailOutOnImplicitCallsPreOp)
    {
        IR::BailOutKind kind = instr->GetBailOutKind();
        const IR::BailOutKind kindNoBits = instr->GetBailOutKindNoBits();
        Assert(kindNoBits != IR::BailOutOnImplicitCalls);
        if (kindNoBits == IR::BailOutInvalid)
        {
            // We should only have combined with array bits or lazy bailout
            Assert(BailOutInfo::WithoutLazyBailOut(kind & ~IR::BailOutForArrayBits) == IR::BailOutMarkTempObject);
            // Don't need to install if we are not going to do helper calls,
            // or we are in the landingPad since implicit calls are already turned off.
            if ((kind & IR::BailOutOnArrayAccessHelperCall) == 0 && !this->currentBlock->IsLandingPad())
            {
                kind += IR::BailOutOnImplicitCallsPreOp;
                instr->SetBailOutKind(kind);
            }
        }
    }

    // Currently only try to eliminate these bailout kinds. The others are required in cases
    // where we don't necessarily have live/hoisted fields.
    const bool mayNeedBailOnImplicitCall = BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind());
    if (!mayNeedBailOnImplicitCall)
    {
        const IR::BailOutKind kind = instr->GetBailOutKind();
        if (kind & IR::BailOutMarkTempObject)
        {
            if (kind == IR::BailOutMarkTempObject)
            {
                // Landing pad does not need per-instr implicit call bailouts.
                Assert(this->currentBlock->IsLandingPad());
                instr->ClearBailOutInfo();
                // Drop the pending pre-op bailout reference if it pointed here.
                if (this->preOpBailOutInstrToProcess == instr)
                {
                    this->preOpBailOutInstrToProcess = nullptr;
                }
            }
            else
            {
                // Mark temp object bit is not needed after dead store pass
                instr->SetBailOutKind(kind & ~IR::BailOutMarkTempObject);
            }
        }
        return;
    }

    // We have an implicit call bailout in the code, and we want to make sure that it's required.
    // Do this now, because only in the dead store pass do we have complete forward and backward liveness info.
    bool needsBailOutOnImplicitCall = this->IsImplicitCallBailOutCurrentlyNeeded(instr, mayNeedBailOnImplicitCall, needsLazyBailOut, hasLiveFields);

    // UpdateImplicitCallBailOutKind returns false when no bailout kind remains; in that
    // case the bailout info on the instruction is removed entirely.
    if(!UpdateImplicitCallBailOutKind(instr, needsBailOutOnImplicitCall, needsLazyBailOut))
    {
        instr->ClearBailOutInfo();
        if (preOpBailOutInstrToProcess == instr)
        {
            preOpBailOutInstrToProcess = nullptr;
        }
#if DBG
        if (this->DoMarkTempObjectVerify())
        {
            this->currentBlock->tempObjectVerifyTracker->NotifyBailOutRemoval(instr, this);
        }
#endif
    }
}
// Rewrites the bailout kind of an instruction that currently carries BailOutOnImplicitCalls,
// based on whether the implicit-call bailout (and lazy bailout) is actually still needed.
// Returns false when no bailout kind remains at all — the caller is expected to clear the
// instruction's bailout info in that case; returns true otherwise.
bool
BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr* const instr, bool needsBailOutOnImplicitCall, bool needsLazyBailOut)
{
    Assert(instr);
    Assert(instr->HasBailOutInfo());
    Assert(BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind()));
    AssertMsg(
        needsLazyBailOut || instr->GetBailOutKind() == BailOutInfo::WithoutLazyBailOut(instr->GetBailOutKind()),
        "We should have removed all lazy bailout bit at this point if we decided that we wouldn't need it"
    );
    AssertMsg(
        !needsLazyBailOut || instr->GetBailOutKind() == BailOutInfo::WithLazyBailOut(instr->GetBailOutKind()),
        "The lazy bailout bit should be present at this point. We might have removed it incorrectly."
    );

    IR::BailOutKind bailOutKindWithBits = instr->GetBailOutKind();
    const bool hasMarkTempObject = bailOutKindWithBits & IR::BailOutMarkTempObject;

    // Firstly, we remove the mark temp object bit, as it is not needed after the dead store pass.
    // We will later skip removing BailOutOnImplicitCalls when there is a mark temp object bit regardless
    // of `needsBailOutOnImplicitCall`.
    if (hasMarkTempObject)
    {
        bailOutKindWithBits &= ~IR::BailOutMarkTempObject;
        instr->SetBailOutKind(bailOutKindWithBits);
    }

    if (needsBailOutOnImplicitCall)
    {
        // We decided that BailOutOnImplicitCall is needed. So lazy bailout is unnecessary
        // because we are already protected from potential side effects unless the operation
        // itself can change fields' values (StFld/StElem).
        if (needsLazyBailOut && !instr->CanChangeFieldValueWithoutImplicitCall())
        {
            instr->ClearLazyBailOut();
        }
        return true;
    }
    else
    {
        // `needsBailOutOnImplicitCall` also captures our intention to keep BailOutOnImplicitCalls
        // because we want to do fixed field lazy bailout optimization. So if we don't need them,
        // just remove our lazy bailout.
        instr->ClearLazyBailOut();
        // Clearing the lazy bit may have removed the last bailout; nothing more to rewrite then.
        if (!instr->HasBailOutInfo())
        {
            return true;
        }
    }

    const IR::BailOutKind bailOutKindWithoutBits = instr->GetBailOutKindNoBits();
    if (!instr->GetBailOutInfo()->canDeadStore)
    {
        // revisit if canDeadStore is used for anything other than BailOutMarkTempObject
        Assert(hasMarkTempObject);
        // Don't remove the implicit call pre op bailout for mark temp object.
        Assert(bailOutKindWithoutBits == IR::BailOutOnImplicitCallsPreOp);
        return true;
    }

    // At this point, we don't need the bail on implicit calls.
    // Simply use the bailout kind bits as our new bailout kind.
    IR::BailOutKind newBailOutKind = bailOutKindWithBits - bailOutKindWithoutBits;
    if (newBailOutKind == IR::BailOutInvalid)
    {
        // Nothing left — signal the caller to clear the bailout info.
        return false;
    }
    instr->SetBailOutKind(newBailOutKind);
    return true;
}
  2310. bool
  2311. BackwardPass::NeedBailOutOnImplicitCallsForTypedArrayStore(IR::Instr* instr)
  2312. {
  2313. if ((instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  2314. instr->GetDst()->IsIndirOpnd() &&
  2315. instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyTypedArray())
  2316. {
  2317. IR::Opnd * opnd = instr->GetSrc1();
  2318. if (opnd->IsRegOpnd())
  2319. {
  2320. return !opnd->AsRegOpnd()->GetValueType().IsPrimitive() &&
  2321. !opnd->AsRegOpnd()->m_sym->IsInt32() &&
  2322. !opnd->AsRegOpnd()->m_sym->IsFloat64() &&
  2323. !opnd->AsRegOpnd()->m_sym->IsFloatConst() &&
  2324. !opnd->AsRegOpnd()->m_sym->IsIntConst();
  2325. }
  2326. else
  2327. {
  2328. Assert(opnd->IsIntConstOpnd() || opnd->IsInt64ConstOpnd() || opnd->IsFloat32ConstOpnd() || opnd->IsFloatConstOpnd() || opnd->IsAddrOpnd());
  2329. }
  2330. }
  2331. return false;
  2332. }
// Processes the bailout info of a previously saved pre-op bailout instruction, after the
// instruction itself and the relevant ByteCodeUses instructions before it have been handled.
// Returns the instruction the main backward loop should continue from (currentInstr->m_prev,
// re-read because ByteCodeUses instructions may have been removed from the list).
IR::Instr*
BackwardPass::ProcessPendingPreOpBailOutInfo(IR::Instr *const currentInstr)
{
    Assert(!IsCollectionPass());

    // Nothing pending: just keep walking backward.
    if(!preOpBailOutInstrToProcess)
    {
        return currentInstr->m_prev;
    }

    Assert(preOpBailOutInstrToProcess == currentInstr);

    if (!this->IsPrePass())
    {
        // Fold in any ByteCodeUses instructions that immediately precede the bailout
        // instruction. Note: ProcessByteCodeUsesInstr can unlink `instr` from the list,
        // so `prev` is advanced before the call.
        IR::Instr* prev = preOpBailOutInstrToProcess->m_prev;
        while (prev && preOpBailOutInstrToProcess->CanAggregateByteCodeUsesAcrossInstr(prev))
        {
            IR::Instr* instr = prev;
            prev = prev->m_prev;
            if (instr->IsByteCodeUsesInstrFor(preOpBailOutInstrToProcess))
            {
                // If instr is a ByteCodeUsesInstr, it will remove it
                ProcessByteCodeUsesInstr(instr);
            }
        }
    }

    // A pre-op bailout instruction was saved for bailout info processing after the instruction and relevant ByteCodeUses
    // instructions before it have been processed. We can process the bailout info for that instruction now.
    BailOutInfo *const bailOutInfo = preOpBailOutInstrToProcess->GetBailOutInfo();
    Assert(bailOutInfo->bailOutInstr == preOpBailOutInstrToProcess);
    Assert(bailOutInfo->bailOutOffset == preOpBailOutInstrToProcess->GetByteCodeOffset());
    ProcessBailOutInfo(preOpBailOutInstrToProcess, bailOutInfo);
    preOpBailOutInstrToProcess = nullptr;

    // We might have removed the prev instr if it was a ByteCodeUsesInstr
    // Update the prevInstr on the main loop
    return currentInstr->m_prev;
}
// Computes, for a bailout instruction, the set of byte-code symbols the BailOutRecord must
// restore (byteCodeUpwardExposedUsed), preferring type-specialized (int32/float64) versions
// of syms where live, and marks everything needed as upward-exposed so dead-store does not
// remove its defs. BailOnException is special-cased: it must restore only from original
// var syms (see the comment block below).
void
BackwardPass::ProcessBailOutInfo(IR::Instr * instr, BailOutInfo * bailOutInfo)
{
    /*
    When we optimize functions having try-catch, we install a bailout at the starting of the catch block, namely, BailOnException.
    We don't have flow edges from all the possible exception points in the try to the catch block. As a result, this bailout should
    not try to restore from the constant values or copy-prop syms or the type specialized syms, as these may not necessarily be/have
    the right values. For example,

    //constant values
    c =
    try
    {
        <exception>
        c = k (constant)
    }
    catch
    {
        BailOnException
        = c  <-- We need to restore c from the value outside the try.
    }

    //copy-prop syms
    c =
    try
    {
        b = a
        <exception>
        c = b
    }
    catch
    {
        BailOnException
        = c  <-- We really want to restore c from its original sym, and not from its copy-prop sym, a
    }

    //type specialized syms
    a =
    try
    {
        <exception>
        a++  <-- type specializes a
    }
    catch
    {
        BailOnException
        = a  <-- We need to restore a from its var version.
    }
    */
    BasicBlock * block = this->currentBlock;
    BVSparse<JitArenaAllocator> * byteCodeUpwardExposedUsed = nullptr;

#if DBG
    // Debug-only verification that the tracked byte-code register uses at this offset
    // agree with what the backward pass computed.
    if (DoCaptureByteCodeUpwardExposedUsed() &&
        !IsPrePass() &&
        bailOutInfo->bailOutFunc->HasByteCodeOffset() &&
        bailOutInfo->bailOutFunc->byteCodeRegisterUses)
    {
        uint32 offset = bailOutInfo->bailOutOffset;
        Assert(offset != Js::Constants::NoByteCodeOffset);
        BVSparse<JitArenaAllocator>* trackingByteCodeUpwardExposedUsed = bailOutInfo->bailOutFunc->GetByteCodeOffsetUses(offset);
        if (trackingByteCodeUpwardExposedUsed)
        {
            BVSparse<JitArenaAllocator>* tmpBv = nullptr;
            if (instr->IsBranchInstr())
            {
                IR::BranchInstr* branchInstr = instr->AsBranchInstr();
                IR::LabelInstr* target = branchInstr->GetTarget();
                uint32 targetOffset = target->GetByteCodeOffset();
                // If the instr's label has the same bytecode offset as the instr then move the targetOffset
                // to the next bytecode instr. This can happen when we have airlock blocks or compensation
                // code, but also for infinite loops. Don't do it for the latter.
                if (targetOffset == instr->GetByteCodeOffset() && block != target->GetBasicBlock())
                {
                    // This can happen if the target is a break or airlock block.
                    Assert(
                        target->GetBasicBlock()->isAirLockBlock ||
                        target->GetBasicBlock()->isAirLockCompensationBlock ||
                        target->GetBasicBlock()->isBreakBlock ||
                        target->GetBasicBlock()->isBreakCompensationBlockAtSink ||
                        target->GetBasicBlock()->isBreakCompensationBlockAtSource
                    );
                    targetOffset = target->GetNextByteCodeInstr()->GetByteCodeOffset();
                }
                BVSparse<JitArenaAllocator>* branchTargetUpwardExposed = target->m_func->GetByteCodeOffsetUses(targetOffset);
                if (branchTargetUpwardExposed)
                {
                    // The bailout should restore both the bailout destination and
                    // the branch target since we don't know where we'll end up.
                    trackingByteCodeUpwardExposedUsed = tmpBv = trackingByteCodeUpwardExposedUsed->OrNew(branchTargetUpwardExposed);
                }
            }
            Assert(trackingByteCodeUpwardExposedUsed);
            VerifyByteCodeUpwardExposed(block, bailOutInfo->bailOutFunc, trackingByteCodeUpwardExposedUsed, instr, offset);
            if (tmpBv)
            {
                JitAdelete(tmpBv->GetAllocator(), tmpBv);
            }
        }
    }
#endif

    Assert(bailOutInfo->bailOutInstr == instr);

    // The byteCodeUpwardExposedUsed should only be assigned once. The only case which would break this
    // assumption is when we are optimizing a function having try-catch. In that case, we need the
    // byteCodeUpwardExposedUsed analysis in the initial backward pass too.
    Assert(bailOutInfo->byteCodeUpwardExposedUsed == nullptr || (this->func->HasTry() && this->func->DoOptimizeTry()));

    // Make a copy of the byteCodeUpwardExposedUsed so we can remove the constants
    if (!this->IsPrePass())
    {
        // Create the BV of symbols that need to be restored in the BailOutRecord
        byteCodeUpwardExposedUsed = block->byteCodeUpwardExposedUsed->CopyNew(this->func->m_alloc);
        bailOutInfo->byteCodeUpwardExposedUsed = byteCodeUpwardExposedUsed;
    }
    else
    {
        // Create a temporary byteCodeUpwardExposedUsed
        byteCodeUpwardExposedUsed = block->byteCodeUpwardExposedUsed->CopyNew(this->tempAlloc);
    }

    // All the register-based argument syms need to be tracked. They are either:
    //      1. Referenced as constants in bailOutInfo->usedcapturedValues.constantValues
    //      2. Referenced using copy prop syms in bailOutInfo->usedcapturedValues.copyPropSyms
    //      3. Marked as m_isBailOutReferenced = true & added to upwardExposedUsed bit vector to ensure we do not dead store their defs.
    // The third set of syms is represented by the bailoutReferencedArgSymsBv.
    BVSparse<JitArenaAllocator>* bailoutReferencedArgSymsBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
    if (!this->IsPrePass())
    {
        bailOutInfo->IterateArgOutSyms([=](uint, uint, StackSym* sym) {
            if (!sym->IsArgSlotSym())
            {
                bailoutReferencedArgSymsBv->Set(sym->m_id);
            }
        });
    }

    // Process Argument object first, as they can be found on the stack and don't need to rely on copy prop
    this->ProcessBailOutArgObj(bailOutInfo, byteCodeUpwardExposedUsed);

    if (instr->m_opcode != Js::OpCode::BailOnException) // see comment at the beginning of this function
    {
        this->ProcessBailOutConstants(bailOutInfo, byteCodeUpwardExposedUsed, bailoutReferencedArgSymsBv);
        this->ProcessBailOutCopyProps(bailOutInfo, byteCodeUpwardExposedUsed, bailoutReferencedArgSymsBv);
    }

    BVSparse<JitArenaAllocator> * tempBv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);

    if (bailOutInfo->liveVarSyms)
    {
        // Prefer to restore from type-specialized versions of the sym, as that will reduce the need for potentially expensive
        // ToVars that can more easily be eliminated due to being dead stores.

#if DBG
        Assert(tempBv->IsEmpty());
        // Verify that all syms to restore are live in some fashion
        tempBv->Minus(byteCodeUpwardExposedUsed, bailOutInfo->liveVarSyms);
        tempBv->Minus(bailOutInfo->liveLosslessInt32Syms);
        tempBv->Minus(bailOutInfo->liveFloat64Syms);
        Assert(tempBv->IsEmpty());
#endif

        if (this->func->IsJitInDebugMode())
        {
            // Add to byteCodeUpwardExposedUsed the non-temp local vars used so far to restore during bail out.
            // The ones that are not used so far will get their values from bytecode when we continue after bail out in interpreter.
            Assert(this->func->m_nonTempLocalVars);
            tempBv->And(this->func->m_nonTempLocalVars, bailOutInfo->liveVarSyms);

            // Remove syms that are restored in other ways than byteCodeUpwardExposedUsed.
            FOREACH_SLIST_ENTRY(ConstantStackSymValue, value, &bailOutInfo->usedCapturedValues->constantValues)
            {
                Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
                if (value.Key()->HasByteCodeRegSlot())
                {
                    tempBv->Clear(value.Key()->GetByteCodeRegSlot());
                }
            }
            NEXT_SLIST_ENTRY;
            FOREACH_SLIST_ENTRY(CopyPropSyms, value, &bailOutInfo->usedCapturedValues->copyPropSyms)
            {
                Assert(value.Key()->HasByteCodeRegSlot() || value.Key()->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture);
                if (value.Key()->HasByteCodeRegSlot())
                {
                    tempBv->Clear(value.Key()->GetByteCodeRegSlot());
                }
            }
            NEXT_SLIST_ENTRY;
            if (bailOutInfo->usedCapturedValues->argObjSyms)
            {
                tempBv->Minus(bailOutInfo->usedCapturedValues->argObjSyms);
            }

            byteCodeUpwardExposedUsed->Or(tempBv);
        }

        if (instr->m_opcode != Js::OpCode::BailOnException) // see comment at the beginning of this function
        {
            // Int32
            // Move syms live as lossless int32 out of the var set and into their int32 equivalents.
            tempBv->And(byteCodeUpwardExposedUsed, bailOutInfo->liveLosslessInt32Syms);
            byteCodeUpwardExposedUsed->Minus(tempBv);
            FOREACH_BITSET_IN_SPARSEBV(symId, tempBv)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                Assert(stackSym->GetType() == TyVar);
                StackSym * int32StackSym = stackSym->GetInt32EquivSym(nullptr);
                Assert(int32StackSym);
                byteCodeUpwardExposedUsed->Set(int32StackSym->m_id);
            }
            NEXT_BITSET_IN_SPARSEBV;

            // Float64
            // Same for syms live as float64.
            tempBv->And(byteCodeUpwardExposedUsed, bailOutInfo->liveFloat64Syms);
            byteCodeUpwardExposedUsed->Minus(tempBv);
            FOREACH_BITSET_IN_SPARSEBV(symId, tempBv)
            {
                StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
                Assert(stackSym->GetType() == TyVar);
                StackSym * float64StackSym = stackSym->GetFloat64EquivSym(nullptr);
                Assert(float64StackSym);
                byteCodeUpwardExposedUsed->Set(float64StackSym->m_id);

                // This float-specialized sym is going to be used to restore the corresponding byte-code register. Need to
                // ensure that the float value can be precisely coerced back to the original Var value by requiring that it is
                // specialized using BailOutNumberOnly.
                float64StackSym->m_requiresBailOnNotNumber = true;
            }
            NEXT_BITSET_IN_SPARSEBV;
        }
        // Var
        // Any remaining syms to restore will be restored from their var versions
    }
    else
    {
        Assert(!this->func->DoGlobOpt());
    }

    JitAdelete(this->tempAlloc, tempBv);

    // BailOnNoProfile makes some edges dead. Upward exposed symbols info set after the BailOnProfile won't
    // flow through these edges, and, in turn, not through predecessor edges of the block containing the
    // BailOnNoProfile. This is specifically bad for an inlinee's argout syms as they are set as upward exposed
    // when we see the InlineeEnd, but may not look so to some blocks and may get overwritten.
    // Set the argout syms as upward exposed here.
    if (instr->m_opcode == Js::OpCode::BailOnNoProfile && instr->m_func->IsInlinee() &&
        instr->m_func->m_hasInlineArgsOpt && instr->m_func->frameInfo->isRecorded)
    {
        instr->m_func->frameInfo->IterateSyms([=](StackSym* argSym)
        {
            this->currentBlock->upwardExposedUses->Set(argSym->m_id);
        });
    }

    // Mark all the register that we need to restore as used (excluding constants)
    block->upwardExposedUses->Or(byteCodeUpwardExposedUsed);
    block->upwardExposedUses->Or(bailoutReferencedArgSymsBv);

    if (!this->IsPrePass())
    {
        bailOutInfo->IterateArgOutSyms([=](uint index, uint, StackSym* sym) {
            if (sym->IsArgSlotSym() || bailoutReferencedArgSymsBv->Test(sym->m_id))
            {
                bailOutInfo->argOutSyms[index]->m_isBailOutReferenced = true;
            }
        });
    }
    JitAdelete(this->tempAlloc, bailoutReferencedArgSymsBv);

    if (this->IsPrePass())
    {
        // The pre-pass copy was only scratch; the real pass stored its copy on bailOutInfo above.
        JitAdelete(this->tempAlloc, byteCodeUpwardExposedUsed);
    }
}
  2617. void
  2618. BackwardPass::ClearDstUseForPostOpLazyBailOut(IR::Instr *instr)
  2619. {
  2620. // Refer to comments on BailOutInfo::ClearUseOfDst()
  2621. Assert(instr->HasLazyBailOut());
  2622. IR::Opnd *dst = instr->GetDst();
  2623. if (!this->IsPrePass() && dst && dst->IsRegOpnd())
  2624. {
  2625. StackSym *stackSym = dst->GetStackSym();
  2626. if (stackSym) {
  2627. instr->GetBailOutInfo()->ClearUseOfDst(stackSym->m_id);
  2628. }
  2629. }
  2630. }
  2631. void
  2632. BackwardPass::ProcessBlock(BasicBlock * block)
  2633. {
  2634. this->currentBlock = block;
  2635. this->MergeSuccBlocksInfo(block);
  2636. #if DBG
  2637. struct ByteCodeRegisterUsesTracker
  2638. {
  2639. Js::OpCode opcode = Js::OpCode::Nop;
  2640. uint32 offset = Js::Constants::NoByteCodeOffset;
  2641. Func* func = nullptr;
  2642. bool active = false;
  2643. void Capture(BackwardPass* backwardPass, BasicBlock* block)
  2644. {
  2645. if (offset != Js::Constants::NoByteCodeOffset)
  2646. {
  2647. backwardPass->CaptureByteCodeUpwardExposed(block, func, opcode, offset);
  2648. offset = Js::Constants::NoByteCodeOffset;
  2649. }
  2650. }
  2651. static bool IsValidByteCodeOffset(IR::Instr* instr)
  2652. {
  2653. return instr->m_func->HasByteCodeOffset() &&
  2654. instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset;
  2655. };
  2656. static bool IsInstrOffsetBoundary(IR::Instr* instr)
  2657. {
  2658. if (IsValidByteCodeOffset(instr))
  2659. {
  2660. if (instr->m_opcode == Js::OpCode::Leave)
  2661. {
  2662. // Leave is a special case, capture now and ignore other instrs at that offset
  2663. return true;
  2664. }
  2665. else
  2666. {
  2667. uint32 bytecodeOffset = instr->GetByteCodeOffset();
  2668. IR::Instr* prev = instr->m_prev;
  2669. while (prev && !IsValidByteCodeOffset(prev))
  2670. {
  2671. prev = prev->m_prev;
  2672. }
  2673. return !prev || prev->GetByteCodeOffset() != bytecodeOffset;
  2674. }
  2675. }
  2676. return false;
  2677. }
  2678. void CheckInstrIsOffsetBoundary(IR::Instr* instr)
  2679. {
  2680. if (active && IsInstrOffsetBoundary(instr))
  2681. {
  2682. // This is the last occurence of that bytecode offset
  2683. // We need to process this instr before we capture and there are too many `continue`
  2684. // to safely do this check at the end of the loop
  2685. // Save the info and Capture the ByteCodeUpwardExposedUsed on next loop iteration
  2686. opcode = instr->m_opcode;
  2687. offset = instr->GetByteCodeOffset();
  2688. func = instr->m_func;
  2689. }
  2690. }
  2691. };
  2692. ByteCodeRegisterUsesTracker tracker;
  2693. tracker.active = tag == Js::CaptureByteCodeRegUsePhase && DoCaptureByteCodeUpwardExposedUsed();
  2694. #endif
  2695. #if DBG_DUMP
  2696. TraceBlockUses(block, true);
  2697. #endif
  2698. FOREACH_INSTR_BACKWARD_IN_BLOCK_EDITING(instr, instrPrev, block)
  2699. {
  2700. #if DBG_DUMP
  2701. TraceInstrUses(block, instr, true);
  2702. #endif
  2703. #if DBG
  2704. if (tracker.active)
  2705. {
  2706. // Track Symbol with weird lifetime to exclude them from the ByteCodeUpwardExpose verification
  2707. if (instr->m_func->GetScopeObjSym())
  2708. {
  2709. StackSym* sym = instr->m_func->GetScopeObjSym();
  2710. if (sym->HasByteCodeRegSlot())
  2711. {
  2712. block->excludeByteCodeUpwardExposedTracking->Set(sym->GetByteCodeRegSlot());
  2713. }
  2714. }
  2715. tracker.Capture(this, block);
  2716. tracker.CheckInstrIsOffsetBoundary(instr);
  2717. }
  2718. #endif
  2719. AssertOrFailFastMsg(!instr->IsLowered(), "Lowered instruction detected in pre-lower context!");
  2720. this->currentInstr = instr;
  2721. this->currentRegion = this->currentBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
  2722. IR::Instr * insertedInstr = TryChangeInstrForStackArgOpt();
  2723. if (insertedInstr != nullptr)
  2724. {
  2725. instrPrev = insertedInstr;
  2726. continue;
  2727. }
  2728. MarkScopeObjSymUseForStackArgOpt();
  2729. ProcessBailOnStackArgsOutOfActualsRange();
  2730. if (ProcessNoImplicitCallUses(instr) || this->ProcessByteCodeUsesInstr(instr) || this->ProcessBailOutInfo(instr))
  2731. {
  2732. continue;
  2733. }
  2734. IR::Instr *instrNext = instr->m_next;
  2735. if (this->TrackNoImplicitCallInlinees(instr))
  2736. {
  2737. instrPrev = instrNext->m_prev;
  2738. continue;
  2739. }
  2740. if (CanDeadStoreInstrForScopeObjRemoval() && DeadStoreOrChangeInstrForScopeObjRemoval(&instrPrev))
  2741. {
  2742. continue;
  2743. }
  2744. bool hasLiveFields = (block->upwardExposedFields && !block->upwardExposedFields->IsEmpty());
  2745. if (this->tag == Js::DeadStorePhase && block->stackSymToFinalType != nullptr)
  2746. {
  2747. this->InsertTypeTransitionsAtPotentialKills();
  2748. }
  2749. IR::Opnd * opnd = instr->GetDst();
  2750. if (opnd != nullptr)
  2751. {
  2752. bool isRemoved = ReverseCopyProp(instr);
  2753. if (isRemoved)
  2754. {
  2755. instrPrev = instrNext->m_prev;
  2756. continue;
  2757. }
  2758. if (instr->m_opcode == Js::OpCode::Conv_Bool)
  2759. {
  2760. isRemoved = this->FoldCmBool(instr);
  2761. if (isRemoved)
  2762. {
  2763. continue;
  2764. }
  2765. }
  2766. ProcessNewScObject(instr);
  2767. this->ProcessTransfers(instr);
  2768. isRemoved = this->ProcessDef(opnd);
  2769. if (isRemoved)
  2770. {
  2771. continue;
  2772. }
  2773. }
  2774. if(!IsCollectionPass())
  2775. {
  2776. this->MarkTempProcessInstr(instr);
  2777. this->ProcessFieldKills(instr);
  2778. if (this->DoDeadStoreSlots()
  2779. && (instr->HasAnyImplicitCalls() || instr->HasBailOutInfo() || instr->UsesAllFields()))
  2780. {
  2781. // Can't dead-store slots if there can be an implicit-call, an exception, or a bailout
  2782. block->slotDeadStoreCandidates->ClearAll();
  2783. }
  2784. TrackIntUsage(instr);
  2785. TrackBitWiseOrNumberOp(instr);
  2786. TrackFloatSymEquivalence(instr);
  2787. }
  2788. opnd = instr->GetSrc1();
  2789. if (opnd != nullptr)
  2790. {
  2791. this->ProcessUse(opnd);
  2792. opnd = instr->GetSrc2();
  2793. if (opnd != nullptr)
  2794. {
  2795. this->ProcessUse(opnd);
  2796. }
  2797. }
  2798. if(IsCollectionPass())
  2799. {
  2800. #ifndef _M_ARM
  2801. if (
  2802. this->collectionPassSubPhase == CollectionPassSubPhase::FirstPass
  2803. && !this->func->IsSimpleJit()
  2804. )
  2805. {
  2806. // In the collection pass we do multiple passes over loops. In these passes we keep
  2807. // track of sets of symbols, such that we can know whether or not they are used in
  2808. // ways that we need to protect them from side-channel attacks.
  2809. IR::Opnd const * src1 = instr->GetSrc1();
  2810. IR::Opnd const * src2 = instr->GetSrc2();
  2811. IR::Opnd const * dest = instr->GetDst();
  2812. // The marking is as follows, by default:
  2813. // 1. symbols on an instruction directly get marked as being part of the same set.
  2814. // 2. symbols used in indiropnds on an instruction get marked as being dereferenced.
  2815. // 3. symbols used as sources for some instructions get marked as being dereferenced.
  2816. // 4. non-type-specialized symbols tend to get marked as dereferenced.
  2817. // First, we need to find any symbol associated with this instruction as a targeted
  2818. // symid for the merge operations. This simplifies the later code.
  2819. auto getAnyDirectSymID = [](IR::Opnd const* opnd)
  2820. {
  2821. SymID temp = SymID_Invalid;
  2822. if (opnd == nullptr)
  2823. {
  2824. return temp;
  2825. }
  2826. switch (opnd->m_kind)
  2827. {
  2828. case IR::OpndKind::OpndKindInvalid:
  2829. AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
  2830. break;
  2831. case IR::OpndKind::OpndKindIntConst:
  2832. case IR::OpndKind::OpndKindInt64Const:
  2833. case IR::OpndKind::OpndKindFloatConst:
  2834. case IR::OpndKind::OpndKindFloat32Const:
  2835. case IR::OpndKind::OpndKindSimd128Const:
  2836. // Nothing to do here, no symbols involved
  2837. break;
  2838. case IR::OpndKind::OpndKindHelperCall:
  2839. // Nothing here either, I think?
  2840. break;
  2841. case IR::OpndKind::OpndKindSym:
  2842. temp = opnd->AsSymOpnd()->m_sym->m_id;
  2843. break;
  2844. case IR::OpndKind::OpndKindReg:
  2845. temp = opnd->AsRegOpnd()->m_sym->m_id;
  2846. break;
  2847. case IR::OpndKind::OpndKindAddr:
  2848. // Should be constant, so nothing to do
  2849. break;
  2850. case IR::OpndKind::OpndKindIndir:
  2851. // IndirOpnds don't themselves have symbols
  2852. break;
  2853. case IR::OpndKind::OpndKindLabel:
  2854. // Should be constant, so not an issue
  2855. break;
  2856. case IR::OpndKind::OpndKindMemRef:
  2857. // Should get a closer look, but looks ok?
  2858. break;
  2859. case IR::OpndKind::OpndKindRegBV:
  2860. // Should be ok
  2861. break;
  2862. case IR::OpndKind::OpndKindList:
  2863. // Since it's a list of RegOpnds, we just need to look at the first
  2864. {
  2865. IR::ListOpnd const* list = opnd->AsListOpnd();
  2866. if (list->Count() > 0)
  2867. {
  2868. temp = list->Item(0)->m_sym->m_id;
  2869. }
  2870. }
  2871. break;
  2872. default:
  2873. AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
  2874. break;
  2875. }
  2876. return temp;
  2877. };
  2878. SymID destSymID = getAnyDirectSymID(dest);
  2879. if (destSymID == SymID_Invalid)
  2880. {
  2881. // It looks like we have no assignment to a symbol. As this pass is to mark the
  2882. // symbols that are in the same set through assignment or computation, the lack
  2883. // of a destination means that we don't have any set joins to do. We may need a
  2884. // pass over the source operands to mark dereferences, but that's simpler.
  2885. }
  2886. else
  2887. {
  2888. // We have a base, so now we want to go through and add any symbols to that set
  2889. // if they're on the base level of operands on the function.
  2890. auto addSymbolToSet = [](IR::Opnd const* opnd, Loop::LoopSymClusterList* scl, SymID targetSymID)
  2891. {
  2892. if (opnd == nullptr)
  2893. {
  2894. return;
  2895. }
  2896. switch (opnd->m_kind)
  2897. {
  2898. case IR::OpndKind::OpndKindInvalid:
  2899. AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
  2900. break;
  2901. case IR::OpndKind::OpndKindIntConst:
  2902. case IR::OpndKind::OpndKindInt64Const:
  2903. case IR::OpndKind::OpndKindFloatConst:
  2904. case IR::OpndKind::OpndKindFloat32Const:
  2905. case IR::OpndKind::OpndKindSimd128Const:
  2906. // Nothing to do here, no symbols involved
  2907. break;
  2908. case IR::OpndKind::OpndKindHelperCall:
  2909. // Nothing here either, I think?
  2910. break;
  2911. case IR::OpndKind::OpndKindSym:
  2912. scl->Merge(targetSymID, opnd->AsSymOpnd()->m_sym->m_id);
  2913. break;
  2914. case IR::OpndKind::OpndKindReg:
  2915. scl->Merge(targetSymID, opnd->AsRegOpnd()->m_sym->m_id);
  2916. break;
  2917. case IR::OpndKind::OpndKindAddr:
  2918. // Should be constant, so nothing to do
  2919. break;
  2920. case IR::OpndKind::OpndKindIndir:
  2921. // IndirOpnds don't themselves have symbols
  2922. break;
  2923. case IR::OpndKind::OpndKindLabel:
  2924. // Should be constant, so not an issue
  2925. break;
  2926. case IR::OpndKind::OpndKindMemRef:
  2927. // Should get a closer look, but looks ok?
  2928. break;
  2929. case IR::OpndKind::OpndKindRegBV:
  2930. // Should be ok
  2931. break;
  2932. case IR::OpndKind::OpndKindList:
  2933. // Needs iteration, but is straightforward beyond that
  2934. {
  2935. IR::ListOpnd const* list = opnd->AsListOpnd();
  2936. for (int iter = 0; iter < list->Count(); iter++)
  2937. {
  2938. scl->Merge(targetSymID, list->Item(iter)->m_sym->m_id);
  2939. }
  2940. }
  2941. break;
  2942. default:
  2943. AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
  2944. break;
  2945. }
  2946. };
  2947. addSymbolToSet(src1, this->currentPrePassLoop->symClusterList, destSymID);
  2948. addSymbolToSet(src2, this->currentPrePassLoop->symClusterList, destSymID);
  2949. }
  2950. // Now we get to the second part - symbols used in indiropnds get marked as dereferenced
  2951. // This is just a matter of updating a bitvector, so it's fairly straightforward.
  2952. auto markDereferences = [](IR::Opnd const* opnd, BVSparse<JitArenaAllocator>* bv)
  2953. {
  2954. if (opnd == nullptr)
  2955. {
  2956. return;
  2957. }
  2958. switch (opnd->m_kind)
  2959. {
  2960. case IR::OpndKind::OpndKindInvalid:
  2961. AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
  2962. break;
  2963. case IR::OpndKind::OpndKindIntConst:
  2964. case IR::OpndKind::OpndKindInt64Const:
  2965. case IR::OpndKind::OpndKindFloatConst:
  2966. case IR::OpndKind::OpndKindFloat32Const:
  2967. case IR::OpndKind::OpndKindSimd128Const:
  2968. // Nothing to do here, no symbols involved
  2969. break;
  2970. case IR::OpndKind::OpndKindHelperCall:
  2971. // Nothing here either, I think?
  2972. break;
  2973. case IR::OpndKind::OpndKindSym:
  2974. // If it's not type-specialized, we may dereference it.
  2975. if (!(opnd->GetValueType().IsNotObject()))
  2976. {
  2977. bv->Set(opnd->AsSymOpnd()->m_sym->m_id);
  2978. }
  2979. break;
  2980. case IR::OpndKind::OpndKindReg:
  2981. // If it's not type-specialized, we may dereference it.
  2982. if (!(opnd->GetValueType().IsNotObject()) && !opnd->AsRegOpnd()->m_sym->IsTypeSpec())
  2983. {
  2984. bv->Set(opnd->AsRegOpnd()->m_sym->m_id);
  2985. }
  2986. break;
  2987. case IR::OpndKind::OpndKindAddr:
  2988. // Should be constant, so nothing to do
  2989. break;
  2990. case IR::OpndKind::OpndKindIndir:
  2991. // Need to handle each component
  2992. {
  2993. IR::IndirOpnd const* indirOpnd = opnd->AsIndirOpnd();
  2994. if (indirOpnd->GetBaseOpnd())
  2995. {
  2996. bv->Set(indirOpnd->GetBaseOpnd()->m_sym->m_id);
  2997. }
  2998. if (indirOpnd->GetIndexOpnd())
  2999. {
  3000. bv->Set(indirOpnd->GetIndexOpnd()->m_sym->m_id);
  3001. }
  3002. }
  3003. break;
  3004. case IR::OpndKind::OpndKindLabel:
  3005. // Should be constant, so not an issue
  3006. break;
  3007. case IR::OpndKind::OpndKindMemRef:
  3008. // Should get a closer look, but looks ok?
  3009. break;
  3010. case IR::OpndKind::OpndKindRegBV:
  3011. // Should be ok
  3012. break;
  3013. case IR::OpndKind::OpndKindList:
  3014. // Needs iteration, but is straightforward beyond that
  3015. {
  3016. IR::ListOpnd const* list = opnd->AsListOpnd();
  3017. for (int iter = 0; iter < list->Count(); iter++)
  3018. {
  3019. // should be the same as OpndKindReg, since ListOpndType is RegOpnd
  3020. if (!(list->Item(iter)->GetValueType().IsNotObject()) && !opnd->AsRegOpnd()->m_sym->IsTypeSpec())
  3021. {
  3022. bv->Set(list->Item(iter)->m_sym->m_id);
  3023. }
  3024. }
  3025. }
  3026. break;
  3027. default:
  3028. AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
  3029. break;
  3030. }
  3031. };
  3032. markDereferences(dest, this->currentPrePassLoop->internallyDereferencedSyms);
  3033. markDereferences(src1, this->currentPrePassLoop->internallyDereferencedSyms);
  3034. markDereferences(src2, this->currentPrePassLoop->internallyDereferencedSyms);
  3035. auto explicitlyMarkDereferenced = [](IR::Opnd const* opnd, BVSparse<JitArenaAllocator>* bv)
  3036. {
  3037. if (opnd == nullptr)
  3038. {
  3039. return;
  3040. }
  3041. switch (opnd->m_kind)
  3042. {
  3043. case IR::OpndKind::OpndKindInvalid:
  3044. AssertOrFailFastMsg(false, "There should be no invalid operand kinds at this point...");
  3045. break;
  3046. case IR::OpndKind::OpndKindIntConst:
  3047. case IR::OpndKind::OpndKindInt64Const:
  3048. case IR::OpndKind::OpndKindFloatConst:
  3049. case IR::OpndKind::OpndKindFloat32Const:
  3050. case IR::OpndKind::OpndKindSimd128Const:
  3051. // Nothing to do here, no symbols involved
  3052. break;
  3053. case IR::OpndKind::OpndKindHelperCall:
  3054. // Nothing here either, I think?
  3055. break;
  3056. case IR::OpndKind::OpndKindSym:
  3057. // The instruction using this means that we may dereference the symbol,
  3058. // regardless of type spec
  3059. bv->Set(opnd->AsSymOpnd()->m_sym->m_id);
  3060. break;
  3061. case IR::OpndKind::OpndKindReg:
  3062. // The instruction using this means that we may dereference the symbol,
  3063. // regardless of type spec
  3064. bv->Set(opnd->AsRegOpnd()->m_sym->m_id);
  3065. break;
  3066. case IR::OpndKind::OpndKindAddr:
  3067. // Should be constant, so nothing to do
  3068. break;
  3069. case IR::OpndKind::OpndKindIndir:
  3070. // Need to handle each component
  3071. {
  3072. IR::IndirOpnd const* indirOpnd = opnd->AsIndirOpnd();
  3073. if (indirOpnd->GetBaseOpnd())
  3074. {
  3075. bv->Set(indirOpnd->GetBaseOpnd()->m_sym->m_id);
  3076. }
  3077. if (indirOpnd->GetIndexOpnd())
  3078. {
  3079. bv->Set(indirOpnd->GetIndexOpnd()->m_sym->m_id);
  3080. }
  3081. }
  3082. break;
  3083. case IR::OpndKind::OpndKindLabel:
  3084. // Should be constant, so not an issue
  3085. break;
  3086. case IR::OpndKind::OpndKindMemRef:
  3087. // Should get a closer look, but looks ok?
  3088. break;
  3089. case IR::OpndKind::OpndKindRegBV:
  3090. // Should be ok
  3091. break;
  3092. case IR::OpndKind::OpndKindList:
  3093. // Needs iteration, but is straightforward beyond that
  3094. {
  3095. IR::ListOpnd const* list = opnd->AsListOpnd();
  3096. for (int iter = 0; iter < list->Count(); iter++)
  3097. {
  3098. // The instruction using this means that we may dereference the symbol,
  3099. // regardless of type spec
  3100. bv->Set(list->Item(iter)->m_sym->m_id);
  3101. }
  3102. }
  3103. break;
  3104. default:
  3105. AssertOrFailFastMsg(false, "This should be unreachable - if we've added another OpndKind, add proper handling for it");
  3106. break;
  3107. }
  3108. };
  3109. // We may also have some specific instructions that dereference things - we can
  3110. // handle those specifically, since there's only a few of them
  3111. switch (instr->m_opcode)
  3112. {
  3113. case Js::OpCode::StArrInlineItem_CI4:
  3114. case Js::OpCode::StArrItemC_CI4:
  3115. case Js::OpCode::StArrItemI_CI4:
  3116. case Js::OpCode::StArrSegElemC:
  3117. case Js::OpCode::StArrSegItem_A:
  3118. case Js::OpCode::StArrSegItem_CI4:
  3119. case Js::OpCode::StArrViewElem:
  3120. case Js::OpCode::StAtomicWasm:
  3121. case Js::OpCode::StElemC:
  3122. case Js::OpCode::StElemI_A:
  3123. case Js::OpCode::StElemI_A_Strict:
  3124. case Js::OpCode::StEnvObjSlot:
  3125. case Js::OpCode::StEnvObjSlotChkUndecl:
  3126. case Js::OpCode::StFld:
  3127. case Js::OpCode::StFldStrict:
  3128. case Js::OpCode::StFuncExpr:
  3129. case Js::OpCode::StInnerObjSlot:
  3130. case Js::OpCode::StInnerObjSlotChkUndecl:
  3131. case Js::OpCode::StInnerSlot:
  3132. case Js::OpCode::StInnerSlotChkUndecl:
  3133. case Js::OpCode::StLocalFld:
  3134. case Js::OpCode::StLocalFuncExpr:
  3135. case Js::OpCode::StLocalObjSlot:
  3136. case Js::OpCode::StLocalObjSlotChkUndecl:
  3137. case Js::OpCode::StLocalSlot:
  3138. case Js::OpCode::StLocalSlotChkUndecl:
  3139. case Js::OpCode::StLoopBodyCount:
  3140. case Js::OpCode::StModuleSlot:
  3141. case Js::OpCode::StObjSlot:
  3142. case Js::OpCode::StObjSlotChkUndecl:
  3143. case Js::OpCode::StParamObjSlot:
  3144. case Js::OpCode::StParamObjSlotChkUndecl:
  3145. case Js::OpCode::StParamSlot:
  3146. case Js::OpCode::StParamSlotChkUndecl:
  3147. case Js::OpCode::StRootFld:
  3148. case Js::OpCode::StRootFldStrict:
  3149. case Js::OpCode::StSlot:
  3150. case Js::OpCode::StSlotBoxTemp:
  3151. case Js::OpCode::StSlotChkUndecl:
  3152. case Js::OpCode::StSuperFld:
  3153. case Js::OpCode::StSuperFldStrict:
  3154. case Js::OpCode::ProfiledStElemI_A:
  3155. case Js::OpCode::ProfiledStElemI_A_Strict:
  3156. case Js::OpCode::ProfiledStFld:
  3157. case Js::OpCode::ProfiledStFldStrict:
  3158. case Js::OpCode::ProfiledStLocalFld:
  3159. case Js::OpCode::ProfiledStRootFld:
  3160. case Js::OpCode::ProfiledStRootFldStrict:
  3161. case Js::OpCode::ProfiledStSuperFld:
  3162. case Js::OpCode::ProfiledStSuperFldStrict:
  3163. // Unfortunately, being fed into a store means that we could have aliasing, and the
  3164. // consequence is that it may be re-read and then dereferenced. Note that we can do
  3165. // this case if we poison any array symbol that we store to on the way out, but the
  3166. // aliasing problem remains.
  3167. case Js::OpCode::ArgOut_A:
  3168. case Js::OpCode::ArgOut_ANonVar:
  3169. case Js::OpCode::ArgOut_A_Dynamic:
  3170. case Js::OpCode::ArgOut_A_FixupForStackArgs:
  3171. case Js::OpCode::ArgOut_A_FromStackArgs:
  3172. case Js::OpCode::ProfiledArgOut_A:
  3173. // Getting passed to another function is a boundary that we can't analyze over.
  3174. case Js::OpCode::Ret:
  3175. // Return arcs are pretty short in speculation, so we have to assume that we may be
  3176. // returning to a situation that will dereference the symbol. Note that we will not
  3177. // hit this path in normal jitted code, but it's more common in jitloopbody'd code.
  3178. explicitlyMarkDereferenced(instr->GetSrc1(), this->currentPrePassLoop->internallyDereferencedSyms);
  3179. break;
  3180. default:
  3181. // most instructions don't have this sort of behavior
  3182. break;
  3183. }
  3184. }
  3185. #endif
  3186. // Continue normal CollectionPass behavior
  3187. #if DBG_DUMP
  3188. TraceInstrUses(block, instr, false);
  3189. #endif
  3190. continue;
  3191. }
  3192. if (this->tag == Js::DeadStorePhase)
  3193. {
  3194. #ifndef _M_ARM
  3195. if(
  3196. block->loop
  3197. && !this->isLoopPrepass
  3198. && !this->func->IsSimpleJit()
  3199. )
  3200. {
  3201. // In the second pass, we mark instructions that we go by as being safe or unsafe.
  3202. //
  3203. // This is all based on the information which we gathered in the previous pass. The
  3204. // symbol sets are cross-referenced and the bit-vector information is set such that
  3205. // the bit vector now holds a complete list of which symbols are dereferenced, both
  3206. // directly or indirectly, in the loop, so we can see if a particular instr creates
  3207. // such a symbol. If it doesn't, then we will not mask its destination, as it's not
  3208. // necessary to create a safe program.
  3209. //
  3210. // Note that if we avoiding doing the masking here, we need to instead do it on the
  3211. // out-edges of the loop - otherwise an unsafe use of the symbol could happen after
  3212. // the loop and not get caught.
  3213. // This helper goes through and marks loop out-edges for a particular symbol set.
// Adds out-edge masking for `symID` on the masking blocks of `loop`: any
// symbol in symID's cluster that is a stack sym and is upward-exposed in a
// masking block is added to that block's ByteCodeUsesInstr mask set, so the
// Lowerer will emit a speculation fence/CMOV for it on the loop exit.
// Declared as a static function pointer initialized from a captureless
// lambda (rather than a plain `auto` lambda) so the body can recurse into
// itself for enclosing loops; it is stateless, so the function-local static
// is safe to reuse across calls.
static void (*addOutEdgeMasking)(SymID, Loop*, JitArenaAllocator*) = [](SymID symID, Loop* loop, JitArenaAllocator *alloc) -> void
{
    // There are rare cases where we have no out-edges (the only way to leave this loop
    // is via a return inside the jitloopbody); in this case, we don't need to mask any
    // symbols on the out-edges, as we only need to worry about the store cases.
    if(loop->outwardSpeculationMaskInstrs == nullptr)
    {
        return;
    }
    BVSparse<JitArenaAllocator> *syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    // We only need to do this for stack syms, and only for ones that are upwardexposed
    // in the block sourcing to the masking block, but it needs to be for all symbols a
    // mask-skipped load may be written to.
    // Expand symID to its whole cluster: every symbol merged into the same set.
    loop->symClusterList->MapSet<BVSparse<JitArenaAllocator>*>(symID, [](SymID a, BVSparse<JitArenaAllocator> *symbols) {
        symbols->Set(a);
    }, syms);
    SymTable* symTable = loop->GetFunc()->m_symTable;
    // Filter the cluster down to stack syms only; other sym kinds (and ids with
    // no sym in the table) don't need out-edge masking.
    FOREACH_BITSET_IN_SPARSEBV(curSymID, syms)
    {
        Sym* potentialSym = symTable->Find(curSymID);
        if (potentialSym == nullptr || !potentialSym->IsStackSym())
        {
            syms->Clear(curSymID);
        }
    } NEXT_BITSET_IN_SPARSEBV;
    if (syms->IsEmpty())
    {
        // If there are no stack symids left after filtering, we have nothing to mask
        return;
    }
    // Now that we have a bitvector of things to try to mask on the out-edges, we'll go
    // over the list of outmask instructions.
    FOREACH_SLIST_ENTRY(IR::ByteCodeUsesInstr*, bcuInstr, loop->outwardSpeculationMaskInstrs)
    {
        // Get the upwardExposed information for the previous block
        IR::LabelInstr *blockLabel = bcuInstr->GetBlockStartInstr()->AsLabelInstr();
        BasicBlock* maskingBlock = blockLabel->GetBasicBlock();
        // Since it's possible we have a multi-level loop structure (each with its own mask
        // instructions and dereferenced symbol list), we may be able to avoid masking some
        // symbols in interior loop->exterior loop edges if they're not dereferenced in the
        // exterior loop. This does mean, however, that we need to mask them further out,
        // so recurse into the enclosing loop and skip this masking block.
        Loop* maskingBlockLoop = maskingBlock->loop;
        if (maskingBlockLoop != nullptr && !maskingBlockLoop->internallyDereferencedSyms->Test(symID))
        {
            addOutEdgeMasking(symID, maskingBlockLoop, alloc);
            continue;
        }
        // Instead of looking at the previous block (inside the loop), which may be cleaned
        // up or may yet be processed for dead stores, we instead can look at the mask/cmov
        // block, which we can keep from being cleaned up, and which will always be handled
        // before the loop is looked at (in this phase), since it is placed after the loop.
        AssertOrFailFast(maskingBlock->upwardExposedUses);
        AssertOrFailFast(maskingBlock->upwardExposedFields);
        // Restrict the cluster to symbols actually live into the masking block.
        BVSparse<JitArenaAllocator> *symsToMask = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
        symsToMask->Or(maskingBlock->upwardExposedUses);
        symsToMask->Or(maskingBlock->upwardExposedFields);
        symsToMask->And(syms);
        // If nothing is exposed, we have nothing to mask, and nothing to do here.
        if (!symsToMask->IsEmpty())
        {
            if (bcuInstr->GetByteCodeUpwardExposedUsed() == nullptr)
            {
                // This will initialize the internal structure properly
                bcuInstr->SetBV(JitAnew(bcuInstr->m_func->m_alloc, BVSparse<JitArenaAllocator>, bcuInstr->m_func->m_alloc));
            }
#if DBG_DUMP
            if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, loop->topFunc))
            {
                Output::Print(_u("Adding symbols to out-edge masking for loop %u outward block %u:\n"), loop->GetLoopNumber(), maskingBlock->GetBlockNum());
                symsToMask->Dump();
            }
#endif
            // Add the syms to the mask set
            // (the getter returns const; the BV is mutated in place via const_cast)
            const_cast<BVSparse<JitArenaAllocator> *>(bcuInstr->GetByteCodeUpwardExposedUsed())->Or(symsToMask);
        }
    } NEXT_SLIST_ENTRY;
};
  3291. switch (instr->m_opcode)
  3292. {
  3293. case Js::OpCode::LdElemI_A:
  3294. case Js::OpCode::ProfiledLdElemI_A:
  3295. {
  3296. IR::Opnd* dest = instr->GetDst();
  3297. if (dest->IsRegOpnd())
  3298. {
  3299. SymID symid = dest->AsRegOpnd()->m_sym->m_id;
  3300. if (!block->loop->internallyDereferencedSyms->Test(symid))
  3301. {
  3302. instr->SetIsSafeToSpeculate(true);
  3303. addOutEdgeMasking(symid, block->loop, this->tempAlloc);
  3304. #if DBG_DUMP
  3305. if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
  3306. {
  3307. Output::Print(_u("Marking instruction as safe:\n"));
  3308. instr->highlight = 0x0f;
  3309. instr->Dump();
  3310. }
  3311. #endif
  3312. }
  3313. }
  3314. else if (dest->IsSymOpnd())
  3315. {
  3316. SymID symid = dest->AsSymOpnd()->m_sym->m_id;
  3317. if (!block->loop->internallyDereferencedSyms->Test(symid))
  3318. {
  3319. instr->SetIsSafeToSpeculate(true);
  3320. addOutEdgeMasking(symid, block->loop, this->tempAlloc);
  3321. #if DBG_DUMP
  3322. if (PHASE_TRACE(Js::SpeculationPropagationAnalysisPhase, this->func))
  3323. {
  3324. Output::Print(_u("Marking instruction as safe:\n"));
  3325. instr->highlight = 0x0f;
  3326. instr->Dump();
  3327. }
  3328. #endif
  3329. }
  3330. }
  3331. }
  3332. break;
  3333. default:
  3334. // Most instructions don't have any particular handling needed here, as they don't
  3335. // get any masking regardless.
  3336. break;
  3337. }
  3338. }
  3339. #endif
  3340. switch(instr->m_opcode)
  3341. {
  3342. case Js::OpCode::CheckFixedFld:
  3343. {
  3344. if (!this->IsPrePass())
  3345. {
  3346. // Use `propertyId` instead of `propertySymId` to track live fixed fields
  3347. // During jit transfer (`CreateFrom()`), all properties that can be fixed are transferred
  3348. // over and also invalidated using `propertyId` regardless of whether we choose to fix them in the jit.
  3349. // So all properties with the same name are invalidated even though not all of them are fixed.
  3350. // Therefore we need to attach lazy bailout using propertyId so that all of them can be protected.
  3351. Assert(instr->GetSrc1() && block->liveFixedFields);
  3352. block->liveFixedFields->Set(instr->GetSrc1()->GetSym()->AsPropertySym()->m_propertyId);
  3353. }
  3354. break;
  3355. }
  3356. case Js::OpCode::LdSlot:
  3357. {
  3358. DeadStoreOrChangeInstrForScopeObjRemoval(&instrPrev);
  3359. break;
  3360. }
  3361. case Js::OpCode::InlineArrayPush:
  3362. case Js::OpCode::InlineArrayPop:
  3363. {
  3364. IR::Opnd *const thisOpnd = instr->GetSrc1();
  3365. if(thisOpnd && thisOpnd->IsRegOpnd())
  3366. {
  3367. IR::RegOpnd *const thisRegOpnd = thisOpnd->AsRegOpnd();
  3368. if(thisRegOpnd->IsArrayRegOpnd())
  3369. {
  3370. // Process the array use at the point of the array built-in call, since the array will actually
  3371. // be used at the call, not at the ArgOut_A_InlineBuiltIn
  3372. ProcessArrayRegOpndUse(instr, thisRegOpnd->AsArrayRegOpnd());
  3373. }
  3374. }
  3375. }
  3376. #if !INT32VAR // the following is not valid on 64-bit platforms
  3377. case Js::OpCode::BoundCheck:
  3378. {
  3379. if(IsPrePass())
  3380. {
  3381. break;
  3382. }
  3383. // Look for:
  3384. // BoundCheck 0 <= s1
  3385. // BoundCheck s1 <= s2 + c, where c == 0 || c == -1
  3386. //
  3387. // And change it to:
  3388. // UnsignedBoundCheck s1 <= s2 + c
  3389. //
  3390. // The BoundCheck instruction is a signed operation, so any unsigned operand used in the instruction must be
  3391. // guaranteed to be >= 0 and <= int32 max when its value is interpreted as signed. Due to the restricted
  3392. // range of s2 above, by using an unsigned comparison instead, the negative check on s1 will also be
  3393. // covered.
  3394. //
  3395. // A BoundCheck instruction takes the form (src1 <= src2 + dst).
  3396. // Check the current instruction's pattern for:
  3397. // BoundCheck s1 <= s2 + c, where c <= 0
  3398. if(!instr->GetSrc1()->IsRegOpnd() ||
  3399. !instr->GetSrc1()->IsInt32() ||
  3400. !instr->GetSrc2() ||
  3401. instr->GetSrc2()->IsIntConstOpnd())
  3402. {
  3403. break;
  3404. }
  3405. if(instr->GetDst())
  3406. {
  3407. const int c = instr->GetDst()->AsIntConstOpnd()->GetValue();
  3408. if(c != 0 && c != -1)
  3409. {
  3410. break;
  3411. }
  3412. }
  3413. // Check the previous instruction's pattern for:
  3414. // BoundCheck 0 <= s1
  3415. IR::Instr *const lowerBoundCheck = instr->m_prev;
  3416. if(lowerBoundCheck->m_opcode != Js::OpCode::BoundCheck ||
  3417. !lowerBoundCheck->GetSrc1()->IsIntConstOpnd() ||
  3418. lowerBoundCheck->GetSrc1()->AsIntConstOpnd()->GetValue() != 0 ||
  3419. !lowerBoundCheck->GetSrc2() ||
  3420. !instr->GetSrc1()->AsRegOpnd()->IsEqual(lowerBoundCheck->GetSrc2()) ||
  3421. lowerBoundCheck->GetDst() && lowerBoundCheck->GetDst()->AsIntConstOpnd()->GetValue() != 0)
  3422. {
  3423. break;
  3424. }
  3425. // Remove the previous lower bound check, and change the current upper bound check to:
  3426. // UnsignedBoundCheck s1 <= s2 + c
  3427. instr->m_opcode = Js::OpCode::UnsignedBoundCheck;
  3428. currentBlock->RemoveInstr(lowerBoundCheck);
  3429. instrPrev = instr->m_prev;
  3430. break;
  3431. }
  3432. #endif
  3433. }
  3434. bool needsLazyBailOut = this->IsLazyBailOutCurrentlyNeeeded(instr);
  3435. AssertMsg(
  3436. !needsLazyBailOut || instr->HasLazyBailOut(),
  3437. "Instruction does not have the lazy bailout bit. Forward pass did not insert it correctly?"
  3438. );
  3439. DeadStoreTypeCheckBailOut(instr);
  3440. DeadStoreLazyBailOut(instr, needsLazyBailOut);
  3441. DeadStoreImplicitCallBailOut(instr, hasLiveFields, needsLazyBailOut);
  3442. AssertMsg(
  3443. this->IsPrePass() || (needsLazyBailOut || !instr->HasLazyBailOut()),
  3444. "We didn't remove lazy bailout after prepass even though we don't need it?"
  3445. );
  3446. // NoImplicitCallUses transfers need to be processed after determining whether implicit calls need to be disabled
  3447. // for the current instruction, because the instruction where the def occurs also needs implicit calls disabled.
  3448. // Array value type for the destination needs to be updated before transfers have been processed by
  3449. // ProcessNoImplicitCallDef, and array value types for sources need to be updated after transfers have been
  3450. // processed by ProcessNoImplicitCallDef, as it requires the no-implicit-call tracking bit-vectors to be precise at
  3451. // the point of the update.
  3452. if(!IsPrePass())
  3453. {
  3454. UpdateArrayValueTypes(instr, instr->GetDst());
  3455. }
  3456. ProcessNoImplicitCallDef(instr);
  3457. if(!IsPrePass())
  3458. {
  3459. UpdateArrayValueTypes(instr, instr->GetSrc1());
  3460. UpdateArrayValueTypes(instr, instr->GetSrc2());
  3461. }
  3462. }
  3463. else
  3464. {
  3465. switch (instr->m_opcode)
  3466. {
  3467. case Js::OpCode::BailOnNoProfile:
  3468. {
  3469. this->ProcessBailOnNoProfile(instr, block);
  3470. // this call could change the last instr of the previous block... Adjust instrStop.
  3471. instrStop = block->GetFirstInstr()->m_prev;
  3472. Assert(this->tag != Js::DeadStorePhase);
  3473. continue;
  3474. }
  3475. case Js::OpCode::Catch:
  3476. {
  3477. if (this->func->DoOptimizeTry() && !this->IsPrePass())
  3478. {
  3479. // Execute the "Catch" in the JIT'ed code, and bailout to the next instruction. This way, the bailout will restore the exception object automatically.
  3480. IR::BailOutInstr* bailOnException = IR::BailOutInstr::New(Js::OpCode::BailOnException, IR::BailOutOnException, instr->m_next, instr->m_func);
  3481. instr->InsertAfter(bailOnException);
  3482. Assert(instr->GetDst()->IsRegOpnd() && instr->GetDst()->GetStackSym()->HasByteCodeRegSlot());
  3483. StackSym * exceptionObjSym = instr->GetDst()->GetStackSym();
  3484. Assert(instr->m_prev->IsLabelInstr() && (instr->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionTypeCatch));
  3485. instr->m_prev->AsLabelInstr()->GetRegion()->SetExceptionObjectSym(exceptionObjSym);
  3486. }
  3487. break;
  3488. }
  3489. case Js::OpCode::Throw:
  3490. case Js::OpCode::EHThrow:
  3491. case Js::OpCode::InlineThrow:
  3492. this->func->SetHasThrow();
  3493. break;
  3494. }
  3495. }
  3496. if (instr->m_opcode == Js::OpCode::InlineeEnd)
  3497. {
  3498. this->ProcessInlineeEnd(instr);
  3499. }
  3500. if ((instr->IsLabelInstr() && instr->m_next->m_opcode == Js::OpCode::Catch) || (instr->IsLabelInstr() && instr->m_next->m_opcode == Js::OpCode::Finally))
  3501. {
  3502. if (!this->currentRegion)
  3503. {
  3504. Assert(!this->func->DoOptimizeTry() && !(this->func->IsSimpleJit() && this->func->hasBailout));
  3505. }
  3506. else
  3507. {
  3508. Assert(this->currentRegion->GetType() == RegionTypeCatch || this->currentRegion->GetType() == RegionTypeFinally);
  3509. Region * matchingTryRegion = this->currentRegion->GetMatchingTryRegion();
  3510. Assert(matchingTryRegion);
  3511. // We need live-on-back-edge info to accurately set write-through symbols for try-catches in a loop.
  3512. // Don't set write-through symbols in pre-pass
  3513. if (!this->IsPrePass() && !matchingTryRegion->writeThroughSymbolsSet)
  3514. {
  3515. if (this->tag == Js::DeadStorePhase)
  3516. {
  3517. Assert(!this->func->DoGlobOpt());
  3518. }
  3519. // FullJit: Write-through symbols info must be populated in the backward pass as
  3520. // 1. the forward pass needs it to insert ToVars.
  3521. // 2. the deadstore pass needs it to not clear such symbols from the
  3522. // byteCodeUpwardExposedUsed BV upon a def in the try region. This is required
  3523. // because any bailout in the try region needs to restore all write-through
  3524. // symbols.
  3525. // SimpleJit: Won't run the initial backward pass, but write-through symbols info is still
  3526. // needed in the deadstore pass for <2> above.
  3527. this->SetWriteThroughSymbolsSetForRegion(this->currentBlock, matchingTryRegion);
  3528. }
  3529. }
  3530. }
  3531. #if DBG
  3532. if (instr->m_opcode == Js::OpCode::TryCatch)
  3533. {
  3534. if (!this->IsPrePass() && (this->func->DoOptimizeTry() || (this->func->IsSimpleJit() && this->func->hasBailout)))
  3535. {
  3536. Assert(instr->m_next->IsLabelInstr() && (instr->m_next->AsLabelInstr()->GetRegion() != nullptr));
  3537. Region * tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
  3538. Assert(tryRegion && tryRegion->GetType() == RegionType::RegionTypeTry && tryRegion->GetMatchingCatchRegion() != nullptr);
  3539. Assert(tryRegion->writeThroughSymbolsSet);
  3540. }
  3541. }
  3542. #endif
  3543. instrPrev = ProcessPendingPreOpBailOutInfo(instr);
  3544. #if DBG_DUMP
  3545. TraceInstrUses(block, instr, false);
  3546. #endif
  3547. }
  3548. NEXT_INSTR_BACKWARD_IN_BLOCK_EDITING;
  3549. #if DBG
  3550. tracker.Capture(this, block);
  3551. if (tag == Js::CaptureByteCodeRegUsePhase)
  3552. {
  3553. return;
  3554. }
  3555. #endif
  3556. #ifndef _M_ARM
  3557. if (
  3558. this->tag == Js::DeadStorePhase
  3559. // We don't do the masking in simplejit due to reduced perf concerns and the issues
  3560. // with handling try/catch structures with late-added blocks
  3561. && this->func->DoGlobOpt()
  3562. // We don't need the masking blocks in asmjs/wasm mode
  3563. && !block->GetFirstInstr()->m_func->GetJITFunctionBody()->IsAsmJsMode()
  3564. && !block->GetFirstInstr()->m_func->GetJITFunctionBody()->IsWasmFunction()
  3565. && !block->isDead
  3566. && !block->isDeleted
  3567. && CONFIG_FLAG_RELEASE(AddMaskingBlocks)
  3568. )
  3569. {
  3570. FOREACH_PREDECESSOR_BLOCK(blockPred, block)
  3571. {
  3572. // Now we need to handle loop out-edges. These need blocks inserted to prevent load
  3573. // of those symbols in speculation; the easiest way to do this is to CMOV them with
  3574. // a flag that we always know will be false, as this introduces a dependency on the
  3575. // register that can't be speculated (currently).
  3576. //
  3577. // Note that we're doing this backwards - looking from the target into the loop. We
  3578. // do this because this way because we're going backwards over the blocks anyway; a
  3579. // block inserted after the branch may be impossible to correctly handle.
  3580. if (!blockPred->isDead && !blockPred->isDeleted && blockPred->loop != nullptr)
  3581. {
  3582. Loop* targetLoop = block->loop;
  3583. Loop* startingLoop = blockPred->loop;
  3584. bool addMaskingBlock = false;
  3585. if (targetLoop == nullptr)
  3586. {
  3587. // If we're leaving to a non-looping context, we definitely want the masking block
  3588. addMaskingBlock = true;
  3589. }
  3590. else if (targetLoop == startingLoop)
  3591. {
  3592. // If we're still inside the same loop, we don't want a masking block
  3593. addMaskingBlock = false;
  3594. }
  3595. else
  3596. {
  3597. // We want a masking block if we're going to a loop enclosing the current one.
  3598. Loop* loopTest = targetLoop;
  3599. addMaskingBlock = true;
  3600. while (loopTest != nullptr)
  3601. {
  3602. if (loopTest == startingLoop)
  3603. {
  3604. // the target loop is a child of the starting loop, so don't mask on the way
  3605. addMaskingBlock = false;
  3606. break;
  3607. }
  3608. loopTest = loopTest->parent;
  3609. }
  3610. }
  3611. if (addMaskingBlock)
  3612. {
  3613. // Avoid masking on the way from a masking block - we're already masking this jmp
  3614. if (block->GetFirstInstr()->m_next->m_opcode == Js::OpCode::SpeculatedLoadFence)
  3615. {
  3616. addMaskingBlock = false;
  3617. }
  3618. }
  3619. if (addMaskingBlock)
  3620. {
  3621. // It's architecture dependent, so we just mark the block here and leave the actual
  3622. // generation of the masking to the Lowerer.
  3623. // Generated code here:
  3624. // newTarget:
  3625. // syms = targetedloadfence syms
  3626. // jmp oldTarget
  3627. // We need to increment the data use count since we're changing a successor.
  3628. blockPred->IncrementDataUseCount();
  3629. BasicBlock *newBlock = this->func->m_fg->InsertAirlockBlock(this->func->m_fg->FindEdge(blockPred, block), true);
  3630. LABELNAMESET(newBlock->GetFirstInstr()->AsLabelInstr(), "Loop out-edge masking block");
  3631. // This is a little bit of a misuse of ByteCodeUsesInstr - we're using it as just
  3632. // a bitvector that we can add things to.
  3633. IR::ByteCodeUsesInstr* masker = IR::ByteCodeUsesInstr::New(newBlock->GetFirstInstr());
  3634. masker->m_opcode = Js::OpCode::SpeculatedLoadFence;
  3635. // Add the one instruction we need to this block
  3636. newBlock->GetFirstInstr()->InsertAfter(masker);
  3637. // We need to initialize the data for this block, so that later stages of deadstore work properly.
  3638. // Setting use count to 0 makes mergesucc create the structures
  3639. newBlock->SetDataUseCount(0);
  3640. // If we inserted an airlock block compensation block, we need to set the use count on that too.
  3641. if (newBlock->prev && newBlock->prev->isAirLockCompensationBlock)
  3642. {
  3643. newBlock->prev->SetDataUseCount(0);
  3644. }
  3645. if (startingLoop->outwardSpeculationMaskInstrs == nullptr)
  3646. {
  3647. startingLoop->outwardSpeculationMaskInstrs = JitAnew(this->func->m_fg->alloc, SList<IR::ByteCodeUsesInstr*>, this->func->m_fg->alloc);
  3648. }
  3649. // We fill in the instruction later, so we need to add it to the loop's list of such instructions.
  3650. startingLoop->outwardSpeculationMaskInstrs->Prepend(masker);
  3651. }
  3652. }
  3653. } NEXT_PREDECESSOR_BLOCK;
  3654. }
  3655. #endif
  3656. EndIntOverflowDoesNotMatterRange();
  3657. if (!this->IsPrePass() && !block->isDead && block->isLoopHeader)
  3658. {
  3659. // Copy the upward exposed use as the live on back edge regs
  3660. block->loop->regAlloc.liveOnBackEdgeSyms = block->upwardExposedUses->CopyNew(this->func->m_alloc);
  3661. }
  3662. Assert(!considerSymAsRealUseInNoImplicitCallUses);
  3663. #if DBG_DUMP
  3664. TraceBlockUses(block, false);
  3665. #endif
  3666. }
  3667. bool
  3668. BackwardPass::CanDeadStoreInstrForScopeObjRemoval(Sym *sym) const
  3669. {
  3670. if (tag == Js::DeadStorePhase && this->currentInstr->m_func->IsStackArgsEnabled())
  3671. {
  3672. Func * currFunc = this->currentInstr->m_func;
  3673. bool doScopeObjCreation = currFunc->GetJITFunctionBody()->GetDoScopeObjectCreation();
  3674. switch (this->currentInstr->m_opcode)
  3675. {
  3676. case Js::OpCode::InitCachedScope:
  3677. {
  3678. if(!doScopeObjCreation && this->currentInstr->GetDst()->IsScopeObjOpnd(currFunc))
  3679. {
  3680. /*
  3681. * We don't really dead store this instruction. We just want the source sym of this instruction
  3682. * to NOT be tracked as USED by this instruction.
  3683. * This instr will effectively be lowered to dest = MOV NULLObject, in the lowerer phase.
  3684. */
  3685. return true;
  3686. }
  3687. break;
  3688. }
  3689. case Js::OpCode::LdSlot:
  3690. {
  3691. if (sym && IsFormalParamSym(currFunc, sym))
  3692. {
  3693. return true;
  3694. }
  3695. break;
  3696. }
  3697. case Js::OpCode::CommitScope:
  3698. case Js::OpCode::GetCachedFunc:
  3699. {
  3700. return !doScopeObjCreation && this->currentInstr->GetSrc1()->IsScopeObjOpnd(currFunc);
  3701. }
  3702. case Js::OpCode::BrFncCachedScopeEq:
  3703. case Js::OpCode::BrFncCachedScopeNeq:
  3704. {
  3705. return !doScopeObjCreation && this->currentInstr->GetSrc2()->IsScopeObjOpnd(currFunc);
  3706. }
  3707. case Js::OpCode::CallHelper:
  3708. {
  3709. if (!doScopeObjCreation && this->currentInstr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperOP_InitCachedFuncs)
  3710. {
  3711. IR::RegOpnd * scopeObjOpnd = this->currentInstr->GetSrc2()->GetStackSym()->GetInstrDef()->GetSrc1()->AsRegOpnd();
  3712. return scopeObjOpnd->IsScopeObjOpnd(currFunc);
  3713. }
  3714. break;
  3715. }
  3716. }
  3717. }
  3718. return false;
  3719. }
/*
 * Helpers for eliminating scope-object creation as part of the heap-arguments
 * (stack args) optimization.
 */
// Rewrites or removes instructions that reference the scope object being
// eliminated by the stack-args optimization.
// Returns true when the current instruction was removed (the caller must not
// process it further); in the CallHelper case *pInstrPrev is updated so the
// backward walk resumes before the surviving successor instruction.
// Returns false when the instruction was only modified in place
// (LdSlot -> Ld_A, GetCachedFunc -> NewScFunc) or left untouched.
bool
BackwardPass::DeadStoreOrChangeInstrForScopeObjRemoval(IR::Instr ** pInstrPrev)
{
    IR::Instr * instr = this->currentInstr;
    Func * currFunc = instr->m_func;

    // Only in the dead-store phase with stack args enabled; outside the pre-pass,
    // only when the current block is not inside a loop.
    if (this->tag == Js::DeadStorePhase && instr->m_func->IsStackArgsEnabled() && (IsPrePass() || !currentBlock->loop))
    {
        switch (instr->m_opcode)
        {
            /*
             * This LdSlot loads the formal from the formals array. We replace this with a Ld_A <ArgInSym>.
             * ArgInSym is inserted at the beginning of the function during the start of the deadstore pass - for the top func.
             * In case of an inlinee, it will be from the source sym of the ArgOut instruction to the inlinee.
             */
            case Js::OpCode::LdSlot:
            {
                IR::Opnd * src1 = instr->GetSrc1();
                if (src1 && src1->IsSymOpnd())
                {
                    Sym * sym = src1->AsSymOpnd()->m_sym;
                    Assert(sym);
                    if (IsFormalParamSym(currFunc, sym))
                    {
                        AssertMsg(!currFunc->GetJITFunctionBody()->HasImplicitArgIns(), "We don't have mappings between named formals and arguments object here");

                        instr->m_opcode = Js::OpCode::Ld_A;
                        PropertySym * propSym = sym->AsPropertySym();
                        // For formals, the slot's property id is the argument slot index.
                        Js::ArgSlot value = (Js::ArgSlot)propSym->m_propertyId;

                        Assert(currFunc->HasStackSymForFormal(value));
                        StackSym * paramStackSym = currFunc->GetStackSymForFormal(value);
                        IR::RegOpnd * srcOpnd = IR::RegOpnd::New(paramStackSym, TyVar, currFunc);
                        srcOpnd->SetIsJITOptimizedReg(true);
                        instr->ReplaceSrc1(srcOpnd);
                        // Record a use of the replacement sym so it stays live.
                        this->ProcessSymUse(paramStackSym, true, true);

                        if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase))
                        {
                            Output::Print(_u("StackArgFormals : %s (%d) :Replacing LdSlot with Ld_A in Deadstore pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
                            Output::Flush();
                        }
                    }
                }
                break;
            }
            case Js::OpCode::CommitScope:
            {
                // Committing the eliminated scope object is a no-op; drop the instruction.
                if (instr->GetSrc1()->IsScopeObjOpnd(currFunc))
                {
                    instr->Remove();
                    return true;
                }
                break;
            }
            case Js::OpCode::BrFncCachedScopeEq:
            case Js::OpCode::BrFncCachedScopeNeq:
            {
                // Branches comparing against the eliminated scope object are dead.
                if (instr->GetSrc2()->IsScopeObjOpnd(currFunc))
                {
                    instr->Remove();
                    return true;
                }
                break;
            }
            case Js::OpCode::CallHelper:
            {
                // Remove the CALL and all its ArgOut instrs.
                if (instr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper == IR::JnHelperMethod::HelperOP_InitCachedFuncs)
                {
                    IR::RegOpnd * scopeObjOpnd = instr->GetSrc2()->GetStackSym()->GetInstrDef()->GetSrc1()->AsRegOpnd();
                    if (scopeObjOpnd->IsScopeObjOpnd(currFunc))
                    {
                        IR::Instr * instrDef = instr;
                        IR::Instr * nextInstr = instr->m_next;

                        // Walk the ArgOut chain through src2 link-by-link, deleting
                        // each instruction after its successor has been fetched.
                        while (instrDef != nullptr)
                        {
                            IR::Instr * instrToDelete = instrDef;
                            if (instrDef->GetSrc2() != nullptr)
                            {
                                instrDef = instrDef->GetSrc2()->GetStackSym()->GetInstrDef();
                                Assert(instrDef->m_opcode == Js::OpCode::ArgOut_A);
                            }
                            else
                            {
                                instrDef = nullptr;
                            }
                            instrToDelete->Remove();
                        }
                        Assert(nextInstr != nullptr);
                        // Resume the backward walk just before the surviving successor.
                        *pInstrPrev = nextInstr->m_prev;
                        return true;
                    }
                }
                break;
            }
            case Js::OpCode::GetCachedFunc:
            {
                // <dst> = GetCachedFunc <scopeObject>, <functionNum>
                // is converted to
                // <dst> = NewScFunc <functionNum>, <env: FrameDisplay>
                if (instr->GetSrc1()->IsScopeObjOpnd(currFunc))
                {
                    instr->m_opcode = Js::OpCode::NewScFunc;
                    IR::Opnd * intConstOpnd = instr->UnlinkSrc2();
                    Assert(intConstOpnd->IsIntConstOpnd());

                    // Map the cached-scope slot id to the nested function index.
                    uint nestedFuncIndex = instr->m_func->GetJITFunctionBody()->GetNestedFuncIndexForSlotIdInCachedScope(intConstOpnd->AsIntConstOpnd()->AsUint32());
                    intConstOpnd->Free(instr->m_func);

                    instr->ReplaceSrc1(IR::IntConstOpnd::New(nestedFuncIndex, TyUint32, instr->m_func));
                    instr->SetSrc2(IR::RegOpnd::New(currFunc->GetLocalFrameDisplaySym(), IRType::TyVar, currFunc));
                }
                break;
            }
        }
    }
    return false;
}
// Rewrites the current instruction for the stack-args optimization when needed.
// Returns the newly inserted instruction (so the backward walk continues from
// it), or nullptr when no instruction was inserted.
IR::Instr *
BackwardPass::TryChangeInstrForStackArgOpt()
{
    IR::Instr * instr = this->currentInstr;
    if (tag == Js::DeadStorePhase && instr->DoStackArgsOpt())
    {
        switch (instr->m_opcode)
        {
            case Js::OpCode::TypeofElem:
            {
                /*
                    Before:
                        dst = TypeOfElem arguments[i] <(BailOnStackArgsOutOfActualsRange)>
                    After:
                        tmpdst = LdElemI_A arguments[i] <(BailOnStackArgsOutOfActualsRange)>
                        dst = TypeOf tmpdst
                */
                AssertMsg(instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutKind::BailOnStackArgsOutOfActualsRange), "Why is the bailout kind not set, when it is StackArgOptimized?");

                // The load keeps the bailout; a fresh temp carries the element to Typeof.
                instr->m_opcode = Js::OpCode::LdElemI_A;
                IR::Opnd * dstOpnd = instr->UnlinkDst();
                IR::RegOpnd * elementOpnd = IR::RegOpnd::New(StackSym::New(instr->m_func), IRType::TyVar, instr->m_func);
                instr->SetDst(elementOpnd);

                IR::Instr * typeOfInstr = IR::Instr::New(Js::OpCode::Typeof, dstOpnd, elementOpnd, instr->m_func);
                instr->InsertAfter(typeOfInstr);
                return typeOfInstr;
            }
        }
    }

    /*
     * Scope Object Sym is kept alive in all code paths.
     * -This is to facilitate Bailout to record the live Scope object Sym, whenever required.
     * -Reason for doing this is because - Scope object has to be implicitly live whenever Heap Arguments object is live.
     * -When we restore HeapArguments object in the bail out path, it expects the scope object also to be restored - if one was created.
     * -We do not know detailed information about Heap arguments obj syms (aliasing etc.) until we complete Forward Pass.
     * -And we want to avoid dead sym clean up (in this case, scope object though not explicitly live, it is live implicitly) during Block merging in the forward pass.
     * -Hence this is the optimal spot to do this.
     */
    if (tag == Js::BackwardPhase && instr->m_func->GetScopeObjSym() != nullptr)
    {
        this->currentBlock->upwardExposedUses->Set(instr->m_func->GetScopeObjSym()->m_id);
    }
    return nullptr;
}
  3879. void
  3880. BackwardPass::TraceDeadStoreOfInstrsForScopeObjectRemoval()
  3881. {
  3882. IR::Instr * instr = this->currentInstr;
  3883. if (instr->m_func->IsStackArgsEnabled())
  3884. {
  3885. if ((instr->m_opcode == Js::OpCode::InitCachedScope || instr->m_opcode == Js::OpCode::NewScopeObject) && !IsPrePass())
  3886. {
  3887. if (PHASE_TRACE1(Js::StackArgFormalsOptPhase))
  3888. {
  3889. Output::Print(_u("StackArgFormals : %s (%d) :Removing Scope object creation in Deadstore pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetFunctionNumber());
  3890. Output::Flush();
  3891. }
  3892. }
  3893. }
  3894. }
  3895. bool
  3896. BackwardPass::IsFormalParamSym(Func * func, Sym * sym) const
  3897. {
  3898. Assert(sym);
  3899. if (sym->IsPropertySym())
  3900. {
  3901. //If the sym is a propertySym, then see if the propertyId is within the range of the formals
  3902. //We can have other properties stored in the scope object other than the formals (following the formals).
  3903. PropertySym * propSym = sym->AsPropertySym();
  3904. IntConstType value = propSym->m_propertyId;
  3905. return func->IsFormalsArraySym(propSym->m_stackSym->m_id) &&
  3906. (value >= 0 && value < func->GetJITFunctionBody()->GetInParamsCount() - 1);
  3907. }
  3908. else
  3909. {
  3910. Assert(sym->IsStackSym());
  3911. return !!func->IsFormalsArraySym(sym->AsStackSym()->m_id);
  3912. }
  3913. }
  3914. #if DBG_DUMP
  3915. struct BvToDump
  3916. {
  3917. const BVSparse<JitArenaAllocator>* bv;
  3918. const char16* tag;
  3919. size_t tagLen;
  3920. BvToDump(const BVSparse<JitArenaAllocator>* bv, const char16* tag) :
  3921. bv(bv),
  3922. tag(tag),
  3923. tagLen(bv ? wcslen(tag) : 0)
  3924. {}
  3925. };
  3926. void
  3927. BackwardPass::DumpBlockData(BasicBlock * block, IR::Instr* instr)
  3928. {
  3929. const int skip = 8;
  3930. BVSparse<JitArenaAllocator>* byteCodeRegisterUpwardExposed = nullptr;
  3931. if (instr)
  3932. {
  3933. // Instr specific bv to dump
  3934. byteCodeRegisterUpwardExposed = GetByteCodeRegisterUpwardExposed(block, instr->m_func, this->tempAlloc);
  3935. }
  3936. BvToDump bvToDumps[] = {
  3937. { block->upwardExposedUses, _u("Exposed Use") },
  3938. { block->typesNeedingKnownObjectLayout, _u("Needs Known Object Layout") },
  3939. { block->upwardExposedFields, _u("Exposed Fields") },
  3940. { block->byteCodeUpwardExposedUsed, _u("Byte Code Use") },
  3941. { byteCodeRegisterUpwardExposed, _u("Byte Code Reg Use") },
  3942. { !this->IsCollectionPass() && !block->isDead && this->DoDeadStoreSlots() ? block->slotDeadStoreCandidates : nullptr, _u("Slot deadStore candidates") },
  3943. };
  3944. size_t maxTagLen = 0;
  3945. for (int i = 0; i < sizeof(bvToDumps) / sizeof(BvToDump); ++i)
  3946. {
  3947. if (bvToDumps[i].tagLen > maxTagLen)
  3948. {
  3949. maxTagLen = bvToDumps[i].tagLen;
  3950. }
  3951. }
  3952. for (int i = 0; i < sizeof(bvToDumps) / sizeof(BvToDump); ++i)
  3953. {
  3954. if (bvToDumps[i].bv)
  3955. {
  3956. Output::Print((int)(maxTagLen + skip - bvToDumps[i].tagLen), _u("%s: "), bvToDumps[i].tag);
  3957. bvToDumps[i].bv->Dump();
  3958. }
  3959. }
  3960. if (byteCodeRegisterUpwardExposed)
  3961. {
  3962. JitAdelete(this->tempAlloc, byteCodeRegisterUpwardExposed);
  3963. }
  3964. }
  3965. void
  3966. BackwardPass::TraceInstrUses(BasicBlock * block, IR::Instr* instr, bool isStart)
  3967. {
  3968. if ((!IsCollectionPass() || tag == Js::CaptureByteCodeRegUsePhase) && IsTraceEnabled() && Js::Configuration::Global.flags.Verbose)
  3969. {
  3970. const char16* tagName =
  3971. tag == Js::CaptureByteCodeRegUsePhase ? _u("CAPTURE BYTECODE REGISTER") : (
  3972. tag == Js::BackwardPhase ? _u("BACKWARD") : (
  3973. tag == Js::DeadStorePhase ? _u("DEADSTORE") :
  3974. _u("UNKNOWN")
  3975. ));
  3976. if (isStart)
  3977. {
  3978. Output::Print(_u(">>>>>>>>>>>>>>>>>>>>>> %s: Instr Start\n"), tagName);
  3979. }
  3980. else
  3981. {
  3982. Output::Print(_u("---------------------------------------\n"));
  3983. }
  3984. instr->Dump();
  3985. DumpBlockData(block, instr);
  3986. if (isStart)
  3987. {
  3988. Output::Print(_u("----------------------------------------\n"));
  3989. }
  3990. else
  3991. {
  3992. Output::Print(_u("<<<<<<<<<<<<<<<<<<<<<< %s: Instr End\n"), tagName);
  3993. }
  3994. }
  3995. }
  3996. void
  3997. BackwardPass::TraceBlockUses(BasicBlock * block, bool isStart)
  3998. {
  3999. if (this->IsTraceEnabled())
  4000. {
  4001. if (isStart)
  4002. {
  4003. Output::Print(_u("******************************* Before Process Block *******************************\n"));
  4004. }
  4005. else
  4006. {
  4007. Output::Print(_u("******************************* After Process Block *******************************n"));
  4008. }
  4009. block->DumpHeader();
  4010. DumpBlockData(block);
  4011. if (!this->IsCollectionPass() && !block->isDead)
  4012. {
  4013. DumpMarkTemp();
  4014. }
  4015. }
  4016. }
  4017. #endif
// Consumes a NoImplicitCallUses pseudo-instruction: records each src's sym in
// the block's no-implicit-call use set (and, for array-typed srcs, in the
// no-missing-values / native-array subsets), then removes the instruction when
// not in the pre-pass. Returns true iff the instruction was a
// NoImplicitCallUses and has been fully handled here.
bool
BackwardPass::ProcessNoImplicitCallUses(IR::Instr *const instr)
{
    Assert(instr);
    if(instr->m_opcode != Js::OpCode::NoImplicitCallUses)
    {
        return false;
    }
    Assert(tag == Js::DeadStorePhase);
    Assert(!instr->GetDst());
    Assert(instr->GetSrc1());
    Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsSymOpnd());
    Assert(!instr->GetSrc2() || instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsSymOpnd());

    if(IsCollectionPass())
    {
        return true;
    }

    IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
    for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]) && srcs[i]; ++i)
    {
        IR::Opnd *const src = srcs[i];
        IR::ArrayRegOpnd *arraySrc = nullptr;
        Sym *sym = nullptr;
        switch(src->GetKind())
        {
            case IR::OpndKindReg:
            {
                IR::RegOpnd *const regSrc = src->AsRegOpnd();
                sym = regSrc->m_sym;
                // A sym flagged via considerSymAsRealUseInNoImplicitCallUses gets a
                // real (non-pseudo) use recorded here; the flag is one-shot.
                if(considerSymAsRealUseInNoImplicitCallUses && considerSymAsRealUseInNoImplicitCallUses == sym)
                {
                    considerSymAsRealUseInNoImplicitCallUses = nullptr;
                    ProcessStackSymUse(sym->AsStackSym(), true);
                }
                if(regSrc->IsArrayRegOpnd())
                {
                    arraySrc = regSrc->AsArrayRegOpnd();
                }
                break;
            }
            case IR::OpndKindSym:
                sym = src->AsSymOpnd()->m_sym;
                Assert(sym->IsPropertySym());
                break;
            default:
                Assert(false);
                __assume(false);
        }

        currentBlock->noImplicitCallUses->Set(sym->m_id);
        const ValueType valueType(src->GetValueType());
        if(valueType.IsArrayOrObjectWithArray())
        {
            // Track the stronger array guarantees separately so defs can transfer them.
            if(valueType.HasNoMissingValues())
            {
                currentBlock->noImplicitCallNoMissingValuesUses->Set(sym->m_id);
            }
            if(!valueType.HasVarElements())
            {
                currentBlock->noImplicitCallNativeArrayUses->Set(sym->m_id);
            }
            if(arraySrc)
            {
                ProcessArrayRegOpndUse(instr, arraySrc);
            }
        }
    }

    // The pseudo-instruction has served its purpose; drop it outside the pre-pass.
    if(!IsPrePass())
    {
        currentBlock->RemoveInstr(instr);
    }
    return true;
}
// When an instruction defines a sym that is in the block's no-implicit-call use
// set, clears that sym from the set(s) and, if the instruction is a value
// transfer (Ld_A-like, LdFld-like, or StFld-like), transfers the recorded uses
// to the source sym so the no-implicit-call requirement follows the value
// backwards.
void
BackwardPass::ProcessNoImplicitCallDef(IR::Instr *const instr)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(instr);

    IR::Opnd *const dst = instr->GetDst();
    if(!dst)
    {
        return;
    }

    Sym *dstSym;
    switch(dst->GetKind())
    {
        case IR::OpndKindReg:
            dstSym = dst->AsRegOpnd()->m_sym;
            break;
        case IR::OpndKindSym:
            dstSym = dst->AsSymOpnd()->m_sym;
            if(!dstSym->IsPropertySym())
            {
                return;
            }
            break;
        default:
            return;
    }

    // If the dst sym was not a tracked no-implicit-call use, none of the subset
    // bits may be set for it either.
    if(!currentBlock->noImplicitCallUses->TestAndClear(dstSym->m_id))
    {
        Assert(!currentBlock->noImplicitCallNoMissingValuesUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallNativeArrayUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->Test(dstSym->m_id));
        Assert(!currentBlock->noImplicitCallArrayLengthSymUses->Test(dstSym->m_id));
        return;
    }
    // Capture which subset bits were set on the dst so they can be re-applied to
    // the src if this turns out to be a transfer.
    const bool transferNoMissingValuesUse = !!currentBlock->noImplicitCallNoMissingValuesUses->TestAndClear(dstSym->m_id);
    const bool transferNativeArrayUse = !!currentBlock->noImplicitCallNativeArrayUses->TestAndClear(dstSym->m_id);
    const bool transferJsArrayHeadSegmentSymUse =
        !!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->TestAndClear(dstSym->m_id);
    const bool transferArrayLengthSymUse = !!currentBlock->noImplicitCallArrayLengthSymUses->TestAndClear(dstSym->m_id);

    IR::Opnd *const src = instr->GetSrc1();
    // Stop attempting to transfer noImplicitCallUses symbol if the instr is not a transfer instr (based on the opcode's
    // flags) or does not have the attributes to be a transfer instr (based on the existence of src and src2).
    if(!src || (instr->GetSrc2() && !OpCodeAttr::NonIntTransfer(instr->m_opcode)))
    {
        return;
    }
    if(dst->IsRegOpnd() && src->IsRegOpnd())
    {
        if(!OpCodeAttr::NonIntTransfer(instr->m_opcode))
        {
            return;
        }
    }
    else if(
        !(
            // LdFld or similar
            (dst->IsRegOpnd() && src->IsSymOpnd() && src->AsSymOpnd()->m_sym->IsPropertySym()) ||
            // StFld or similar. Don't transfer a field opnd from StFld into the reg opnd src unless the field's value type is
            // definitely array or object with array, because only those value types require implicit calls to be disabled as
            // long as they are live. Other definite value types only require implicit calls to be disabled as long as a live
            // field holds the value, which is up to the StFld when going backwards.
            (src->IsRegOpnd() && dst->GetValueType().IsArrayOrObjectWithArray())
        ) ||
        !instr->TransfersSrcValue())
    {
        return;
    }

    Sym *srcSym = nullptr;
    switch(src->GetKind())
    {
        case IR::OpndKindReg:
            srcSym = src->AsRegOpnd()->m_sym;
            break;
        case IR::OpndKindSym:
            srcSym = src->AsSymOpnd()->m_sym;
            Assert(srcSym->IsPropertySym());
            break;
        default:
            Assert(false);
            __assume(false);
    }

    // Transfer the use (and any subset bits) from the dst sym to the src sym.
    currentBlock->noImplicitCallUses->Set(srcSym->m_id);
    if(transferNoMissingValuesUse)
    {
        currentBlock->noImplicitCallNoMissingValuesUses->Set(srcSym->m_id);
    }
    if(transferNativeArrayUse)
    {
        currentBlock->noImplicitCallNativeArrayUses->Set(srcSym->m_id);
    }
    if(transferJsArrayHeadSegmentSymUse)
    {
        currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->Set(srcSym->m_id);
    }
    if(transferArrayLengthSymUse)
    {
        currentBlock->noImplicitCallArrayLengthSymUses->Set(srcSym->m_id);
    }
}
  4189. template<class F>
  4190. IR::Opnd *
  4191. BackwardPass::FindNoImplicitCallUse(
  4192. IR::Instr *const instr,
  4193. StackSym *const sym,
  4194. const F IsCheckedUse,
  4195. IR::Instr * *const noImplicitCallUsesInstrRef)
  4196. {
  4197. IR::RegOpnd *const opnd = IR::RegOpnd::New(sym, sym->GetType(), instr->m_func);
  4198. IR::Opnd *const use = FindNoImplicitCallUse(instr, opnd, IsCheckedUse, noImplicitCallUsesInstrRef);
  4199. opnd->FreeInternal(instr->m_func);
  4200. return use;
  4201. }
  4202. template<class F>
  4203. IR::Opnd *
  4204. BackwardPass::FindNoImplicitCallUse(
  4205. IR::Instr *const instr,
  4206. IR::Opnd *const opnd,
  4207. const F IsCheckedUse,
  4208. IR::Instr * *const noImplicitCallUsesInstrRef)
  4209. {
  4210. Assert(instr);
  4211. Assert(instr->m_opcode != Js::OpCode::NoImplicitCallUses);
  4212. // Skip byte-code uses
  4213. IR::Instr *prevInstr = instr->m_prev;
  4214. while(
  4215. prevInstr &&
  4216. !prevInstr->IsLabelInstr() &&
  4217. (!prevInstr->IsRealInstr() || prevInstr->IsByteCodeUsesInstr()) &&
  4218. prevInstr->m_opcode != Js::OpCode::NoImplicitCallUses)
  4219. {
  4220. prevInstr = prevInstr->m_prev;
  4221. }
  4222. // Find the corresponding use in a NoImplicitCallUses instruction
  4223. for(; prevInstr && prevInstr->m_opcode == Js::OpCode::NoImplicitCallUses; prevInstr = prevInstr->m_prev)
  4224. {
  4225. IR::Opnd *const checkedSrcs[] = { prevInstr->GetSrc1(), prevInstr->GetSrc2() };
  4226. for(int i = 0; i < sizeof(checkedSrcs) / sizeof(checkedSrcs[0]) && checkedSrcs[i]; ++i)
  4227. {
  4228. IR::Opnd *const checkedSrc = checkedSrcs[i];
  4229. if(checkedSrc->IsEqual(opnd) && IsCheckedUse(checkedSrc))
  4230. {
  4231. if(noImplicitCallUsesInstrRef)
  4232. {
  4233. *noImplicitCallUsesInstrRef = prevInstr;
  4234. }
  4235. return checkedSrc;
  4236. }
  4237. }
  4238. }
  4239. if(noImplicitCallUsesInstrRef)
  4240. {
  4241. *noImplicitCallUsesInstrRef = nullptr;
  4242. }
  4243. return nullptr;
  4244. }
  4245. void
  4246. BackwardPass::ProcessArrayRegOpndUse(IR::Instr *const instr, IR::ArrayRegOpnd *const arrayRegOpnd)
  4247. {
  4248. Assert(tag == Js::DeadStorePhase);
  4249. Assert(!IsCollectionPass());
  4250. Assert(instr);
  4251. Assert(arrayRegOpnd);
  4252. if(!(arrayRegOpnd->HeadSegmentSym() || arrayRegOpnd->HeadSegmentLengthSym() || arrayRegOpnd->LengthSym()))
  4253. {
  4254. return;
  4255. }
  4256. const ValueType arrayValueType(arrayRegOpnd->GetValueType());
  4257. const bool isJsArray = !arrayValueType.IsLikelyTypedArray();
  4258. Assert(isJsArray == arrayValueType.IsArrayOrObjectWithArray());
  4259. Assert(!isJsArray == arrayValueType.IsOptimizedTypedArray());
  4260. BasicBlock *const block = currentBlock;
  4261. if(!IsPrePass() &&
  4262. (arrayRegOpnd->HeadSegmentSym() || arrayRegOpnd->HeadSegmentLengthSym()) &&
  4263. (!isJsArray || instr->m_opcode != Js::OpCode::NoImplicitCallUses))
  4264. {
  4265. bool headSegmentIsLoadedButUnused =
  4266. instr->loadedArrayHeadSegment &&
  4267. arrayRegOpnd->HeadSegmentSym() &&
  4268. !block->upwardExposedUses->Test(arrayRegOpnd->HeadSegmentSym()->m_id);
  4269. const bool headSegmentLengthIsLoadedButUnused =
  4270. instr->loadedArrayHeadSegmentLength &&
  4271. arrayRegOpnd->HeadSegmentLengthSym() &&
  4272. !block->upwardExposedUses->Test(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
  4273. if(headSegmentLengthIsLoadedButUnused && instr->extractedUpperBoundCheckWithoutHoisting)
  4274. {
  4275. // Find the upper bound check (index[src1] <= headSegmentLength[src2] + offset[dst])
  4276. IR::Instr *upperBoundCheck = this->globOpt->FindUpperBoundsCheckInstr(instr);
  4277. Assert(upperBoundCheck && upperBoundCheck != instr);
  4278. Assert(upperBoundCheck->GetSrc2()->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym());
  4279. // Find the head segment length load
  4280. IR::Instr *headSegmentLengthLoad = this->globOpt->FindArraySegmentLoadInstr(upperBoundCheck);
  4281. Assert(headSegmentLengthLoad->GetDst()->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym());
  4282. Assert(
  4283. headSegmentLengthLoad->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->m_sym ==
  4284. (isJsArray ? arrayRegOpnd->HeadSegmentSym() : arrayRegOpnd->m_sym));
  4285. // Fold the head segment length load into the upper bound check. Keep the load instruction there with a Nop so that
  4286. // the head segment length sym can be marked as unused before the Nop. The lowerer will remove it.
  4287. upperBoundCheck->ReplaceSrc2(headSegmentLengthLoad->UnlinkSrc1());
  4288. headSegmentLengthLoad->m_opcode = Js::OpCode::Nop;
  4289. if(isJsArray)
  4290. {
  4291. // The head segment length is on the head segment, so the bound check now uses the head segment sym
  4292. headSegmentIsLoadedButUnused = false;
  4293. }
  4294. }
  4295. if(headSegmentIsLoadedButUnused || headSegmentLengthIsLoadedButUnused)
  4296. {
  4297. // Check if the head segment / head segment length are being loaded here. If so, remove them and let the fast
  4298. // path load them since it does a better job.
  4299. IR::ArrayRegOpnd *noImplicitCallArrayUse = nullptr;
  4300. if(isJsArray)
  4301. {
  4302. IR::Opnd *const use =
  4303. FindNoImplicitCallUse(
  4304. instr,
  4305. arrayRegOpnd,
  4306. [&](IR::Opnd *const checkedSrc) -> bool
  4307. {
  4308. const ValueType checkedSrcValueType(checkedSrc->GetValueType());
  4309. if(!checkedSrcValueType.IsLikelyObject() ||
  4310. checkedSrcValueType.GetObjectType() != arrayValueType.GetObjectType())
  4311. {
  4312. return false;
  4313. }
  4314. IR::RegOpnd *const checkedRegSrc = checkedSrc->AsRegOpnd();
  4315. if(!checkedRegSrc->IsArrayRegOpnd())
  4316. {
  4317. return false;
  4318. }
  4319. IR::ArrayRegOpnd *const checkedArraySrc = checkedRegSrc->AsArrayRegOpnd();
  4320. if(headSegmentIsLoadedButUnused &&
  4321. checkedArraySrc->HeadSegmentSym() != arrayRegOpnd->HeadSegmentSym())
  4322. {
  4323. return false;
  4324. }
  4325. if(headSegmentLengthIsLoadedButUnused &&
  4326. checkedArraySrc->HeadSegmentLengthSym() != arrayRegOpnd->HeadSegmentLengthSym())
  4327. {
  4328. return false;
  4329. }
  4330. return true;
  4331. });
  4332. if(use)
  4333. {
  4334. noImplicitCallArrayUse = use->AsRegOpnd()->AsArrayRegOpnd();
  4335. }
  4336. }
  4337. else if(headSegmentLengthIsLoadedButUnused)
  4338. {
  4339. // A typed array's head segment length may be zeroed when the typed array's buffer is transferred to a web
  4340. // worker, so the head segment length sym use is included in a NoImplicitCallUses instruction. Since there
  4341. // are no forward uses of the head segment length sym, to allow removing the extracted head segment length
  4342. // load, the corresponding head segment length sym use in the NoImplicitCallUses instruction must also be
  4343. // removed.
  4344. IR::Instr *noImplicitCallUsesInstr;
  4345. IR::Opnd *const use =
  4346. FindNoImplicitCallUse(
  4347. instr,
  4348. arrayRegOpnd->HeadSegmentLengthSym(),
  4349. [&](IR::Opnd *const checkedSrc) -> bool
  4350. {
  4351. return checkedSrc->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym();
  4352. },
  4353. &noImplicitCallUsesInstr);
  4354. if(use)
  4355. {
  4356. Assert(noImplicitCallUsesInstr);
  4357. Assert(!noImplicitCallUsesInstr->GetDst());
  4358. Assert(noImplicitCallUsesInstr->GetSrc1());
  4359. if(use == noImplicitCallUsesInstr->GetSrc1())
  4360. {
  4361. if(noImplicitCallUsesInstr->GetSrc2())
  4362. {
  4363. noImplicitCallUsesInstr->ReplaceSrc1(noImplicitCallUsesInstr->UnlinkSrc2());
  4364. }
  4365. else
  4366. {
  4367. noImplicitCallUsesInstr->FreeSrc1();
  4368. noImplicitCallUsesInstr->m_opcode = Js::OpCode::Nop;
  4369. }
  4370. }
  4371. else
  4372. {
  4373. Assert(use == noImplicitCallUsesInstr->GetSrc2());
  4374. noImplicitCallUsesInstr->FreeSrc2();
  4375. }
  4376. }
  4377. }
  4378. if(headSegmentIsLoadedButUnused &&
  4379. (!isJsArray || !arrayRegOpnd->HeadSegmentLengthSym() || headSegmentLengthIsLoadedButUnused))
  4380. {
  4381. // For JS arrays, the head segment length load is dependent on the head segment. So, only remove the head
  4382. // segment load if the head segment length load can also be removed.
  4383. arrayRegOpnd->RemoveHeadSegmentSym();
  4384. instr->loadedArrayHeadSegment = false;
  4385. if(noImplicitCallArrayUse)
  4386. {
  4387. noImplicitCallArrayUse->RemoveHeadSegmentSym();
  4388. }
  4389. }
  4390. if(headSegmentLengthIsLoadedButUnused)
  4391. {
  4392. arrayRegOpnd->RemoveHeadSegmentLengthSym();
  4393. instr->loadedArrayHeadSegmentLength = false;
  4394. if(noImplicitCallArrayUse)
  4395. {
  4396. noImplicitCallArrayUse->RemoveHeadSegmentLengthSym();
  4397. }
  4398. }
  4399. }
  4400. }
  4401. if(isJsArray && instr->m_opcode != Js::OpCode::NoImplicitCallUses)
  4402. {
  4403. // Only uses in NoImplicitCallUses instructions are counted toward liveness
  4404. return;
  4405. }
  4406. // Treat dependent syms as uses. For JS arrays, only uses in NoImplicitCallUses count because only then the assumptions made
  4407. // on the dependent syms are guaranteed to be valid. Similarly for typed arrays, a head segment length sym use counts toward
  4408. // liveness only in a NoImplicitCallUses instruction.
  4409. if(arrayRegOpnd->HeadSegmentSym())
  4410. {
  4411. ProcessStackSymUse(arrayRegOpnd->HeadSegmentSym(), true);
  4412. if(isJsArray)
  4413. {
  4414. block->noImplicitCallUses->Set(arrayRegOpnd->HeadSegmentSym()->m_id);
  4415. block->noImplicitCallJsArrayHeadSegmentSymUses->Set(arrayRegOpnd->HeadSegmentSym()->m_id);
  4416. }
  4417. }
  4418. if(arrayRegOpnd->HeadSegmentLengthSym())
  4419. {
  4420. if(isJsArray)
  4421. {
  4422. ProcessStackSymUse(arrayRegOpnd->HeadSegmentLengthSym(), true);
  4423. block->noImplicitCallUses->Set(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
  4424. block->noImplicitCallJsArrayHeadSegmentSymUses->Set(arrayRegOpnd->HeadSegmentLengthSym()->m_id);
  4425. }
  4426. else
  4427. {
  4428. // ProcessNoImplicitCallUses automatically marks JS array reg opnds and their corresponding syms as live. A typed
  4429. // array's head segment length sym also needs to be marked as live at its use in the NoImplicitCallUses instruction,
  4430. // but it is just in a reg opnd. Flag the opnd to have the sym be marked as live when that instruction is processed.
  4431. Assert(!considerSymAsRealUseInNoImplicitCallUses);
  4432. IR::Opnd *const use =
  4433. FindNoImplicitCallUse(
  4434. instr,
  4435. arrayRegOpnd->HeadSegmentLengthSym(),
  4436. [&](IR::Opnd *const checkedSrc) -> bool
  4437. {
  4438. return checkedSrc->AsRegOpnd()->m_sym == arrayRegOpnd->HeadSegmentLengthSym();
  4439. });
  4440. if(use)
  4441. {
  4442. considerSymAsRealUseInNoImplicitCallUses = arrayRegOpnd->HeadSegmentLengthSym();
  4443. }
  4444. }
  4445. }
  4446. StackSym *const lengthSym = arrayRegOpnd->LengthSym();
  4447. if(lengthSym && lengthSym != arrayRegOpnd->HeadSegmentLengthSym())
  4448. {
  4449. ProcessStackSymUse(lengthSym, true);
  4450. Assert(arrayValueType.IsArray());
  4451. block->noImplicitCallUses->Set(lengthSym->m_id);
  4452. block->noImplicitCallArrayLengthSymUses->Set(lengthSym->m_id);
  4453. }
  4454. }
// Dead-store-phase handling of NewScObject instructions that carry a
// BailOutFailedCtorGuardCheck bailout. If the constructed object is used
// downstream, pending final-type transitions and guarded property operations
// for the object sym are transferred onto the constructor cache; otherwise
// the ctor-guard bailout is removed entirely.
void
BackwardPass::ProcessNewScObject(IR::Instr* instr)
{
    // Only meaningful in the dead-store phase proper (not the collection pass).
    if (this->tag != Js::DeadStorePhase || IsCollectionPass())
    {
        return;
    }

    if (!instr->IsNewScObjectInstr())
    {
        return;
    }

    // The instruction could have a lazy bailout associated with it, which might get cleared
    // later, so we make sure that we only process instructions with the right bailout kind.
    if (instr->HasBailOutInfo() && instr->GetBailOutKindNoBits() == IR::BailOutFailedCtorGuardCheck)
    {
        Assert(instr->IsProfiledInstr());
        Assert(instr->GetDst()->IsRegOpnd());

        BasicBlock * block = this->currentBlock;
        StackSym* objSym = instr->GetDst()->AsRegOpnd()->GetStackSym();

        if (block->upwardExposedUses->Test(objSym->m_id))
        {
            // If the object created here is used downstream, let's capture any property operations we must protect.

            Assert(instr->GetDst()->AsRegOpnd()->GetStackSym()->HasObjectTypeSym());

            JITTimeConstructorCache* ctorCache = instr->m_func->GetConstructorCache(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));

            if (block->stackSymToFinalType != nullptr)
            {
                // NewScObject is the origin of the object pointer. If we have a final type in hand, do the
                // transition here.
                AddPropertyCacheBucket *pBucket = block->stackSymToFinalType->Get(objSym->m_id);
                if (pBucket &&
                    pBucket->GetInitialType() != nullptr &&
                    pBucket->GetFinalType() != pBucket->GetInitialType())
                {
                    // The bucket's initial type must be the type the ctor cache produces.
                    Assert(pBucket->GetInitialType() == ctorCache->GetType());
                    if (!this->IsPrePass())
                    {
                        // Emit the type transition immediately after the NewScObject.
                        this->InsertTypeTransition(instr->m_next, objSym, pBucket, block->upwardExposedUses);
                    }
#if DBG
                    // In debug builds, remember the consumed types for later
                    // assertions instead of clearing the bucket outright.
                    pBucket->deadStoreUnavailableInitialType = pBucket->GetInitialType();
                    if (pBucket->deadStoreUnavailableFinalType == nullptr)
                    {
                        pBucket->deadStoreUnavailableFinalType = pBucket->GetFinalType();
                    }
                    pBucket->SetInitialType(nullptr);
                    pBucket->SetFinalType(nullptr);
#else
                    block->stackSymToFinalType->Clear(objSym->m_id);
#endif
                }
            }

            if (block->stackSymToGuardedProperties != nullptr)
            {
                ObjTypeGuardBucket* bucket = block->stackSymToGuardedProperties->Get(objSym->m_id);
                if (bucket != nullptr)
                {
                    BVSparse<JitArenaAllocator>* guardedPropertyOps = bucket->GetGuardedPropertyOps();
                    if (guardedPropertyOps != nullptr)
                    {
                        // Move the guarded property ops onto the ctor cache (func
                        // allocator, so they survive until lowering), then release
                        // the temp-allocated bit vector and the bucket entry.
                        ctorCache->EnsureGuardedPropOps(this->func->m_alloc);
                        ctorCache->AddGuardedPropOps(guardedPropertyOps);

                        bucket->SetGuardedPropertyOps(nullptr);
                        JitAdelete(this->tempAlloc, guardedPropertyOps);
                        block->stackSymToGuardedProperties->Clear(objSym->m_id);
                    }
                }
            }
        }
        else
        {
            // If the object is not used downstream, let's remove the bailout and let the lowerer emit a fast path along with
            // the fallback on helper, if the ctor cache ever became invalid.
            instr->ClearBailOutInfo();
            if (preOpBailOutInstrToProcess == instr)
            {
                preOpBailOutInstrToProcess = nullptr;
            }

#if DBG
            // We're creating a brand new object here, so no type check upstream could protect any properties of this
            // object. Let's make sure we don't have any left to protect.
            ObjTypeGuardBucket* bucket = block->stackSymToGuardedProperties != nullptr ?
                block->stackSymToGuardedProperties->Get(objSym->m_id) : nullptr;
            Assert(bucket == nullptr || bucket->GetGuardedPropertyOps()->IsEmpty());
#endif
        }
    }
}
// Dead-store-phase fix-up of optimized array value types on one operand.
// By this point liveness tells us which array assumptions (definite
// array-ness, native elements, no missing values, cached head-segment /
// head-segment-length / length syms) are actually protected downstream via
// NoImplicitCallUses. Any unprotected assumption is weakened here — the value
// type is demoted to its "likely" form, or the cached sym is dropped from the
// ArrayRegOpnd — so the lowerer does not rely on an unguarded invariant.
void
BackwardPass::UpdateArrayValueTypes(IR::Instr *const instr, IR::Opnd *origOpnd)
{
    Assert(tag == Js::DeadStorePhase);
    Assert(!IsPrePass());
    Assert(instr);

    if(!origOpnd)
    {
        return;
    }

    IR::Instr *opndOwnerInstr = instr;
    switch(instr->m_opcode)
    {
        case Js::OpCode::StElemC:
        case Js::OpCode::StArrSegElemC:
            // These may not be fixed if we are unsure about the type of the array they're storing to
            // (because it relies on profile data) and we weren't able to hoist the array check.
            return;
    }

    // Locate the reg opnd that carries the array value type: either the opnd
    // itself, or the base of an indir opnd.
    Sym *sym;   // NOTE(review): assigned below but never read afterwards — looks vestigial; confirm before removing.
    IR::Opnd* opnd = origOpnd;
    IR::ArrayRegOpnd *arrayOpnd;
    switch(opnd->GetKind())
    {
        case IR::OpndKindIndir:
            opnd = opnd->AsIndirOpnd()->GetBaseOpnd();
            // fall-through

        case IR::OpndKindReg:
        {
            IR::RegOpnd *const regOpnd = opnd->AsRegOpnd();
            sym = regOpnd->m_sym;
            arrayOpnd = regOpnd->IsArrayRegOpnd() ? regOpnd->AsArrayRegOpnd() : nullptr;
            break;
        }

        case IR::OpndKindSym:
            sym = opnd->AsSymOpnd()->m_sym;
            if(!sym->IsPropertySym())
            {
                return;
            }
            arrayOpnd = nullptr;
            break;

        default:
            return;
    }

    const ValueType valueType(opnd->GetValueType());
    if(!valueType.IsAnyOptimizedArray())
    {
        return;
    }
    const bool isJsArray = valueType.IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueType.IsOptimizedTypedArray());

    // Each flag below is true when the corresponding assumption is NOT
    // protected by upwards-exposed NoImplicitCallUses liveness in this block
    // (and, for value-type changes, the opnd's value type is not pinned).
    const bool noForwardImplicitCallUses = currentBlock->noImplicitCallUses->IsEmpty();
    bool changeArray = isJsArray && !opnd->IsValueTypeFixed() && noForwardImplicitCallUses;
    bool changeNativeArray =
        isJsArray &&
        !opnd->IsValueTypeFixed() &&
        !valueType.HasVarElements() &&
        currentBlock->noImplicitCallNativeArrayUses->IsEmpty();
    bool changeNoMissingValues =
        isJsArray &&
        !opnd->IsValueTypeFixed() &&
        valueType.HasNoMissingValues() &&
        currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty();
    const bool noForwardJsArrayHeadSegmentSymUses = currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty();
    bool removeHeadSegmentSym = isJsArray && arrayOpnd && arrayOpnd->HeadSegmentSym() && noForwardJsArrayHeadSegmentSymUses;
    bool removeHeadSegmentLengthSym =
        arrayOpnd &&
        arrayOpnd->HeadSegmentLengthSym() &&
        (isJsArray ? noForwardJsArrayHeadSegmentSymUses : noForwardImplicitCallUses);
    Assert(!isJsArray || !arrayOpnd || !arrayOpnd->LengthSym() || valueType.IsArray());
    bool removeLengthSym =
        isJsArray &&
        arrayOpnd &&
        arrayOpnd->LengthSym() &&
        currentBlock->noImplicitCallArrayLengthSymUses->IsEmpty();
    // NOTE(review): removeLengthSym is absent from this early-out, so a case
    // where ONLY the length sym is removable returns here without removing it.
    // That is safe (conservative) but looks like a missed optimization —
    // confirm whether the omission is intentional.
    if(!(changeArray || changeNoMissingValues || changeNativeArray || removeHeadSegmentSym || removeHeadSegmentLengthSym))
    {
        return;
    }

    // We have a definitely-array value type for the base, but either implicit calls are not currently being disabled for
    // legally using the value type as a definite array, or we are not currently bailing out upon creating a missing value
    // for legally using the value type as a definite array with no missing values.

    // For source opnds, ensure that a NoImplicitCallUses immediately precedes this instruction. Otherwise, convert the value
    // type to an appropriate version so that the lowerer doesn't incorrectly treat it as it says.
    if(opnd != opndOwnerInstr->GetDst())
    {
        if(isJsArray)
        {
            // Look for a matching checked use in a preceding NoImplicitCallUses;
            // each assumption it covers can stay (flag reset to false below).
            IR::Opnd *const checkedSrc =
                FindNoImplicitCallUse(
                    instr,
                    opnd,
                    [&](IR::Opnd *const checkedSrc) -> bool
                    {
                        const ValueType checkedSrcValueType(checkedSrc->GetValueType());
                        return
                            checkedSrcValueType.IsLikelyObject() &&
                            checkedSrcValueType.GetObjectType() == valueType.GetObjectType();
                    });
            if(checkedSrc)
            {
                // Implicit calls will be disabled to the point immediately before this instruction
                changeArray = false;

                const ValueType checkedSrcValueType(checkedSrc->GetValueType());
                if(changeNativeArray &&
                    !checkedSrcValueType.HasVarElements() &&
                    checkedSrcValueType.HasIntElements() == valueType.HasIntElements())
                {
                    // If necessary, instructions before this will bail out on converting a native array
                    changeNativeArray = false;
                }

                if(changeNoMissingValues && checkedSrcValueType.HasNoMissingValues())
                {
                    // If necessary, instructions before this will bail out on creating a missing value
                    changeNoMissingValues = false;
                }

                if((removeHeadSegmentSym || removeHeadSegmentLengthSym || removeLengthSym) && checkedSrc->IsRegOpnd())
                {
                    IR::RegOpnd *const checkedRegSrc = checkedSrc->AsRegOpnd();
                    if(checkedRegSrc->IsArrayRegOpnd())
                    {
                        IR::ArrayRegOpnd *const checkedArraySrc = checkedSrc->AsRegOpnd()->AsArrayRegOpnd();
                        if(removeHeadSegmentSym && checkedArraySrc->HeadSegmentSym() == arrayOpnd->HeadSegmentSym())
                        {
                            // If necessary, instructions before this will bail out upon invalidating head segment sym
                            removeHeadSegmentSym = false;
                        }
                        if(removeHeadSegmentLengthSym &&
                            checkedArraySrc->HeadSegmentLengthSym() == arrayOpnd->HeadSegmentLengthSym())
                        {
                            // If necessary, instructions before this will bail out upon invalidating head segment length sym
                            removeHeadSegmentLengthSym = false;
                        }
                        if(removeLengthSym && checkedArraySrc->LengthSym() == arrayOpnd->LengthSym())
                        {
                            // If necessary, instructions before this will bail out upon invalidating a length sym
                            removeLengthSym = false;
                        }
                    }
                }
            }
        }
        else
        {
            Assert(removeHeadSegmentLengthSym);

            // A typed array's head segment length may be zeroed when the typed array's buffer is transferred to a web worker,
            // so the head segment length sym use is included in a NoImplicitCallUses instruction. Since there are no forward
            // uses of any head segment length syms, to allow removing the extracted head segment length
            // load, the corresponding head segment length sym use in the NoImplicitCallUses instruction must also be
            // removed.
            IR::Opnd *const use =
                FindNoImplicitCallUse(
                    instr,
                    arrayOpnd->HeadSegmentLengthSym(),
                    [&](IR::Opnd *const checkedSrc) -> bool
                    {
                        return checkedSrc->AsRegOpnd()->m_sym == arrayOpnd->HeadSegmentLengthSym();
                    });
            if(use)
            {
                // Implicit calls will be disabled to the point immediately before this instruction
                removeHeadSegmentLengthSym = false;
            }
        }
    }

    if(changeArray || changeNativeArray)
    {
        // The definite array type can no longer be trusted at all: replace any
        // ArrayRegOpnd with a plain reg opnd (dropping all cached syms) and
        // demote the value type to its "likely" form.
        if(arrayOpnd)
        {
            opnd = arrayOpnd->CopyAsRegOpnd(opndOwnerInstr->m_func);
            if (origOpnd->IsIndirOpnd())
            {
                origOpnd->AsIndirOpnd()->ReplaceBaseOpnd(opnd->AsRegOpnd());
            }
            else
            {
                opndOwnerInstr->Replace(arrayOpnd, opnd);
            }
            arrayOpnd = nullptr;
        }
        opnd->SetValueType(valueType.ToLikely());
    }
    else
    {
        // Weaker fix-ups: drop only the individual unprotected assumptions.
        if(changeNoMissingValues)
        {
            opnd->SetValueType(valueType.SetHasNoMissingValues(false));
        }
        if(removeHeadSegmentSym)
        {
            Assert(arrayOpnd);
            arrayOpnd->RemoveHeadSegmentSym();
        }
        if(removeHeadSegmentLengthSym)
        {
            Assert(arrayOpnd);
            arrayOpnd->RemoveHeadSegmentLengthSym();
        }
        if(removeLengthSym)
        {
            Assert(arrayOpnd);
            arrayOpnd->RemoveLengthSym();
        }
    }
}
// For an element store (StElemI_A / StElemI_A_Strict / Memcopy / Memset) with
// bailout info whose destination base may be a JS array, widen the bailout
// kind to cover aliased-array hazards that downstream code depends on:
// native-array conversion, head-segment invalidation, missing-value creation,
// and length invalidation. Which kinds are added depends on the block's
// upwards-exposed NoImplicitCallUses liveness sets.
void
BackwardPass::UpdateArrayBailOutKind(IR::Instr *const instr)
{
    Assert(!IsPrePass());
    Assert(instr);
    Assert(instr->HasBailOutInfo());

    // Only element stores through an indir destination are relevant.
    if ((instr->m_opcode != Js::OpCode::StElemI_A && instr->m_opcode != Js::OpCode::StElemI_A_Strict &&
         instr->m_opcode != Js::OpCode::Memcopy && instr->m_opcode != Js::OpCode::Memset) ||
        !instr->GetDst()->IsIndirOpnd())
    {
        return;
    }

    IR::RegOpnd *const baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    const ValueType baseValueType(baseOpnd->GetValueType());
    if(baseValueType.IsNotArrayOrObjectWithArray())
    {
        return;
    }

    // Default: the store is allowed to convert the array; may be revoked below.
    instr->GetDst()->AsIndirOpnd()->AllowConversion(true);
    IR::BailOutKind includeBailOutKinds = IR::BailOutInvalid;
    if (!baseValueType.IsNotNativeArray() &&
        !currentBlock->noImplicitCallNativeArrayUses->IsEmpty() &&
        !(instr->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
    {
        // There is an upwards-exposed use of a native array. Since the array referenced by this instruction can be aliased,
        // this instruction needs to bail out if it converts the native array even if this array specifically is not
        // upwards-exposed.
        if (!baseValueType.IsLikelyNativeArray() || instr->GetSrc1()->IsVar())
        {
            includeBailOutKinds |= IR::BailOutConvertedNativeArray;
        }
        else
        {
            // We are assuming that array conversion is impossible here, so make sure we execute code that fails if conversion does happen.
            instr->GetDst()->AsIndirOpnd()->AllowConversion(false);
        }
    }

    if(baseOpnd->IsArrayRegOpnd() && baseOpnd->AsArrayRegOpnd()->EliminatedUpperBoundCheck())
    {
        if(instr->extractedUpperBoundCheckWithoutHoisting && !currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty())
        {
            // See comment below regarding head segment invalidation. A failed upper bound check usually means that it will
            // invalidate the head segment length, so change the bailout kind on the upper bound check to have it bail out for
            // the right reason. Even though the store may actually occur in a non-head segment, which would not invalidate the
            // head segment or length, any store outside the head segment bounds causes head segment load elimination to be
            // turned off for the store, because the segment structure of the array is not guaranteed to be the same every time.
            IR::Instr *upperBoundCheck = this->globOpt->FindUpperBoundsCheckInstr(instr);
            Assert(upperBoundCheck && upperBoundCheck != instr);
            if(upperBoundCheck->GetBailOutKind() == IR::BailOutOnArrayAccessHelperCall)
            {
                upperBoundCheck->SetBailOutKind(IR::BailOutOnInvalidatedArrayHeadSegment);
            }
            else
            {
                Assert(upperBoundCheck->GetBailOutKind() == IR::BailOutOnFailedHoistedBoundCheck);
            }
        }
    }
    else
    {
        if(!currentBlock->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty())
        {
            // There is an upwards-exposed use of a segment sym. Since the head segment syms referenced by this instruction can
            // be aliased, this instruction needs to bail out if it changes the segment syms it references even if the ones it
            // references specifically are not upwards-exposed. This bailout kind also guarantees that this element store will
            // not create missing values.
            includeBailOutKinds |= IR::BailOutOnInvalidatedArrayHeadSegment;
        }
        else if(
            !currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty() &&
            !(instr->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall))
        {
            // There is an upwards-exposed use of an array with no missing values. Since the array referenced by this
            // instruction can be aliased, this instruction needs to bail out if it creates a missing value in the array even if
            // this array specifically is not upwards-exposed.
            includeBailOutKinds |= IR::BailOutOnMissingValue;
        }

        if(!baseValueType.IsNotArray() && !currentBlock->noImplicitCallArrayLengthSymUses->IsEmpty())
        {
            // There is an upwards-exposed use of a length sym. Since the length sym referenced by this instruction can be
            // aliased, this instruction needs to bail out if it changes the length sym it references even if the ones it
            // references specifically are not upwards-exposed.
            includeBailOutKinds |= IR::BailOutOnInvalidatedArrayLength;
        }
    }

    if(!includeBailOutKinds)
    {
        return;
    }

    // All added kinds must be auxiliary bits, not a primary bailout kind.
    Assert(!(includeBailOutKinds & ~IR::BailOutKindBits));
    instr->SetBailOutKind(instr->GetBailOutKind() | includeBailOutKinds);
}
// Records a use of a stack sym in the current block: tracks the byte-code
// register (on the var-equivalent sym) for bailout restore, feeds the
// temp-number/temp-object trackers, and marks the sym upwards-exposed.
// Returns true if the sym was already upwards-exposed in this block.
// isNonByteCodeUse: true for compiler-introduced uses that should not count
// toward byte-code register liveness.
bool
BackwardPass::ProcessStackSymUse(StackSym * stackSym, BOOLEAN isNonByteCodeUse)
{
    BasicBlock * block = this->currentBlock;

    if (this->DoByteCodeUpwardExposedUsed())
    {
        if (!isNonByteCodeUse && stackSym->HasByteCodeRegSlot())
        {
            // Always track the sym use on the var sym.
            StackSym * byteCodeUseSym = stackSym;
            if (byteCodeUseSym->IsTypeSpec())
            {
                // It has to have a var version for byte code regs
                byteCodeUseSym = byteCodeUseSym->GetVarEquivSym(nullptr);
            }
            block->byteCodeUpwardExposedUsed->Set(byteCodeUseSym->m_id);
#if DBG
            // We can only track first level function stack syms right now
            if (byteCodeUseSym->GetByteCodeFunc() == this->func)
            {
                Js::RegSlot byteCodeRegSlot = byteCodeUseSym->GetByteCodeRegSlot();
                if (block->byteCodeRestoreSyms[byteCodeRegSlot] != byteCodeUseSym)
                {
                    AssertMsg(block->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                        "Can't have two active lifetime for the same byte code register");
                    block->byteCodeRestoreSyms[byteCodeRegSlot] = byteCodeUseSym;
                }
            }
#endif
        }
    }

    // The collection pass only gathers byte-code liveness; skip the rest.
    if(IsCollectionPass())
    {
        return true;
    }

    if (this->DoMarkTempNumbers())
    {
        // Only loop blocks carry temp-transfer dependencies.
        Assert((block->loop != nullptr) == block->tempNumberTracker->HasTempTransferDependencies());
        block->tempNumberTracker->ProcessUse(stackSym, this);
    }
    if (this->DoMarkTempObjects())
    {
        Assert((block->loop != nullptr) == block->tempObjectTracker->HasTempTransferDependencies());
        block->tempObjectTracker->ProcessUse(stackSym, this);
    }
#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        Assert((block->loop != nullptr) == block->tempObjectVerifyTracker->HasTempTransferDependencies());
        block->tempObjectVerifyTracker->ProcessUse(stackSym, this);
    }
#endif
    // TestAndSet returns the previous bit: true iff the sym was already live.
    return !!block->upwardExposedUses->TestAndSet(stackSym->m_id);
}
  4894. bool
  4895. BackwardPass::ProcessSymUse(Sym * sym, bool isRegOpndUse, BOOLEAN isNonByteCodeUse)
  4896. {
  4897. BasicBlock * block = this->currentBlock;
  4898. if (CanDeadStoreInstrForScopeObjRemoval(sym))
  4899. {
  4900. return false;
  4901. }
  4902. if (sym->IsPropertySym())
  4903. {
  4904. PropertySym * propertySym = sym->AsPropertySym();
  4905. ProcessStackSymUse(propertySym->m_stackSym, isNonByteCodeUse);
  4906. if(IsCollectionPass())
  4907. {
  4908. return true;
  4909. }
  4910. if (this->DoDeadStoreSlots())
  4911. {
  4912. block->slotDeadStoreCandidates->Clear(propertySym->m_id);
  4913. }
  4914. if (tag == Js::BackwardPhase)
  4915. {
  4916. // Backward phase tracks liveness of fields to tell GlobOpt where we may need bailout.
  4917. return this->ProcessPropertySymUse(propertySym);
  4918. }
  4919. else
  4920. {
  4921. // Dead-store phase tracks copy propped syms, so it only cares about ByteCodeUses we inserted,
  4922. // not live fields.
  4923. return false;
  4924. }
  4925. }
  4926. return ProcessStackSymUse(sym->AsStackSym(), isNonByteCodeUse);
  4927. }
  4928. bool
  4929. BackwardPass::MayPropertyBeWrittenTo(Js::PropertyId propertyId)
  4930. {
  4931. return this->func->anyPropertyMayBeWrittenTo ||
  4932. (this->func->propertiesWrittenTo != nullptr && this->func->propertiesWrittenTo->ContainsKey(propertyId));
  4933. }
// Processes a use of a property sym opnd that may participate in an object
// type check sequence. In the backward phase this just records field liveness
// for GlobOpt; in the dead-store phase it decides whether the opnd's type
// check is dead, tracks object-layout constraints for header-inlined objects,
// performs any required final-type transitions, and pushes guarded-property /
// write-guard information up the flow graph.
void
BackwardPass::ProcessPropertySymOpndUse(IR::PropertySymOpnd * opnd)
{
    // If this operand doesn't participate in the type check sequence it's a pass-through.
    // We will not set any bits on the operand and we will ignore them when lowering.
    if (!opnd->IsTypeCheckSeqCandidate())
    {
        return;
    }

    AssertMsg(opnd->HasObjectTypeSym(), "Optimized property sym operand without a type sym?");
    SymID typeSymId = opnd->GetObjectTypeSym()->m_id;

    BasicBlock * block = this->currentBlock;

    if (this->tag == Js::BackwardPhase)
    {
        // In the backward phase, we have no availability info, and we're trying to see
        // where there are live fields so we can decide where to put bailouts.

        Assert(opnd->MayNeedTypeCheckProtection());

        block->upwardExposedFields->Set(typeSymId);

        TrackObjTypeSpecWriteGuards(opnd, block);
    }
    else
    {
        // In the dead-store phase, we're trying to see where the lowered code needs to make sure to check
        // types for downstream load/stores. We're also setting up the upward-exposed uses at loop headers
        // so register allocation will be correct.

        Assert(opnd->MayNeedTypeCheckProtection());

        const bool isStore = opnd == this->currentInstr->GetDst();

        // Note that we don't touch upwardExposedUses here.
        if (opnd->IsTypeAvailable())
        {
            // Type is protected upstream: this opnd's check is dead unless the
            // type sym was not yet live (TestAndSet returns the previous bit).
            opnd->SetTypeDead(!block->upwardExposedFields->TestAndSet(typeSymId));

            if (opnd->IsTypeChecked() && opnd->IsObjectHeaderInlined())
            {
                // The object's type must not change in a way that changes the layout.
                // If we see a StFld with a type check bailout between here and the type check that guards this
                // property, we must not dead-store the StFld's type check bailout, even if that operand's type appears
                // dead, because that object may alias this one.
                BVSparse<JitArenaAllocator>* bv = block->typesNeedingKnownObjectLayout;
                if (bv == nullptr)
                {
                    bv = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                    block->typesNeedingKnownObjectLayout = bv;
                }
                bv->Set(typeSymId);
            }
        }
        else
        {
            opnd->SetTypeDead(
                !block->upwardExposedFields->TestAndClear(typeSymId) &&
                (
                    // Don't set the type dead if this is a store that may change the layout in a way that invalidates
                    // optimized load/stores downstream. Leave it non-dead in that case so the type check bailout
                    // is preserved and so that Lower will generate the bailout properly.
                    !isStore ||
                    !block->typesNeedingKnownObjectLayout ||
                    block->typesNeedingKnownObjectLayout->IsEmpty()
                )
            );

            // This opnd establishes the type; downstream layout constraints on
            // this type sym are satisfied here.
            BVSparse<JitArenaAllocator>* bv = block->typesNeedingKnownObjectLayout;
            if (bv != nullptr)
            {
                bv->Clear(typeSymId);
            }
        }

        bool mayNeedTypeTransition = true;
        if (!opnd->HasTypeMismatch() && func->DoGlobOpt())
        {
            mayNeedTypeTransition = !isStore;
        }
        if (mayNeedTypeTransition &&
            !this->IsPrePass() &&
            !this->currentInstr->HasTypeCheckBailOut() &&
            (opnd->NeedsPrimaryTypeCheck() ||
             opnd->NeedsLocalTypeCheck() ||
             opnd->NeedsLoadFromProtoTypeCheck()))
        {
            // This is a "checked" opnd that nevertheless will have some kind of type check generated for it.
            // (Typical case is a load from prototype with no upstream guard.)
            // If the type check fails, we will call a helper, which will require that the type be correct here.
            // Final type can't be pushed up past this point. Do whatever type transition is required.
            if (block->stackSymToFinalType != nullptr)
            {
                StackSym *baseSym = opnd->GetObjectSym();
                AddPropertyCacheBucket *pBucket = block->stackSymToFinalType->Get(baseSym->m_id);
                if (pBucket &&
                    pBucket->GetFinalType() != nullptr &&
                    pBucket->GetFinalType() != pBucket->GetInitialType())
                {
                    this->InsertTypeTransition(this->currentInstr->m_next, baseSym, pBucket, block->upwardExposedUses);
                    pBucket->SetFinalType(pBucket->GetInitialType());
                }
            }
        }

        if (!opnd->HasTypeMismatch() && func->DoGlobOpt())
        {
            // Do this after the above code, as the value of the final type may change there.
            TrackAddPropertyTypes(opnd, block);
        }

        TrackObjTypeSpecProperties(opnd, block);
        TrackObjTypeSpecWriteGuards(opnd, block);
    }
}
  5037. void
  5038. BackwardPass::TrackObjTypeSpecProperties(IR::PropertySymOpnd *opnd, BasicBlock *block)
  5039. {
  5040. Assert(tag == Js::DeadStorePhase);
  5041. Assert(opnd->IsTypeCheckSeqCandidate());
  5042. // Now that we're in the dead store pass and we know definitively which operations will have a type
  5043. // check and which are protected by an upstream type check, we can push the lists of guarded properties
  5044. // up the flow graph and drop them on the type checks for the corresponding object symbol.
  5045. if (opnd->IsTypeCheckSeqParticipant())
  5046. {
  5047. // Add this operation to the list of guarded operations for this object symbol.
  5048. HashTable<ObjTypeGuardBucket>* stackSymToGuardedProperties = block->stackSymToGuardedProperties;
  5049. if (stackSymToGuardedProperties == nullptr)
  5050. {
  5051. stackSymToGuardedProperties = HashTable<ObjTypeGuardBucket>::New(this->tempAlloc, 8);
  5052. block->stackSymToGuardedProperties = stackSymToGuardedProperties;
  5053. }
  5054. StackSym* objSym = opnd->GetObjectSym();
  5055. ObjTypeGuardBucket* bucket = stackSymToGuardedProperties->FindOrInsertNew(objSym->m_id);
  5056. BVSparse<JitArenaAllocator>* guardedPropertyOps = bucket->GetGuardedPropertyOps();
  5057. if (guardedPropertyOps == nullptr)
  5058. {
  5059. // The bit vectors we push around the flow graph only need to live as long as this phase.
  5060. guardedPropertyOps = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  5061. bucket->SetGuardedPropertyOps(guardedPropertyOps);
  5062. }
  5063. #if DBG
  5064. FOREACH_BITSET_IN_SPARSEBV(propOpId, guardedPropertyOps)
  5065. {
  5066. ObjTypeSpecFldInfo* existingFldInfo = this->func->GetGlobalObjTypeSpecFldInfo(propOpId);
  5067. Assert(existingFldInfo != nullptr);
  5068. if (existingFldInfo->GetPropertyId() != opnd->GetPropertyId())
  5069. {
  5070. continue;
  5071. }
  5072. // It would be very nice to assert that the info we have for this property matches all properties guarded thus far.
  5073. // Unfortunately, in some cases of object pointer copy propagation into a loop, we may end up with conflicting
  5074. // information for the same property. We simply ignore the conflict and emit an equivalent type check, which
  5075. // will attempt to check for one property on two different slots, and obviously fail. Thus we may have a
  5076. // guaranteed bailout, but we'll simply re-JIT with equivalent object type spec disabled. To avoid this
  5077. // issue altogether, we would need to track the set of guarded properties along with the type value in the
  5078. // forward pass, and when a conflict is detected either not optimize the offending instruction, or correct
  5079. // its information based on the info from the property in the type value info.
  5080. //Assert(!existingFldInfo->IsPoly() || !opnd->IsPoly() || GlobOpt::AreTypeSetsIdentical(existingFldInfo->GetEquivalentTypeSet(), opnd->GetEquivalentTypeSet()));
  5081. //Assert(existingFldInfo->GetSlotIndex() == opnd->GetSlotIndex());
  5082. if (PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func) && !JITManager::GetJITManager()->IsJITServer())
  5083. {
  5084. if (existingFldInfo->IsPoly() && opnd->IsPoly() &&
  5085. (!GlobOpt::AreTypeSetsIdentical(existingFldInfo->GetEquivalentTypeSet(), opnd->GetEquivalentTypeSet()) ||
  5086. (existingFldInfo->GetSlotIndex() != opnd->GetSlotIndex())))
  5087. {
  5088. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  5089. Output::Print(_u("EquivObjTypeSpec: top function %s (%s): duplicate property clash on %s(#%d) on operation %u \n"),
  5090. this->func->GetJITFunctionBody()->GetDisplayName(), this->func->GetDebugNumberSet(debugStringBuffer),
  5091. this->func->GetInProcThreadContext()->GetPropertyRecord(opnd->GetPropertyId())->GetBuffer(), opnd->GetPropertyId(), opnd->GetObjTypeSpecFldId());
  5092. Output::Flush();
  5093. }
  5094. }
  5095. }
  5096. NEXT_BITSET_IN_SPARSEBV
  5097. #endif
  5098. bucket->AddToGuardedPropertyOps(opnd->GetObjTypeSpecFldId());
  5099. if (opnd->NeedsMonoCheck())
  5100. {
  5101. Assert(opnd->IsMono());
  5102. JITTypeHolder monoGuardType = opnd->IsInitialTypeChecked() ? opnd->GetInitialType() : opnd->GetType();
  5103. bucket->SetMonoGuardType(monoGuardType);
  5104. }
  5105. if (opnd->NeedsPrimaryTypeCheck())
  5106. {
  5107. // Grab the guarded properties which match this type check with respect to polymorphism and drop them
  5108. // on the operand. Only equivalent type checks can protect polymorphic properties to avoid a case where
  5109. // we have 1) a cache with type set {t1, t2} and property a, followed by 2) a cache with type t3 and
  5110. // property b, and 3) a cache with type set {t1, t2} and property c, where the slot index of property c
  5111. // on t1 and t2 is different than on t3. If cache 2 were to protect property c it would not verify that
  5112. // it resides on the correct slot for cache 3. Yes, an equivalent type check could protect monomorphic
  5113. // properties, but it would then unnecessarily verify their equivalence on the slow path.
  5114. // Also, make sure the guarded properties on the operand are allocated from the func's allocator to
  5115. // persists until lowering.
  5116. Assert(guardedPropertyOps != nullptr);
  5117. opnd->EnsureGuardedPropOps(this->func->m_alloc);
  5118. opnd->AddGuardedPropOps(guardedPropertyOps);
  5119. if (this->currentInstr->HasTypeCheckBailOut())
  5120. {
  5121. // Stop pushing the mono guard type up if it is being checked here.
  5122. if (bucket->NeedsMonoCheck())
  5123. {
  5124. if (this->currentInstr->HasEquivalentTypeCheckBailOut())
  5125. {
  5126. // Some instr protected by this one requires a monomorphic type check. (E.g., final type opt,
  5127. // fixed field not loaded from prototype.) Note the IsTypeAvailable test above: only do this at
  5128. // the initial type check that protects this path.
  5129. opnd->SetMonoGuardType(bucket->GetMonoGuardType());
  5130. this->currentInstr->ChangeEquivalentToMonoTypeCheckBailOut();
  5131. }
  5132. bucket->SetMonoGuardType(nullptr);
  5133. }
  5134. if (!opnd->IsTypeAvailable())
  5135. {
  5136. // Stop tracking the guarded properties if there's not another type check upstream.
  5137. bucket->SetGuardedPropertyOps(nullptr);
  5138. JitAdelete(this->tempAlloc, guardedPropertyOps);
  5139. block->stackSymToGuardedProperties->Clear(objSym->m_id);
  5140. }
  5141. }
  5142. #if DBG
  5143. {
  5144. // If there is no upstream type check that is live and could protect guarded properties, we better
  5145. // not have any properties remaining.
  5146. ObjTypeGuardBucket* objTypeGuardBucket = block->stackSymToGuardedProperties->Get(opnd->GetObjectSym()->m_id);
  5147. Assert(opnd->IsTypeAvailable() || objTypeGuardBucket == nullptr || objTypeGuardBucket->GetGuardedPropertyOps()->IsEmpty());
  5148. }
  5149. #endif
  5150. }
  5151. }
  5152. else if (opnd->NeedsLocalTypeCheck())
  5153. {
  5154. opnd->EnsureGuardedPropOps(this->func->m_alloc);
  5155. opnd->SetGuardedPropOp(opnd->GetObjTypeSpecFldId());
  5156. }
  5157. if (opnd->UsesAuxSlot() && opnd->IsTypeCheckSeqParticipant() && !opnd->HasTypeMismatch() && !opnd->IsLoadedFromProto())
  5158. {
  5159. bool auxSlotPtrUpwardExposed = false;
  5160. StackSym *auxSlotPtrSym = opnd->GetAuxSlotPtrSym();
  5161. if (opnd->IsAuxSlotPtrSymAvailable())
  5162. {
  5163. // This is an upward-exposed use of the aux slot pointer.
  5164. Assert(auxSlotPtrSym);
  5165. auxSlotPtrUpwardExposed = this->currentBlock->upwardExposedUses->TestAndSet(auxSlotPtrSym->m_id);
  5166. }
  5167. else if (auxSlotPtrSym != nullptr)
  5168. {
  5169. // The aux slot pointer is not upward-exposed at this point.
  5170. auxSlotPtrUpwardExposed = this->currentBlock->upwardExposedUses->TestAndClear(auxSlotPtrSym->m_id);
  5171. }
  5172. if (!this->IsPrePass() && auxSlotPtrUpwardExposed)
  5173. {
  5174. opnd->SetProducesAuxSlotPtr(true);
  5175. }
  5176. }
  5177. }
// Tracks write-guard symbols for operations that may need write-guard protection
// (proto loads / fixed-field checks), per backward-pass phase:
// - Backward phase: accumulate write-guard syms per object sym in
//   block->stackSymToWriteGuardsMap, and snapshot the upward-exposed guards onto
//   any mono operand that may still emit a type check.
// - Dead-store phase: mark the guard as checked when the property is provably
//   never written in this function, and clear guards from operands that won't
//   emit a primary type check.
void
BackwardPass::TrackObjTypeSpecWriteGuards(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    // TODO (ObjTypeSpec): Move write guard tracking to the forward pass, by recording on the type value
    // which property IDs have been written since the last type check. This will result in more accurate
    // tracking in cases when object pointer copy prop kicks in.
    if (this->tag == Js::BackwardPhase)
    {
        // If this operation may need a write guard (load from proto or fixed field check) then add its
        // write guard symbol to the map for this object. If it remains live (hasn't been written to)
        // until the type check upstream, it will get recorded there so that the type check can be registered
        // for invalidation on this property used in this operation.

        // (ObjTypeSpec): Consider supporting polymorphic write guards as well. We can't currently distinguish between mono and
        // poly write guards, and a type check can only protect operations matching with respect to polymorphism (see
        // BackwardPass::TrackObjTypeSpecProperties for details), so for now we only target monomorphic operations.
        if (opnd->IsMono() && opnd->MayNeedWriteGuardProtection())
        {
            // Lazily allocate the per-block map of object sym -> write guard bucket.
            if (block->stackSymToWriteGuardsMap == nullptr)
            {
                block->stackSymToWriteGuardsMap = HashTable<ObjWriteGuardBucket>::New(this->tempAlloc, 8);
            }

            ObjWriteGuardBucket* bucket = block->stackSymToWriteGuardsMap->FindOrInsertNew(opnd->GetObjectSym()->m_id);

            BVSparse<JitArenaAllocator>* writeGuards = bucket->GetWriteGuards();
            if (writeGuards == nullptr)
            {
                // The bit vectors we push around the flow graph only need to live as long as this phase.
                writeGuards = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
                bucket->SetWriteGuards(writeGuards);
            }

            PropertySym *propertySym = opnd->m_sym->AsPropertySym();
            Assert(propertySym->m_writeGuardSym != nullptr);
            SymID writeGuardSymId = propertySym->m_writeGuardSym->m_id;
            writeGuards->Set(writeGuardSymId);
        }

        // Record any live (upward exposed) write guards on this operation, if this operation may end up with
        // a type check. If we ultimately don't need a type check here, we will simply ignore the guards, because
        // an earlier type check will protect them.
        if (!IsPrePass() && opnd->IsMono() && !opnd->IsTypeDead())
        {
            Assert(opnd->GetWriteGuards() == nullptr);
            if (block->stackSymToWriteGuardsMap != nullptr)
            {
                ObjWriteGuardBucket* bucket = block->stackSymToWriteGuardsMap->Get(opnd->GetObjectSym()->m_id);
                if (bucket != nullptr)
                {
                    // Get all the write guards associated with this object sym and filter them down to those that
                    // are upward exposed. If we end up emitting a type check for this instruction, we will create
                    // a type property guard registered for all guarded proto properties and we will set the write
                    // guard syms live during forward pass, such that we can avoid unnecessary write guard type
                    // checks and bailouts on every proto property (as long as it hasn't been written to since the
                    // primary type check).
                    // Note: this copy is allocated from func->m_alloc so it persists until lowering.
                    auto writeGuards = bucket->GetWriteGuards()->CopyNew(this->func->m_alloc);
                    writeGuards->And(block->upwardExposedFields);
                    opnd->SetWriteGuards(writeGuards);
                }
            }
        }
    }
    else
    {
        // If we know this property has never been written to in this function (either on this object or any
        // of its aliases) we don't need the local type check.
        if (opnd->MayNeedWriteGuardProtection() && !opnd->IsWriteGuardChecked() && !MayPropertyBeWrittenTo(opnd->GetPropertyId()))
        {
            opnd->SetWriteGuardChecked(true);
        }

        // If we don't need a primary type check here let's clear the write guards. The primary type check upstream will
        // register the type check for the corresponding properties.
        if (!IsPrePass() && !opnd->NeedsPrimaryTypeCheck())
        {
            opnd->ClearWriteGuards();
        }
    }
}
// Dead-store-phase tracking of object type transitions caused by add-property
// stores ("final type" optimization). For each object stack sym, a bucket in
// block->stackSymToFinalType records the initial type (before the property add)
// and the final type (after the most downstream add). Upstream stores can then
// transition the object directly to the final type, skipping intermediate types.
void
BackwardPass::TrackAddPropertyTypes(IR::PropertySymOpnd *opnd, BasicBlock *block)
{
    // Do the work of objtypespec add-property opt even if it's disabled by PHASE option, so that we have
    // the dataflow info that can be inspected.

    Assert(this->tag == Js::DeadStorePhase);
    Assert(opnd->IsMono() || opnd->HasEquivalentTypeSet());

    JITTypeHolder typeWithProperty = opnd->IsMono() ? opnd->GetType() : opnd->GetFirstEquivalentType();
    JITTypeHolder typeWithoutProperty = opnd->HasInitialType() ? opnd->GetInitialType() : JITTypeHolder(nullptr);

    // Not an add-property transition (no initial type, types identical, or the
    // non-initial type was already checked): just propagate any tracked final
    // type onto the operand (when safe: not prepass, no bailout info) and bail.
    if (typeWithoutProperty == nullptr ||
        typeWithProperty == typeWithoutProperty ||
        (opnd->IsTypeChecked() && !opnd->IsInitialTypeChecked()))
    {
        if (!this->IsPrePass() && block->stackSymToFinalType != nullptr && !this->currentInstr->HasBailOutInfo())
        {
            PropertySym *propertySym = opnd->m_sym->AsPropertySym();
            AddPropertyCacheBucket *pBucket =
                block->stackSymToFinalType->Get(propertySym->m_stackSym->m_id);
            if (pBucket && pBucket->GetFinalType() != nullptr && pBucket->GetInitialType() != pBucket->GetFinalType())
            {
                opnd->SetFinalType(pBucket->GetFinalType());
            }
        }

        return;
    }

#if DBG
    Assert(typeWithProperty != nullptr);
    const JITTypeHandler * typeWithoutPropertyTypeHandler = typeWithoutProperty->GetTypeHandler();
    const JITTypeHandler * typeWithPropertyTypeHandler = typeWithProperty->GetTypeHandler();
    // TODO: OOP JIT, reenable assert
    //Assert(typeWithoutPropertyTypeHandler->GetPropertyCount() + 1 == typeWithPropertyTypeHandler->GetPropertyCount());
    AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(typeWithoutPropertyTypeHandler, typeWithPropertyTypeHandler),
        "TypeHandlers are not compatible for transition?");
    Assert(typeWithoutPropertyTypeHandler->GetSlotCapacity() <= typeWithPropertyTypeHandler->GetSlotCapacity());
#endif

    // If there's already a final type for this instance, record it on the operand.
    // If not, start tracking it.
    if (block->stackSymToFinalType == nullptr)
    {
        block->stackSymToFinalType = HashTable<AddPropertyCacheBucket>::New(this->tempAlloc, 8);
    }

    // Find or create the type-tracking record for this instance in this block.
    PropertySym *propertySym = opnd->m_sym->AsPropertySym();
    AddPropertyCacheBucket *pBucket =
        block->stackSymToFinalType->FindOrInsertNew(propertySym->m_stackSym->m_id);

    JITTypeHolder finalType(nullptr);
#if DBG
    JITTypeHolder deadStoreUnavailableFinalType(nullptr);
#endif
    if (pBucket->GetInitialType() == nullptr || opnd->GetType() != pBucket->GetInitialType())
    {
#if DBG
        if (opnd->GetType() == pBucket->deadStoreUnavailableInitialType)
        {
            deadStoreUnavailableFinalType = pBucket->deadStoreUnavailableFinalType;
        }
#endif
        // No info found, or the info was bad, so initialize it from this cache.
        finalType = opnd->GetType();
        pBucket->SetFinalType(finalType);
    }
    else
    {
        // Match: The type we push upward is now the typeWithoutProperty at this point,
        // and the final type is the one we've been tracking.
        finalType = pBucket->GetFinalType();
#if DBG
        deadStoreUnavailableFinalType = pBucket->deadStoreUnavailableFinalType;
#endif
    }

    pBucket->SetInitialType(typeWithoutProperty);

    if (!PHASE_OFF(Js::ObjTypeSpecStorePhase, this->func))
    {
#if DBG
        // We may regress in this case:
        // if (b)
        //     t1 = {};
        //     o = t1;
        //     o.x =
        // else
        //     t2 = {};
        //     o = t2;
        //     o.x =
        // o.y =
        //
        // Where the backward pass will propagate the final type in o.y to o.x, then globopt will copy prop t1 and t2 to o.x.
        // But not o.y (because of the merge). Then, in the dead store pass, o.y's final type will not propagate to t1.x and t2.x
        // respectively, thus regressing the final type. However, in both cases, the types of t1 and t2 are dead anyways.
        //
        // if the type is dead, we don't care if we have regressed the type, as no one is depending on it to skip type check anyways
        if (!opnd->IsTypeDead())
        {
            // This is the type that would have been propagated if we didn't kill it because the type isn't available
            JITTypeHolder checkFinalType = deadStoreUnavailableFinalType != nullptr ? deadStoreUnavailableFinalType : finalType;
            if (opnd->HasFinalType() && opnd->GetFinalType() != checkFinalType)
            {
                // Final type discovery must be progressively better (unless we kill it in the deadstore pass
                // when the type is not available during the forward pass)
                const JITTypeHandler * oldFinalTypeHandler = opnd->GetFinalType()->GetTypeHandler();
                const JITTypeHandler * checkFinalTypeHandler = checkFinalType->GetTypeHandler();

                // TODO: OOP JIT, enable assert
                //Assert(oldFinalTypeHandler->GetPropertyCount() < checkFinalTypeHandler->GetPropertyCount());
                AssertMsg(JITTypeHandler::IsTypeHandlerCompatibleForObjectHeaderInlining(oldFinalTypeHandler, checkFinalTypeHandler),
                    "TypeHandlers should be compatible for transition.");
                Assert(oldFinalTypeHandler->GetSlotCapacity() <= checkFinalTypeHandler->GetSlotCapacity());
            }
        }
#endif
        Assert(opnd->IsBeingAdded());
        if (!this->IsPrePass())
        {
            opnd->SetFinalType(finalType);
        }
        if (!opnd->IsTypeChecked())
        {
            // Transition from initial to final type will only happen at type check points.
            if (opnd->IsTypeAvailable())
            {
                pBucket->SetFinalType(pBucket->GetInitialType());
            }
        }
    }

#if DBG_DUMP
    if (PHASE_TRACE(Js::ObjTypeSpecStorePhase, this->func))
    {
        Output::Print(_u("ObjTypeSpecStore: "));
        this->currentInstr->Dump();
        pBucket->Dump();
    }
#endif

    // In the dead-store pass, we have forward information that tells us whether a "final type"
    // reached this point from an earlier store. If it didn't (i.e., it's not available here),
    // remove it from the backward map so that upstream stores will use the final type that is
    // live there. (This avoids unnecessary bailouts in cases where the final type is only live
    // on one branch of an "if", a case that the initial backward pass can't detect.)
    // An example:
    //    if (cond)
    //        o.x =
    //    o.y =
    if (!opnd->IsTypeAvailable())
    {
#if DBG
        // In DBG builds keep the unavailable info around (instead of deleting the
        // bucket) so the progressive-improvement assert above can consult it.
        pBucket->deadStoreUnavailableInitialType = pBucket->GetInitialType();
        if (pBucket->deadStoreUnavailableFinalType == nullptr)
        {
            pBucket->deadStoreUnavailableFinalType = pBucket->GetFinalType();
        }
        pBucket->SetInitialType(nullptr);
        pBucket->SetFinalType(nullptr);
#else
        block->stackSymToFinalType->Clear(propertySym->m_stackSym->m_id);
#endif
    }
}
  5406. void
  5407. BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
  5408. {
  5409. StackSym *objSym = this->func->m_symTable->FindStackSym(symId);
  5410. Assert(objSym);
  5411. this->InsertTypeTransition(instrInsertBefore, objSym, data, upwardExposedUses);
  5412. }
// Builds an AdjustObjType instruction (dst = final type address, src1 = object
// reg, src2 = initial type address) and inserts it before instrInsertBefore.
// If the initial->final transition needs a slot adjustment and the object's aux
// slot pointer sym is upward exposed, the opcode is switched to
// AdjustObjTypeReloadAuxSlotPtr so the aux slot pointer gets reloaded.
void
BackwardPass::InsertTypeTransition(IR::Instr *instrInsertBefore, StackSym *objSym, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
    Assert(!this->IsPrePass());

    IR::RegOpnd *baseOpnd = IR::RegOpnd::New(objSym, TyMachReg, this->func);
    baseOpnd->SetIsJITOptimizedReg(true);

    // Attach the JIT type as metadata on each address operand so downstream
    // phases can recover the full type info from the raw address.
    JITTypeHolder initialType = data->GetInitialType();
    IR::AddrOpnd *initialTypeOpnd =
        IR::AddrOpnd::New(data->GetInitialType()->GetAddr(), IR::AddrOpndKindDynamicType, this->func);
    initialTypeOpnd->m_metadata = initialType.t;

    JITTypeHolder finalType = data->GetFinalType();
    IR::AddrOpnd *finalTypeOpnd =
        IR::AddrOpnd::New(data->GetFinalType()->GetAddr(), IR::AddrOpndKindDynamicType, this->func);
    finalTypeOpnd->m_metadata = finalType.t;

    IR::Instr *adjustTypeInstr =
        IR::Instr::New(Js::OpCode::AdjustObjType, finalTypeOpnd, baseOpnd, initialTypeOpnd, this->func);

    if (upwardExposedUses)
    {
        // If this type change causes a slot adjustment, the aux slot pointer (if any) will be reloaded here, so take it out of upwardExposedUses.
        int oldCount;
        int newCount;
        Js::PropertyIndex inlineSlotCapacity;
        Js::PropertyIndex newInlineSlotCapacity;
        bool needSlotAdjustment =
            JITTypeHandler::NeedSlotAdjustment(initialType->GetTypeHandler(), finalType->GetTypeHandler(), &oldCount, &newCount, &inlineSlotCapacity, &newInlineSlotCapacity);
        if (needSlotAdjustment)
        {
            StackSym *auxSlotPtrSym = baseOpnd->m_sym->GetAuxSlotPtrSym();
            if (auxSlotPtrSym)
            {
                if (upwardExposedUses->Test(auxSlotPtrSym->m_id))
                {
                    adjustTypeInstr->m_opcode = Js::OpCode::AdjustObjTypeReloadAuxSlotPtr;
                }
            }
        }
    }

    instrInsertBefore->InsertBefore(adjustTypeInstr);
}
  5452. void
  5453. BackwardPass::InsertTypeTransitionAfterInstr(IR::Instr *instr, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
  5454. {
  5455. if (!this->IsPrePass())
  5456. {
  5457. // Transition to the final type if we don't bail out.
  5458. if (instr->EndsBasicBlock())
  5459. {
  5460. // The instr with the bailout is something like a branch that may not fall through.
  5461. // Insert the transitions instead at the beginning of each successor block.
  5462. this->InsertTypeTransitionsAtPriorSuccessors(this->currentBlock, nullptr, symId, data, upwardExposedUses);
  5463. }
  5464. else
  5465. {
  5466. this->InsertTypeTransition(instr->m_next, symId, data, upwardExposedUses);
  5467. }
  5468. }
  5469. // Note: we could probably clear this entry out of the table, but I don't know
  5470. // whether it's worth it, because it's likely coming right back.
  5471. data->SetFinalType(data->GetInitialType());
  5472. }
// Inserts the type transition for symId at the top of the given block: before
// the first real non-AdjustObjType instruction. If an AdjustObjType for the same
// sym already exists there, it must represent the same transition and nothing is
// done. If the block contains no suitable real instruction, the transition is
// appended after the block's last instruction.
void
BackwardPass::InsertTypeTransitionAtBlock(BasicBlock *block, int symId, AddPropertyCacheBucket *data, BVSparse<JitArenaAllocator>* upwardExposedUses)
{
    bool inserted = false;
    FOREACH_INSTR_IN_BLOCK(instr, block)
    {
        if (instr->IsRealInstr())
        {
            // Check for pre-existing type transition. There may be more than one AdjustObjType here,
            // so look at them all.
            if (instr->m_opcode == Js::OpCode::AdjustObjType)
            {
                // src1 of AdjustObjType is the object reg (see InsertTypeTransition).
                if (instr->GetSrc1()->AsRegOpnd()->m_sym->m_id == (SymID)symId)
                {
                    // This symbol already has a type transition at this point.
                    // It *must* be doing the same transition we're already trying to do.
                    Assert((intptr_t)instr->GetDst()->AsAddrOpnd()->m_address == data->GetFinalType()->GetAddr() &&
                           (intptr_t)instr->GetSrc2()->AsAddrOpnd()->m_address == data->GetInitialType()->GetAddr());
                    // Nothing to do.
                    return;
                }
            }
            else
            {
                // First real non-AdjustObjType instruction: insert before it.
                this->InsertTypeTransition(instr, symId, data, upwardExposedUses);
                inserted = true;
                break;
            }
        }
    }
    NEXT_INSTR_IN_BLOCK;
    if (!inserted)
    {
        Assert(block->GetLastInstr()->m_next);
        this->InsertTypeTransition(block->GetLastInstr()->m_next, symId, data, upwardExposedUses);
    }
}
  5510. void
  5511. BackwardPass::InsertTypeTransitionsAtPriorSuccessors(
  5512. BasicBlock *block,
  5513. BasicBlock *blockSucc,
  5514. int symId,
  5515. AddPropertyCacheBucket *data,
  5516. BVSparse<JitArenaAllocator>* upwardExposedUses)
  5517. {
  5518. // For each successor of block prior to blockSucc, adjust the type.
  5519. FOREACH_SUCCESSOR_BLOCK(blockFix, block)
  5520. {
  5521. if (blockFix == blockSucc)
  5522. {
  5523. return;
  5524. }
  5525. this->InsertTypeTransitionAtBlock(blockFix, symId, data, upwardExposedUses);
  5526. }
  5527. NEXT_SUCCESSOR_BLOCK;
  5528. }
// Materializes pending add-property type transitions at instructions past which
// a "final type" cannot be safely pushed: any bailout point, ctor cache updates,
// and object-header-inlined loads/stores whose base sym may alias a sym whose
// transition would undo object-header inlining.
void
BackwardPass::InsertTypeTransitionsAtPotentialKills()
{
    // Final types can't be pushed up past certain instructions.
    IR::Instr *instr = this->currentInstr;

    if (instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::UpdateNewScObjectCache)
    {
        // Final types can't be pushed up past a bailout point.
        // Insert any transitions called for by the current state of add-property buckets.
        // Also do this for ctor cache updates, to avoid putting a type in the ctor cache that extends past
        // the end of the ctor that the cache covers.
        this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool {
            this->InsertTypeTransitionAfterInstr(instr, symId, data, this->currentBlock->upwardExposedUses);
            return false;   // keep iterating over all buckets
        });
    }
    else
    {
        // If this is a load/store that expects an object-header-inlined type, don't push another sym's transition from
        // object-header-inlined to non-object-header-inlined type past it, because the two syms may be aliases.
        IR::PropertySymOpnd *propertySymOpnd = instr->GetPropertySymOpnd();
        if (propertySymOpnd && propertySymOpnd->IsObjectHeaderInlined())
        {
            SymID opndId = propertySymOpnd->m_sym->AsPropertySym()->m_stackSym->m_id;
            this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool {
                if ((SymID)symId == opndId)
                {
                    // This is the sym we're tracking. No aliasing to worry about.
                    return false;
                }
                if (propertySymOpnd->IsMono() && data->GetInitialType() != propertySymOpnd->GetType())
                {
                    // Type mismatch in a monomorphic case -- no aliasing.
                    return false;
                }
                if (this->TransitionUndoesObjectHeaderInlining(data))
                {
                    // We're transitioning from inlined to non-inlined, so we can't push it up any farther.
                    this->InsertTypeTransitionAfterInstr(instr, symId, data, this->currentBlock->upwardExposedUses);
                }
                return false;   // always continue to the next bucket
            });
        }
    }
}
  5574. template<class Fn>
  5575. void
  5576. BackwardPass::ForEachAddPropertyCacheBucket(Fn fn)
  5577. {
  5578. BasicBlock *block = this->currentBlock;
  5579. if (block->stackSymToFinalType == nullptr)
  5580. {
  5581. return;
  5582. }
  5583. FOREACH_HASHTABLE_ENTRY(AddPropertyCacheBucket, bucket, block->stackSymToFinalType)
  5584. {
  5585. AddPropertyCacheBucket *data = &bucket.element;
  5586. if (data->GetInitialType() != nullptr &&
  5587. data->GetInitialType() != data->GetFinalType())
  5588. {
  5589. bool done = fn(bucket.value, data);
  5590. if (done)
  5591. {
  5592. break;
  5593. }
  5594. }
  5595. }
  5596. NEXT_HASHTABLE_ENTRY;
  5597. }
  5598. bool
  5599. BackwardPass::TransitionUndoesObjectHeaderInlining(AddPropertyCacheBucket *data) const
  5600. {
  5601. JITTypeHolder type = data->GetInitialType();
  5602. if (type == nullptr || !Js::DynamicType::Is(type->GetTypeId()))
  5603. {
  5604. return false;
  5605. }
  5606. if (!type->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler())
  5607. {
  5608. return false;
  5609. }
  5610. type = data->GetFinalType();
  5611. if (type == nullptr || !Js::DynamicType::Is(type->GetTypeId()))
  5612. {
  5613. return false;
  5614. }
  5615. return !type->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler();
  5616. }
  5617. void
  5618. BackwardPass::CollectCloneStrCandidate(IR::Opnd * opnd)
  5619. {
  5620. IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
  5621. Assert(regOpnd != nullptr);
  5622. StackSym *sym = regOpnd->m_sym;
  5623. if (tag == Js::BackwardPhase
  5624. && currentInstr->m_opcode == Js::OpCode::Add_A
  5625. && currentInstr->GetSrc1() == opnd
  5626. && !this->IsPrePass()
  5627. && !this->IsCollectionPass()
  5628. && this->currentBlock->loop)
  5629. {
  5630. Assert(currentBlock->cloneStrCandidates != nullptr);
  5631. currentBlock->cloneStrCandidates->Set(sym->m_id);
  5632. }
  5633. }
  5634. void
  5635. BackwardPass::InvalidateCloneStrCandidate(IR::Opnd * opnd)
  5636. {
  5637. IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
  5638. Assert(regOpnd != nullptr);
  5639. StackSym *sym = regOpnd->m_sym;
  5640. if (tag == Js::BackwardPhase &&
  5641. (currentInstr->m_opcode != Js::OpCode::Add_A || currentInstr->GetSrc1()->AsRegOpnd()->m_sym->m_id != sym->m_id) &&
  5642. !this->IsPrePass() &&
  5643. !this->IsCollectionPass() &&
  5644. this->currentBlock->loop)
  5645. {
  5646. currentBlock->cloneStrCandidates->Clear(sym->m_id);
  5647. }
  5648. }
// Processes one operand use during the backward walk, dispatching on operand kind:
// - Reg: update liveness via ProcessSymUse/DoSetDead, maintain temp-last-use and
//   clone-string-candidate state, and do BailOnNotArray bookkeeping.
// - Sym: update liveness; for property syms, prepare type-check sequences
//   (backward phase only), track temp numbers, and process PropertySymOpnd uses.
// - Indir: process base and (optional) index syms, track temp numbers, and handle
//   array reg opnd uses in the dead-store phase.
void
BackwardPass::ProcessUse(IR::Opnd * opnd)
{
    switch (opnd->GetKind())
    {
    case IR::OpndKindReg:
        {
            IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
            StackSym *sym = regOpnd->m_sym;

            if (!IsCollectionPass())
            {
                // isTempLastUse is only used for string concat right now, so lets not mark it if it's not a string.
                // If it's upward exposed, it is not it's last use.
                if (regOpnd->m_isTempLastUse && (regOpnd->GetValueType().IsNotString() || this->currentBlock->upwardExposedUses->Test(sym->m_id) || sym->m_mayNotBeTempLastUse))
                {
                    regOpnd->m_isTempLastUse = false;
                }
                this->CollectCloneStrCandidate(opnd);
            }

            // Mark the use; the operand is dead if the sym had no prior (downstream) use.
            this->DoSetDead(regOpnd, !this->ProcessSymUse(sym, true, regOpnd->GetIsJITOptimizedReg()));

            if (IsCollectionPass())
            {
                break;
            }

            if (tag == Js::DeadStorePhase && regOpnd->IsArrayRegOpnd())
            {
                ProcessArrayRegOpndUse(currentInstr, regOpnd->AsArrayRegOpnd());
            }

            if (currentInstr->m_opcode == Js::OpCode::BailOnNotArray)
            {
                Assert(tag == Js::DeadStorePhase);
                const ValueType valueType(regOpnd->GetValueType());
                if(valueType.IsLikelyArrayOrObjectWithArray())
                {
                    currentBlock->noImplicitCallUses->Clear(sym->m_id);

                    // We are being conservative here to always check for missing value
                    // if any of them expect no missing value. That is because we don't know
                    // what set of sym is equivalent (copied) from the one we are testing for right now.
                    if(valueType.HasNoMissingValues() &&
                        !currentBlock->noImplicitCallNoMissingValuesUses->IsEmpty() &&
                        !IsPrePass())
                    {
                        // There is a use of this sym that requires this array to have no missing values, so this instruction
                        // needs to bail out if the array has missing values.
                        Assert(currentInstr->GetBailOutKind() == IR::BailOutOnNotArray ||
                               currentInstr->GetBailOutKind() == IR::BailOutOnNotNativeArray);
                        currentInstr->SetBailOutKind(currentInstr->GetBailOutKind() | IR::BailOutOnMissingValue);
                    }

                    currentBlock->noImplicitCallNoMissingValuesUses->Clear(sym->m_id);
                    currentBlock->noImplicitCallNativeArrayUses->Clear(sym->m_id);
                }
            }
        }
        break;
    case IR::OpndKindSym:
        {
            IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
            Sym * sym = symOpnd->m_sym;

            this->DoSetDead(symOpnd, !this->ProcessSymUse(sym, false, opnd->GetIsJITOptimizedReg()));

            if (IsCollectionPass())
            {
                break;
            }

            if (sym->IsPropertySym())
            {
                // TODO: We don't have last use info for property sym
                // and we don't set the last use of the stacksym inside the property sym
                if (tag == Js::BackwardPhase)
                {
                    if (opnd->AsSymOpnd()->IsPropertySymOpnd())
                    {
                        this->globOpt->PreparePropertySymOpndForTypeCheckSeq(symOpnd->AsPropertySymOpnd(), this->currentInstr, this->currentBlock->loop);
                    }
                }

                if (this->DoMarkTempNumbersOnTempObjects())
                {
                    this->currentBlock->tempNumberTracker->ProcessPropertySymUse(symOpnd, this->currentInstr, this);
                }

                if (symOpnd->IsPropertySymOpnd())
                {
                    this->ProcessPropertySymOpndUse(symOpnd->AsPropertySymOpnd());
                }
            }
        }
        break;
    case IR::OpndKindIndir:
        {
            IR::IndirOpnd * indirOpnd = opnd->AsIndirOpnd();
            IR::RegOpnd * baseOpnd = indirOpnd->GetBaseOpnd();

            this->DoSetDead(baseOpnd, !this->ProcessSymUse(baseOpnd->m_sym, false, baseOpnd->GetIsJITOptimizedReg()));

            // The index operand may be absent (constant-offset indirs).
            IR::RegOpnd * indexOpnd = indirOpnd->GetIndexOpnd();
            if (indexOpnd)
            {
                this->DoSetDead(indexOpnd, !this->ProcessSymUse(indexOpnd->m_sym, false, indexOpnd->GetIsJITOptimizedReg()));
            }

            if(IsCollectionPass())
            {
                break;
            }

            if (this->DoMarkTempNumbersOnTempObjects())
            {
                this->currentBlock->tempNumberTracker->ProcessIndirUse(indirOpnd, currentInstr, this);
            }

            if(tag == Js::DeadStorePhase && baseOpnd->IsArrayRegOpnd())
            {
                ProcessArrayRegOpndUse(currentInstr, baseOpnd->AsArrayRegOpnd());
            }
        }
        break;
    }
}
  5760. bool
  5761. BackwardPass::ProcessPropertySymUse(PropertySym *propertySym)
  5762. {
  5763. Assert(this->tag == Js::BackwardPhase);
  5764. BasicBlock *block = this->currentBlock;
  5765. bool isLive = !!block->upwardExposedFields->TestAndSet(propertySym->m_id);
  5766. if (propertySym->m_propertyEquivSet)
  5767. {
  5768. block->upwardExposedFields->Or(propertySym->m_propertyEquivSet);
  5769. }
  5770. return isLive;
  5771. }
  5772. void
  5773. BackwardPass::MarkTemp(StackSym * sym)
  5774. {
  5775. Assert(!IsCollectionPass());
  5776. // Don't care about type specialized syms
  5777. if (!sym->IsVar())
  5778. {
  5779. return;
  5780. }
  5781. BasicBlock * block = this->currentBlock;
  5782. if (this->DoMarkTempNumbers())
  5783. {
  5784. Assert((block->loop != nullptr) == block->tempNumberTracker->HasTempTransferDependencies());
  5785. block->tempNumberTracker->MarkTemp(sym, this);
  5786. }
  5787. if (this->DoMarkTempObjects())
  5788. {
  5789. Assert((block->loop != nullptr) == block->tempObjectTracker->HasTempTransferDependencies());
  5790. block->tempObjectTracker->MarkTemp(sym, this);
  5791. }
  5792. #if DBG
  5793. if (this->DoMarkTempObjectVerify())
  5794. {
  5795. Assert((block->loop != nullptr) == block->tempObjectVerifyTracker->HasTempTransferDependencies());
  5796. block->tempObjectVerifyTracker->MarkTemp(sym, this);
  5797. }
  5798. #endif
  5799. }
  5800. void
  5801. BackwardPass::MarkTempProcessInstr(IR::Instr * instr)
  5802. {
  5803. Assert(!IsCollectionPass());
  5804. if (this->currentBlock->isDead)
  5805. {
  5806. return;
  5807. }
  5808. BasicBlock * block;
  5809. block = this->currentBlock;
  5810. if (this->DoMarkTempNumbers())
  5811. {
  5812. block->tempNumberTracker->ProcessInstr(instr, this);
  5813. }
  5814. if (this->DoMarkTempObjects())
  5815. {
  5816. block->tempObjectTracker->ProcessInstr(instr);
  5817. }
  5818. #if DBG
  5819. if (this->DoMarkTempObjectVerify())
  5820. {
  5821. block->tempObjectVerifyTracker->ProcessInstr(instr, this);
  5822. }
  5823. #endif
  5824. }
  5825. #if DBG_DUMP
  5826. void
  5827. BackwardPass::DumpMarkTemp()
  5828. {
  5829. Assert(!IsCollectionPass());
  5830. BasicBlock * block = this->currentBlock;
  5831. if (this->DoMarkTempNumbers())
  5832. {
  5833. block->tempNumberTracker->Dump();
  5834. }
  5835. if (this->DoMarkTempObjects())
  5836. {
  5837. block->tempObjectTracker->Dump();
  5838. }
  5839. #if DBG
  5840. if (this->DoMarkTempObjectVerify())
  5841. {
  5842. block->tempObjectVerifyTracker->Dump();
  5843. }
  5844. #endif
  5845. }
  5846. #endif
  5847. void
  5848. BackwardPass::SetSymIsUsedOnlyInNumberIfLastUse(IR::Opnd *const opnd)
  5849. {
  5850. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5851. if (stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  5852. {
  5853. symUsedOnlyForNumberBySymId->Set(stackSym->m_id);
  5854. }
  5855. }
  5856. void
  5857. BackwardPass::SetSymIsNotUsedOnlyInNumber(IR::Opnd *const opnd)
  5858. {
  5859. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5860. if (stackSym)
  5861. {
  5862. symUsedOnlyForNumberBySymId->Clear(stackSym->m_id);
  5863. }
  5864. }
  5865. void
  5866. BackwardPass::SetSymIsUsedOnlyInBitOpsIfLastUse(IR::Opnd *const opnd)
  5867. {
  5868. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5869. if (stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  5870. {
  5871. symUsedOnlyForBitOpsBySymId->Set(stackSym->m_id);
  5872. }
  5873. }
  5874. void
  5875. BackwardPass::SetSymIsNotUsedOnlyInBitOps(IR::Opnd *const opnd)
  5876. {
  5877. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  5878. if (stackSym)
  5879. {
  5880. symUsedOnlyForBitOpsBySymId->Clear(stackSym->m_id);
  5881. }
  5882. }
  5883. void
  5884. BackwardPass::TrackBitWiseOrNumberOp(IR::Instr *const instr)
  5885. {
  5886. Assert(instr);
  5887. const bool trackBitWiseop = DoTrackBitOpsOrNumber();
  5888. const bool trackNumberop = trackBitWiseop;
  5889. const Js::OpCode opcode = instr->m_opcode;
  5890. StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
  5891. if (!trackBitWiseop && !trackNumberop)
  5892. {
  5893. return;
  5894. }
  5895. if (!instr->IsRealInstr())
  5896. {
  5897. return;
  5898. }
  5899. if (dstSym)
  5900. {
  5901. // For a dst where the def is in this block, transfer the current info into the instruction
  5902. if (trackBitWiseop && symUsedOnlyForBitOpsBySymId->TestAndClear(dstSym->m_id))
  5903. {
  5904. instr->dstIsAlwaysConvertedToInt32 = true;
  5905. }
  5906. if (trackNumberop && symUsedOnlyForNumberBySymId->TestAndClear(dstSym->m_id))
  5907. {
  5908. instr->dstIsAlwaysConvertedToNumber = true;
  5909. }
  5910. }
  5911. // If the instruction can cause src values to escape the local scope, the srcs can't be optimized
  5912. if (OpCodeAttr::NonTempNumberSources(opcode))
  5913. {
  5914. if (trackBitWiseop)
  5915. {
  5916. SetSymIsNotUsedOnlyInBitOps(instr->GetSrc1());
  5917. SetSymIsNotUsedOnlyInBitOps(instr->GetSrc2());
  5918. }
  5919. if (trackNumberop)
  5920. {
  5921. SetSymIsNotUsedOnlyInNumber(instr->GetSrc1());
  5922. SetSymIsNotUsedOnlyInNumber(instr->GetSrc2());
  5923. }
  5924. return;
  5925. }
  5926. if (trackBitWiseop)
  5927. {
  5928. switch (opcode)
  5929. {
  5930. // Instructions that can cause src values to escape the local scope have already been excluded
  5931. case Js::OpCode::Not_A:
  5932. case Js::OpCode::And_A:
  5933. case Js::OpCode::Or_A:
  5934. case Js::OpCode::Xor_A:
  5935. case Js::OpCode::Shl_A:
  5936. case Js::OpCode::Shr_A:
  5937. case Js::OpCode::Not_I4:
  5938. case Js::OpCode::And_I4:
  5939. case Js::OpCode::Or_I4:
  5940. case Js::OpCode::Xor_I4:
  5941. case Js::OpCode::Shl_I4:
  5942. case Js::OpCode::Shr_I4:
  5943. // These instructions don't generate -0, and their behavior is the same for any src that is -0 or +0
  5944. SetSymIsUsedOnlyInBitOpsIfLastUse(instr->GetSrc1());
  5945. SetSymIsUsedOnlyInBitOpsIfLastUse(instr->GetSrc2());
  5946. break;
  5947. default:
  5948. SetSymIsNotUsedOnlyInBitOps(instr->GetSrc1());
  5949. SetSymIsNotUsedOnlyInBitOps(instr->GetSrc2());
  5950. break;
  5951. }
  5952. }
  5953. if (trackNumberop)
  5954. {
  5955. switch (opcode)
  5956. {
  5957. // Instructions that can cause src values to escape the local scope have already been excluded
  5958. case Js::OpCode::Conv_Num:
  5959. case Js::OpCode::Div_A:
  5960. case Js::OpCode::Mul_A:
  5961. case Js::OpCode::Sub_A:
  5962. case Js::OpCode::Rem_A:
  5963. case Js::OpCode::Incr_A:
  5964. case Js::OpCode::Decr_A:
  5965. case Js::OpCode::Neg_A:
  5966. case Js::OpCode::Not_A:
  5967. case Js::OpCode::ShrU_A:
  5968. case Js::OpCode::ShrU_I4:
  5969. case Js::OpCode::And_A:
  5970. case Js::OpCode::Or_A:
  5971. case Js::OpCode::Xor_A:
  5972. case Js::OpCode::Shl_A:
  5973. case Js::OpCode::Shr_A:
  5974. // These instructions don't generate -0, and their behavior is the same for any src that is -0 or +0
  5975. SetSymIsUsedOnlyInNumberIfLastUse(instr->GetSrc1());
  5976. SetSymIsUsedOnlyInNumberIfLastUse(instr->GetSrc2());
  5977. break;
  5978. default:
  5979. SetSymIsNotUsedOnlyInNumber(instr->GetSrc1());
  5980. SetSymIsNotUsedOnlyInNumber(instr->GetSrc2());
  5981. break;
  5982. }
  5983. }
  5984. }
  5985. void
  5986. BackwardPass::RemoveNegativeZeroBailout(IR::Instr* instr)
  5987. {
  5988. Assert(instr->HasBailOutInfo() && (instr->GetBailOutKind() & IR::BailOutOnNegativeZero));
  5989. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  5990. bailOutKind = bailOutKind & ~IR::BailOutOnNegativeZero;
  5991. if (bailOutKind)
  5992. {
  5993. instr->SetBailOutKind(bailOutKind);
  5994. }
  5995. else
  5996. {
  5997. instr->ClearBailOutInfo();
  5998. if (preOpBailOutInstrToProcess == instr)
  5999. {
  6000. preOpBailOutInstrToProcess = nullptr;
  6001. }
  6002. }
  6003. }
// Walks one instruction (in backward order) and updates the negative-zero and
// int-overflow tracking state:
// - sets instr->ignoreNegativeZero / ignoreIntOverflow / ignoreIntOverflowInRange
//   from the per-sym bit-vectors when the dst's tracked state allows it,
// - propagates "matters"/"does not matter" state to the instruction's srcs based
//   on the opcode's semantics, and
// - maintains currentBlock->intOverflowDoesNotMatterRange, the range of
//   instructions over which compounded int overflow can be ignored (the forward
//   pass may insert conversions with bailouts at range boundaries).
// NOTE(review): control flow below is order-sensitive (fall-throughs into
// `default:`, a goto label inside a switch); preserve it exactly when editing.
void
BackwardPass::TrackIntUsage(IR::Instr *const instr)
{
    Assert(instr);
    const bool trackNegativeZero = DoTrackNegativeZero();
    const bool trackIntOverflow = DoTrackIntOverflow();
    const bool trackCompoundedIntOverflow = DoTrackCompoundedIntOverflow();
    const bool trackNon32BitOverflow = DoTrackNon32BitOverflow();
    if(!(trackNegativeZero || trackIntOverflow || trackCompoundedIntOverflow))
    {
        return;
    }
    const Js::OpCode opcode = instr->m_opcode;
    if(trackCompoundedIntOverflow && opcode == Js::OpCode::StatementBoundary && instr->AsPragmaInstr()->m_statementIndex == 0)
    {
        // Cannot bail out before the first statement boundary, so the range cannot extend beyond this instruction
        Assert(!instr->ignoreIntOverflowInRange);
        EndIntOverflowDoesNotMatterRange();
        return;
    }
    if(!instr->IsRealInstr())
    {
        return;
    }
    StackSym *const dstSym = IR::RegOpnd::TryGetStackSym(instr->GetDst());
    bool ignoreIntOverflowCandidate = false;
    if(dstSym)
    {
        // For a dst where the def is in this block, transfer the current info into the instruction
        if(trackNegativeZero)
        {
            if (negativeZeroDoesNotMatterBySymId->Test(dstSym->m_id))
            {
                instr->ignoreNegativeZero = true;
            }
            if (tag == Js::DeadStorePhase)
            {
                // Dead-store phase additionally tries to remove -0 bailouts that
                // are provably unnecessary for this def.
                if (negativeZeroDoesNotMatterBySymId->TestAndClear(dstSym->m_id))
                {
                    if (instr->HasBailOutInfo())
                    {
                        IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                        if (bailOutKind & IR::BailOutOnNegativeZero)
                        {
                            RemoveNegativeZeroBailout(instr);
                        }
                    }
                }
                else
                {
                    if (instr->HasBailOutInfo())
                    {
                        if (instr->GetBailOutKind() & IR::BailOutOnNegativeZero)
                        {
                            if (this->currentBlock->couldRemoveNegZeroBailoutForDef->TestAndClear(dstSym->m_id))
                            {
                                RemoveNegativeZeroBailout(instr);
                            }
                        }
                        // This instruction could potentially bail out. Hence, we cannot reliably remove negative zero
                        // bailouts upstream. If we did, and the operation actually produced a -0, and this instruction
                        // bailed out, we'd use +0 instead of -0 in the interpreter.
                        this->currentBlock->couldRemoveNegZeroBailoutForDef->ClearAll();
                    }
                }
            }
            else
            {
                this->negativeZeroDoesNotMatterBySymId->Clear(dstSym->m_id);
            }
        }
        if(trackIntOverflow)
        {
            ignoreIntOverflowCandidate = !!intOverflowDoesNotMatterBySymId->TestAndClear(dstSym->m_id);
            if(trackCompoundedIntOverflow)
            {
                instr->ignoreIntOverflowInRange = !!intOverflowDoesNotMatterInRangeBySymId->TestAndClear(dstSym->m_id);
            }
        }
    }
    // If the instruction can cause src values to escape the local scope, the srcs can't be optimized
    if(OpCodeAttr::NonTempNumberSources(opcode))
    {
        if(trackNegativeZero)
        {
            SetNegativeZeroMatters(instr->GetSrc1());
            SetNegativeZeroMatters(instr->GetSrc2());
        }
        if(trackIntOverflow)
        {
            SetIntOverflowMatters(instr->GetSrc1());
            SetIntOverflowMatters(instr->GetSrc2());
            if(trackCompoundedIntOverflow)
            {
                instr->ignoreIntOverflowInRange = false;
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                EndIntOverflowDoesNotMatterRange();
            }
        }
        return;
    }
    // -0 tracking
    if(trackNegativeZero)
    {
        switch(opcode)
        {
            // Instructions that can cause src values to escape the local scope have already been excluded
            case Js::OpCode::FromVar:
            case Js::OpCode::Conv_Prim:
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(!instr->GetSrc2() || instr->GetDst()->GetType() == instr->GetSrc1()->GetType());
                if(instr->GetDst()->IsInt32())
                {
                    // Conversion to int32 that is either explicit, or has a bailout check ensuring that it's an int value
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    break;
                }
                // fall-through

            default:
                if(dstSym && !instr->ignoreNegativeZero)
                {
                    // -0 matters for dst, so -0 also matters for srcs
                    SetNegativeZeroMatters(instr->GetSrc1());
                    SetNegativeZeroMatters(instr->GetSrc2());
                    break;
                }
                if(opcode == Js::OpCode::Div_A || opcode == Js::OpCode::Div_I4)
                {
                    // src1 is being divided by src2, so -0 matters for src2
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroMatters(instr->GetSrc2());
                    break;
                }
                // fall-through

            case Js::OpCode::Incr_A:
            case Js::OpCode::Decr_A:
                // Adding 1 to something or subtracting 1 from something does not generate -0
            case Js::OpCode::Not_A:
            case Js::OpCode::And_A:
            case Js::OpCode::Or_A:
            case Js::OpCode::Xor_A:
            case Js::OpCode::Shl_A:
            case Js::OpCode::Shr_A:
            case Js::OpCode::ShrU_A:
            case Js::OpCode::Not_I4:
            case Js::OpCode::And_I4:
            case Js::OpCode::Or_I4:
            case Js::OpCode::Xor_I4:
            case Js::OpCode::Shl_I4:
            case Js::OpCode::Shr_I4:
            case Js::OpCode::ShrU_I4:
            case Js::OpCode::Conv_Str:
            case Js::OpCode::Coerce_Str:
            case Js::OpCode::Coerce_Regex:
            case Js::OpCode::Coerce_StrOrRegex:
            case Js::OpCode::Conv_PrimStr:
                // These instructions don't generate -0, and their behavior is the same for any src that is -0 or +0
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                break;

            case Js::OpCode::Add_I4:
            {
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsImmediateOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsImmediateOpnd());
                if (instr->ignoreNegativeZero ||
                    (instr->GetSrc1()->IsImmediateOpnd() && instr->GetSrc1()->GetImmediateValue(func) != 0) ||
                    (instr->GetSrc2()->IsImmediateOpnd() && instr->GetSrc2()->GetImmediateValue(func) != 0))
                {
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                    break;
                }
                // -0 + -0 == -0. As long as one src is guaranteed to not be -0, -0 does not matter for the other src. Pick a
                // src for which to ignore negative zero, based on which sym is last-use. If both syms are last-use, src2 is
                // picked arbitrarily.
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());
                if (tag == Js::DeadStorePhase)
                {
                    if (instr->GetSrc2()->IsRegOpnd() &&
                        !currentBlock->upwardExposedUses->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))
                    {
                        SetCouldRemoveNegZeroBailoutForDefIfLastUse(instr->GetSrc2());
                    }
                    else
                    {
                        SetCouldRemoveNegZeroBailoutForDefIfLastUse(instr->GetSrc1());
                    }
                }
                break;
            }

            case Js::OpCode::Add_A:
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsAddrOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsAddrOpnd());
                if(instr->ignoreNegativeZero || instr->GetSrc1()->IsAddrOpnd() || instr->GetSrc2()->IsAddrOpnd())
                {
                    // -0 does not matter for dst, or this instruction does not generate -0 since one of the srcs is not -0
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                    break;
                }
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());
                break;

            case Js::OpCode::Sub_I4:
            {
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsImmediateOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsImmediateOpnd());
                if (instr->ignoreNegativeZero ||
                    (instr->GetSrc1()->IsImmediateOpnd() && instr->GetSrc1()->GetImmediateValue(func) != 0) ||
                    (instr->GetSrc2()->IsImmediateOpnd() && instr->GetSrc2()->GetImmediateValue(func) != 0))
                {
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                }
                else
                {
                    goto NegativeZero_Sub_Default;
                }
                break;
            }

            case Js::OpCode::Sub_A:
                Assert(dstSym);
                Assert(instr->GetSrc1());
                Assert(instr->GetSrc1()->IsRegOpnd() || instr->GetSrc1()->IsAddrOpnd());
                Assert(instr->GetSrc2());
                Assert(instr->GetSrc2()->IsRegOpnd() || instr->GetSrc2()->IsAddrOpnd() || instr->GetSrc2()->IsIntConstOpnd());
                if(instr->ignoreNegativeZero ||
                    instr->GetSrc1()->IsAddrOpnd() ||
                    (
                        instr->GetSrc2()->IsAddrOpnd() &&
                        instr->GetSrc2()->AsAddrOpnd()->IsVar() &&
                        Js::TaggedInt::ToInt32(instr->GetSrc2()->AsAddrOpnd()->m_address) != 0
                    ))
                {
                    // At least one of the following is true:
                    //     - -0 does not matter for dst
                    //     - Src1 is not -0, and so this instruction cannot generate -0
                    //     - Src2 is a nonzero tagged int constant, and so this instruction cannot generate -0
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1());
                    SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                    break;
                }
                // fall-through

            NegativeZero_Sub_Default:
                // -0 - 0 == -0. As long as src1 is guaranteed to not be -0, -0 does not matter for src2.
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());
                if (this->tag == Js::DeadStorePhase)
                {
                    SetCouldRemoveNegZeroBailoutForDefIfLastUse(instr->GetSrc2());
                }
                break;

            case Js::OpCode::BrEq_I4:
            case Js::OpCode::BrTrue_I4:
            case Js::OpCode::BrFalse_I4:
            case Js::OpCode::BrGe_I4:
            case Js::OpCode::BrUnGe_I4:
            case Js::OpCode::BrGt_I4:
            case Js::OpCode::BrUnGt_I4:
            case Js::OpCode::BrLt_I4:
            case Js::OpCode::BrUnLt_I4:
            case Js::OpCode::BrLe_I4:
            case Js::OpCode::BrUnLe_I4:
            case Js::OpCode::BrNeq_I4:
                // Int-specialized branches may prove that one of the src must be zero purely based on the int range, in which
                // case they rely on prior -0 bailouts to guarantee that the src cannot be -0. So, consider that -0 matters for
                // the srcs.
                // fall-through

            case Js::OpCode::InlineMathAtan2:
                // Atan(y,x) - signs of y, x is used to determine the quadrant of the result
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroMatters(instr->GetSrc2());
                break;

            case Js::OpCode::Expo_A:
            case Js::OpCode::InlineMathPow:
                // Negative zero matters for src1
                //   Pow( 0, <neg>) is  Infinity
                //   Pow(-0, <neg>) is -Infinity
                SetNegativeZeroMatters(instr->GetSrc1());
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc2());
                break;

            case Js::OpCode::LdElemI_A:
                // There is an implicit ToString on the index operand, which doesn't differentiate -0 from +0
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd());
                break;

            case Js::OpCode::StElemI_A:
            case Js::OpCode::StElemI_A_Strict:
                // There is an implicit ToString on the index operand, which doesn't differentiate -0 from +0
                SetNegativeZeroDoesNotMatterIfLastUse(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd());
                break;
        }
    }
    // Int overflow tracking
    if(!trackIntOverflow)
    {
        return;
    }
    switch(opcode)
    {
        // Instructions that can cause src values to escape the local scope have already been excluded
        default:
            // Unlike the -0 tracking, we use an inclusion list of op-codes for overflow tracking rather than an exclusion list.
            // Assume for any instructions other than those listed above, that int-overflowed values in the srcs are
            // insufficient.
            ignoreIntOverflowCandidate = false;
            // fall-through
        case Js::OpCode::Incr_A:
        case Js::OpCode::Decr_A:
        case Js::OpCode::Add_A:
        case Js::OpCode::Sub_A:
            // The sources are not guaranteed to be converted to int32. Let the compounded int overflow tracking handle this.
            SetIntOverflowMatters(instr->GetSrc1());
            SetIntOverflowMatters(instr->GetSrc2());
            break;
        case Js::OpCode::Mul_A:
            if (trackNon32BitOverflow)
            {
                // A Mul that can ignore 32-bit overflow still has to bail out on
                // 53-bit (double-precision) overflow.
                if (ignoreIntOverflowCandidate)
                    instr->ignoreOverflowBitCount = 53;
            }
            else
            {
                ignoreIntOverflowCandidate = false;
            }
            SetIntOverflowMatters(instr->GetSrc1());
            SetIntOverflowMatters(instr->GetSrc2());
            break;
        case Js::OpCode::Neg_A:
        case Js::OpCode::Ld_A:
        case Js::OpCode::Conv_Num:
        case Js::OpCode::ShrU_A:
            if(!ignoreIntOverflowCandidate)
            {
                // Int overflow matters for dst, so int overflow also matters for srcs
                SetIntOverflowMatters(instr->GetSrc1());
                SetIntOverflowMatters(instr->GetSrc2());
                break;
            }
            // fall-through
        case Js::OpCode::Not_A:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
            // These instructions convert their srcs to int32s, and hence don't care about int-overflowed values in the srcs (as
            // long as the overflowed values did not overflow the 53 bits that 'double' values have to precisely represent
            // ints). ShrU_A is not included here because it converts its srcs to uint32 rather than int32, so it would make a
            // difference if the srcs have int32-overflowed values.
            SetIntOverflowDoesNotMatterIfLastUse(instr->GetSrc1());
            SetIntOverflowDoesNotMatterIfLastUse(instr->GetSrc2());
            break;
    }
    if(ignoreIntOverflowCandidate)
    {
        instr->ignoreIntOverflow = true;
    }
    // Compounded int overflow tracking
    if(!trackCompoundedIntOverflow)
    {
        return;
    }
    if(instr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
    {
        // The forward pass may need to insert conversions with bailouts before the first instruction in the range. Since this
        // instruction does not have a valid byte code offset for bailout purposes, end the current range now.
        instr->ignoreIntOverflowInRange = false;
        SetIntOverflowMattersInRange(instr->GetSrc1());
        SetIntOverflowMattersInRange(instr->GetSrc2());
        EndIntOverflowDoesNotMatterRange();
        return;
    }
    if(ignoreIntOverflowCandidate)
    {
        instr->ignoreIntOverflowInRange = true;
        if(dstSym)
        {
            dstSym->scratch.globOpt.numCompoundedAddSubUses = 0;
        }
    }
    bool lossy = false;
    switch(opcode)
    {
        // Instructions that can cause src values to escape the local scope have already been excluded
        case Js::OpCode::Incr_A:
        case Js::OpCode::Decr_A:
        case Js::OpCode::Add_A:
        case Js::OpCode::Sub_A:
        {
            if(!instr->ignoreIntOverflowInRange)
            {
                // Int overflow matters for dst, so int overflow also matters for srcs
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            AnalysisAssert(dstSym);
            // The number of compounded add/sub uses of each src is at least the number of compounded add/sub uses of the dst,
            // + 1 for the current instruction
            Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
            Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
            const int addSubUses = dstSym->scratch.globOpt.numCompoundedAddSubUses + 1;
            if(addSubUses > MaxCompoundedUsesInAddSubForIgnoringIntOverflow)
            {
                // There are too many compounded add/sub uses of the srcs. There is a possibility that combined, the number
                // eventually overflows the 53 bits that 'double' values have to precisely represent ints
                instr->ignoreIntOverflowInRange = false;
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            TransferCompoundedAddSubUsesToSrcs(instr, addSubUses);
            break;
        }

        case Js::OpCode::Neg_A:
        case Js::OpCode::Ld_A:
        case Js::OpCode::Conv_Num:
        case Js::OpCode::ShrU_A:
        {
            if(!instr->ignoreIntOverflowInRange)
            {
                // Int overflow matters for dst, so int overflow also matters for srcs
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            AnalysisAssert(dstSym);
            TransferCompoundedAddSubUsesToSrcs(instr, dstSym->scratch.globOpt.numCompoundedAddSubUses);
            lossy = opcode == Js::OpCode::ShrU_A;
            break;
        }

        case Js::OpCode::Not_A:
        case Js::OpCode::And_A:
        case Js::OpCode::Or_A:
        case Js::OpCode::Xor_A:
        case Js::OpCode::Shl_A:
        case Js::OpCode::Shr_A:
            // These instructions convert their srcs to int32s, and hence don't care about int-overflowed values in the srcs (as
            // long as the overflowed values did not overflow the 53 bits that 'double' values have to precisely represent
            // ints). ShrU_A is not included here because it converts its srcs to uint32 rather than int32, so it would make a
            // difference if the srcs have int32-overflowed values.
            instr->ignoreIntOverflowInRange = true;
            lossy = true;
            SetIntOverflowDoesNotMatterInRangeIfLastUse(instr->GetSrc1(), 0);
            SetIntOverflowDoesNotMatterInRangeIfLastUse(instr->GetSrc2(), 0);
            break;

        case Js::OpCode::LdSlotArr:
        case Js::OpCode::LdSlot:
        {
            Assert(dstSym);
            Assert(!instr->GetSrc2()); // at the moment, this list contains only unary operations

            if(intOverflowCurrentlyMattersInRange)
            {
                // These instructions will not begin a range, so just return. They don't begin a range because their initial
                // value may not be available until after the instruction is processed in the forward pass.
                Assert(!instr->ignoreIntOverflowInRange);
                return;
            }
            Assert(currentBlock->intOverflowDoesNotMatterRange);

            // Int overflow does not matter for dst, so the srcs need to be tracked as inputs into the region of
            // instructions where int overflow does not matter. Since these instructions will not begin or end a range, they
            // are tracked in separate candidates bit-vectors and once we have confirmed that they don't begin the range,
            // they will be transferred to 'SymsRequiredToBe[Lossy]Int'. Furthermore, once this instruction is included in
            // the range, its dst sym has to be removed. Since this instructions may not be included in the range, add the
            // dst sym to the candidates bit-vectors. If they are included, the process of transferring will remove the dst
            // syms and add the src syms.

            // Remove the dst using the candidate bit-vectors
            Assert(
                !instr->ignoreIntOverflowInRange ||
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(dstSym->m_id));
            if(instr->ignoreIntOverflowInRange ||
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(dstSym->m_id))
            {
                candidateSymsRequiredToBeInt->Set(dstSym->m_id);
                if(currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(dstSym->m_id))
                {
                    candidateSymsRequiredToBeLossyInt->Set(dstSym->m_id);
                }
            }
            if(!instr->ignoreIntOverflowInRange)
            {
                // These instructions will not end a range, so just return. They may be included in the middle of a range, but
                // since int overflow matters for the dst, the src does not need to be counted as an input into the range.
                return;
            }
            instr->ignoreIntOverflowInRange = false;

            // Add the src using the candidate bit-vectors. The src property sym may already be included in the range or as
            // a candidate. The xor of the final bit-vector with the candidate is the set of syms required to be int,
            // assuming all instructions up to and not including this one are included in the range.
            const SymID srcSymId = instr->GetSrc1()->AsSymOpnd()->m_sym->m_id;
            const bool srcIncluded =
                !!currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(srcSymId) ^
                !!candidateSymsRequiredToBeInt->Test(srcSymId);
            const bool srcIncludedAsLossy =
                srcIncluded &&
                !!currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(srcSymId) ^
                !!candidateSymsRequiredToBeLossyInt->Test(srcSymId);
            const bool srcNeedsToBeLossless =
                !currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(dstSym->m_id) ||
                (srcIncluded && !srcIncludedAsLossy);
            if(srcIncluded)
            {
                if(srcIncludedAsLossy && srcNeedsToBeLossless)
                {
                    candidateSymsRequiredToBeLossyInt->Compliment(srcSymId);
                }
            }
            else
            {
                candidateSymsRequiredToBeInt->Compliment(srcSymId);
                if(!srcNeedsToBeLossless)
                {
                    candidateSymsRequiredToBeLossyInt->Compliment(srcSymId);
                }
            }

            // These instructions will not end a range, so just return. They may be included in the middle of a range, and the
            // src has been included as a candidate input into the range.
            return;
        }

        case Js::OpCode::Mul_A:
            if (trackNon32BitOverflow)
            {
                // MULs will always be at the start of a range. Either included in the range if int32 overflow is ignored, or excluded if int32 overflow matters. Even if int32 can be ignored, MULs can still bailout on 53-bit.
                // That's why it cannot be in the middle of a range.
                if (instr->ignoreIntOverflowInRange)
                {
                    AnalysisAssert(dstSym);
                    Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
                    Assert(dstSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
                    instr->ignoreOverflowBitCount = (uint8) (53 - dstSym->scratch.globOpt.numCompoundedAddSubUses);

                    // We have the max number of compounded adds/subs. 32-bit overflow cannot be ignored.
                    if (instr->ignoreOverflowBitCount == 32)
                    {
                        instr->ignoreIntOverflowInRange = false;
                    }
                }
                SetIntOverflowMattersInRange(instr->GetSrc1());
                SetIntOverflowMattersInRange(instr->GetSrc2());
                break;
            }
            // fall-through

        default:
            // Unlike the -0 tracking, we use an inclusion list of op-codes for overflow tracking rather than an exclusion list.
            // Assume for any instructions other than those listed above, that int-overflowed values in the srcs are
            // insufficient.
            instr->ignoreIntOverflowInRange = false;
            SetIntOverflowMattersInRange(instr->GetSrc1());
            SetIntOverflowMattersInRange(instr->GetSrc2());
            break;
    }

    if(!instr->ignoreIntOverflowInRange)
    {
        EndIntOverflowDoesNotMatterRange();
        return;
    }

    if(intOverflowCurrentlyMattersInRange)
    {
        // This is the last instruction in a new range of instructions where int overflow does not matter
        intOverflowCurrentlyMattersInRange = false;
        IR::Instr *const boundaryInstr = IR::PragmaInstr::New(Js::OpCode::NoIntOverflowBoundary, 0, instr->m_func);
        boundaryInstr->SetByteCodeOffset(instr);
        currentBlock->InsertInstrAfter(boundaryInstr, instr);
        currentBlock->intOverflowDoesNotMatterRange =
            IntOverflowDoesNotMatterRange::New(
                globOpt->alloc,
                instr,
                boundaryInstr,
                currentBlock->intOverflowDoesNotMatterRange);
    }
    else
    {
        Assert(currentBlock->intOverflowDoesNotMatterRange);

        // Extend the current range of instructions where int overflow does not matter, to include this instruction. We also need to
        // include the tracked syms for instructions that have not yet been included in the range, which are tracked in the range's
        // bit-vector. 'SymsRequiredToBeInt' will contain both the dst and src syms of instructions not yet included in the range;
        // the xor will remove the dst syms and add the src syms.
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Xor(candidateSymsRequiredToBeInt);
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Xor(candidateSymsRequiredToBeLossyInt);
        candidateSymsRequiredToBeInt->ClearAll();
        candidateSymsRequiredToBeLossyInt->ClearAll();
        currentBlock->intOverflowDoesNotMatterRange->SetFirstInstr(instr);
    }

    // Track syms that are inputs into the range based on the current instruction, which was just added to the range. The dst
    // sym is obtaining a new value so it isn't required to be an int at the start of the range, but the srcs are.
    if(dstSym)
    {
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Clear(dstSym->m_id);
        currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(dstSym->m_id);
    }
    IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
    for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]) && srcs[i]; ++i)
    {
        StackSym *srcSym = IR::RegOpnd::TryGetStackSym(srcs[i]);
        if(!srcSym)
        {
            continue;
        }
        if(currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->TestAndSet(srcSym->m_id))
        {
            if(!lossy)
            {
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(srcSym->m_id);
            }
        }
        else if(lossy)
        {
            currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(srcSym->m_id);
        }
    }

    // If the last instruction included in the range is a MUL, we have to end the range.
    // MULs with ignoreIntOverflow can still bailout on 53-bit overflow, so they cannot be in the middle of a range
    if (trackNon32BitOverflow && instr->m_opcode == Js::OpCode::Mul_A)
    {
        // range would have ended already if int32 overflow matters
        Assert(instr->ignoreIntOverflowInRange && instr->ignoreOverflowBitCount != 32);
        EndIntOverflowDoesNotMatterRange();
    }
}
  6634. void
  6635. BackwardPass::SetNegativeZeroDoesNotMatterIfLastUse(IR::Opnd *const opnd)
  6636. {
  6637. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  6638. if(stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  6639. {
  6640. negativeZeroDoesNotMatterBySymId->Set(stackSym->m_id);
  6641. }
  6642. }
  6643. void
  6644. BackwardPass::SetNegativeZeroMatters(IR::Opnd *const opnd)
  6645. {
  6646. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  6647. if(stackSym)
  6648. {
  6649. negativeZeroDoesNotMatterBySymId->Clear(stackSym->m_id);
  6650. }
  6651. }
  6652. void
  6653. BackwardPass::SetCouldRemoveNegZeroBailoutForDefIfLastUse(IR::Opnd *const opnd)
  6654. {
  6655. StackSym * stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  6656. if (stackSym && !this->currentBlock->upwardExposedUses->Test(stackSym->m_id))
  6657. {
  6658. this->currentBlock->couldRemoveNegZeroBailoutForDef->Set(stackSym->m_id);
  6659. }
  6660. }
  6661. void
  6662. BackwardPass::SetIntOverflowDoesNotMatterIfLastUse(IR::Opnd *const opnd)
  6663. {
  6664. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  6665. if(stackSym && !currentBlock->upwardExposedUses->Test(stackSym->m_id))
  6666. {
  6667. intOverflowDoesNotMatterBySymId->Set(stackSym->m_id);
  6668. }
  6669. }
  6670. void
  6671. BackwardPass::SetIntOverflowMatters(IR::Opnd *const opnd)
  6672. {
  6673. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  6674. if(stackSym)
  6675. {
  6676. intOverflowDoesNotMatterBySymId->Clear(stackSym->m_id);
  6677. }
  6678. }
  6679. bool
  6680. BackwardPass::SetIntOverflowDoesNotMatterInRangeIfLastUse(IR::Opnd *const opnd, const int addSubUses)
  6681. {
  6682. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  6683. return stackSym && SetIntOverflowDoesNotMatterInRangeIfLastUse(stackSym, addSubUses);
  6684. }
  6685. bool
  6686. BackwardPass::SetIntOverflowDoesNotMatterInRangeIfLastUse(StackSym *const stackSym, const int addSubUses)
  6687. {
  6688. Assert(stackSym);
  6689. Assert(addSubUses >= 0);
  6690. Assert(addSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  6691. if(currentBlock->upwardExposedUses->Test(stackSym->m_id))
  6692. {
  6693. return false;
  6694. }
  6695. intOverflowDoesNotMatterInRangeBySymId->Set(stackSym->m_id);
  6696. stackSym->scratch.globOpt.numCompoundedAddSubUses = addSubUses;
  6697. return true;
  6698. }
  6699. void
  6700. BackwardPass::SetIntOverflowMattersInRange(IR::Opnd *const opnd)
  6701. {
  6702. StackSym *const stackSym = IR::RegOpnd::TryGetStackSym(opnd);
  6703. if(stackSym)
  6704. {
  6705. intOverflowDoesNotMatterInRangeBySymId->Clear(stackSym->m_id);
  6706. }
  6707. }
  6708. void
  6709. BackwardPass::TransferCompoundedAddSubUsesToSrcs(IR::Instr *const instr, const int addSubUses)
  6710. {
  6711. Assert(instr);
  6712. Assert(addSubUses >= 0);
  6713. Assert(addSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  6714. IR::Opnd *const srcs[] = { instr->GetSrc1(), instr->GetSrc2() };
  6715. for(int i = 0; i < _countof(srcs) && srcs[i]; ++i)
  6716. {
  6717. StackSym *const srcSym = IR::RegOpnd::TryGetStackSym(srcs[i]);
  6718. if(!srcSym)
  6719. {
  6720. // Int overflow tracking is only done for StackSyms in RegOpnds. Int overflow matters for the src, so it is
  6721. // guaranteed to be in the int range at this point if the instruction is int-specialized.
  6722. continue;
  6723. }
  6724. Assert(srcSym->scratch.globOpt.numCompoundedAddSubUses >= 0);
  6725. Assert(srcSym->scratch.globOpt.numCompoundedAddSubUses <= MaxCompoundedUsesInAddSubForIgnoringIntOverflow);
  6726. if(SetIntOverflowDoesNotMatterInRangeIfLastUse(srcSym, addSubUses))
  6727. {
  6728. // This is the last use of the src
  6729. continue;
  6730. }
  6731. if(intOverflowDoesNotMatterInRangeBySymId->Test(srcSym->m_id))
  6732. {
  6733. // Since a src may be compounded through different chains of add/sub instructions, the greater number must be
  6734. // preserved
  6735. srcSym->scratch.globOpt.numCompoundedAddSubUses =
  6736. max(srcSym->scratch.globOpt.numCompoundedAddSubUses, addSubUses);
  6737. }
  6738. else
  6739. {
  6740. // Int overflow matters for the src, so it is guaranteed to be in the int range at this point if the instruction is
  6741. // int-specialized
  6742. }
  6743. }
  6744. }
void
BackwardPass::EndIntOverflowDoesNotMatterRange()
{
    // Closes the currently-open "int overflow does not matter" range in the
    // current block, inserting the leading boundary pragma (or deleting the
    // range entirely if it covers only a single instruction).
    if(intOverflowCurrentlyMattersInRange)
    {
        // No open range to end.
        return;
    }
    intOverflowCurrentlyMattersInRange = true;

    if(currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_next ==
        currentBlock->intOverflowDoesNotMatterRange->LastInstr())
    {
        // Don't need a range for a single-instruction range
        IntOverflowDoesNotMatterRange *const rangeToDelete = currentBlock->intOverflowDoesNotMatterRange;
        currentBlock->intOverflowDoesNotMatterRange = currentBlock->intOverflowDoesNotMatterRange->Next();
        // LastInstr is the trailing NoIntOverflowBoundary pragma inserted when
        // the range was created; remove it along with the range.
        currentBlock->RemoveInstr(rangeToDelete->LastInstr());
        rangeToDelete->Delete(globOpt->alloc);
    }
    else
    {
        // End the current range of instructions where int overflow does not matter
        // by inserting the opening boundary pragma before the first instruction.
        IR::Instr *const boundaryInstr =
            IR::PragmaInstr::New(
                Js::OpCode::NoIntOverflowBoundary,
                0,
                currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_func);
        boundaryInstr->SetByteCodeOffset(currentBlock->intOverflowDoesNotMatterRange->FirstInstr());
        currentBlock->InsertInstrBefore(boundaryInstr, currentBlock->intOverflowDoesNotMatterRange->FirstInstr());
        currentBlock->intOverflowDoesNotMatterRange->SetFirstInstr(boundaryInstr);

#if DBG_DUMP
        if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            Output::Print(
                _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n"),
                func->GetJITFunctionBody()->GetDisplayName(),
                func->GetDebugNumberSet(debugStringBuffer),
                Js::PhaseNames[Js::BackwardPhase],
                currentBlock->GetBlockNum());
            Output::Print(_u("    Input syms to be int-specialized (lossless): "));
            candidateSymsRequiredToBeInt->Minus(
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeInt(),
                currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()); // candidate bit-vectors are cleared below anyway
            candidateSymsRequiredToBeInt->Dump();
            Output::Print(_u("    Input syms to be converted to int (lossy): "));
            currentBlock->intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
            Output::Print(_u("    First instr: "));
            currentBlock->intOverflowDoesNotMatterRange->FirstInstr()->m_next->Dump();
            Output::Flush();
        }
#endif
    }

    // Reset candidates for the next range
    candidateSymsRequiredToBeInt->ClearAll();
    candidateSymsRequiredToBeLossyInt->ClearAll();

    // Syms are not tracked across different ranges of instructions where int overflow does not matter, since instructions
    // between the ranges may bail out. The value of the dst of an int operation where overflow is ignored is incorrect until
    // the last use of that sym is converted to int. If the int operation and the last use of the sym are in different ranges
    // and an instruction between the ranges bails out, other inputs into the second range are no longer guaranteed to be ints,
    // so the incorrect value of the sym may be used in non-int operations.
    intOverflowDoesNotMatterInRangeBySymId->ClearAll();
}
void
BackwardPass::TrackFloatSymEquivalence(IR::Instr *const instr)
{
    /*
    This function determines sets of float-specialized syms where any two syms in a set may have the same value number at some
    point in the function. Conversely, if two float-specialized syms are not in the same set, it guarantees that those two syms
    will never have the same value number. These sets are referred to as equivalence classes here.

    The equivalence class for a sym is used to determine whether a bailout FromVar generating a float value for the sym needs to
    bail out on any non-number value. For instance, for syms s1 and s5 in an equivalence class (say we have s5 = s1 at some
    point), if there's a FromVar that generates a float value for s1 but only bails out on strings or non-primitives, and s5 is
    returned from the function, it has to be ensured that s5 is not converted to Var. If the source of the FromVar was null, the
    FromVar would not have bailed out, and s1 and s5 would have the value +0. When s5 is returned, we need to return null and
    not +0, so the equivalence class is used to determine that since s5 requires a bailout on any non-number value, so does s1.

    The tracking is very conservative because the bit that says "I require bailout on any non-number value" is on the sym itself
    (referred to as non-number bailout bit below).

    Data:
    - BackwardPass::floatSymEquivalenceMap
        - hash table mapping a float sym ID to its equivalence class
    - FloatSymEquivalenceClass
        - bit vector of float sym IDs that are in the equivalence class
        - one non-number bailout bit for all syms in the equivalence class

    Algorithm:
    - In a loop prepass or when not in loop:
        - For a float sym transfer (s0.f = s1.f), add both syms to an equivalence class (set the syms in a bit vector)
        - If either sym requires bailout on any non-number value, set the equivalence class' non-number bailout bit
        - If one of the syms is already in an equivalence class, merge the two equivalence classes by OR'ing the two bit vectors
          and the non-number bailout bit.
        - Note that for functions with a loop, dependency tracking is done using equivalence classes and that information is not
          transferred back into each sym's non-number bailout bit
    - In a loop non-prepass or when not in loop, for a FromVar instruction that requires bailout only on strings and
      non-primitives:
        - If the destination float sym's non-number bailout bit is set, or the sym is in an equivalence class whose non-number
          bailout bit is set, change the bailout to bail out on any non-number value

    The result is that if a float-specialized sym's value is used in a way in which it would be invalid to use the float value
    through any other float-specialized sym that acquires the value, the FromVar generating the float value will be modified to
    bail out on any non-number value.
    */

    Assert(instr);

    // Only relevant in the dead-store phase, for single-source instructions in
    // functions that can bail out.
    if(tag != Js::DeadStorePhase || instr->GetSrc2() || !instr->m_func->hasBailout)
    {
        return;
    }

    // Both dst and src1 must be float64-specialized reg syms.
    if(!instr->GetDst() || !instr->GetDst()->IsRegOpnd())
    {
        return;
    }
    const auto dst = instr->GetDst()->AsRegOpnd()->m_sym;
    if(!dst->IsFloat64())
    {
        return;
    }
    if(!instr->GetSrc1() || !instr->GetSrc1()->IsRegOpnd())
    {
        return;
    }
    const auto src = instr->GetSrc1()->AsRegOpnd()->m_sym;

    if(OpCodeAttr::NonIntTransfer(instr->m_opcode) && (!currentBlock->loop || IsPrePass()))
    {
        // Float transfer case (see the algorithm comment above).
        Assert(src->IsFloat64()); // dst is specialized, and since this is a float transfer, src must be specialized too

        if(dst == src)
        {
            // Self-transfer adds no new equivalence information.
            return;
        }

        if(!func->m_fg->hasLoop)
        {
            // Special case for functions with no loops, since there can only be in-order dependencies. Just merge the two
            // non-number bailout bits and put the result in the source.
            if(dst->m_requiresBailOnNotNumber)
            {
                src->m_requiresBailOnNotNumber = true;
            }
            return;
        }

        FloatSymEquivalenceClass *dstEquivalenceClass = nullptr, *srcEquivalenceClass = nullptr;
        const bool dstHasEquivalenceClass = floatSymEquivalenceMap->TryGetValue(dst->m_id, &dstEquivalenceClass);
        const bool srcHasEquivalenceClass = floatSymEquivalenceMap->TryGetValue(src->m_id, &srcEquivalenceClass);

        if(!dstHasEquivalenceClass)
        {
            if(srcHasEquivalenceClass)
            {
                // Just add the destination into the source's equivalence class
                srcEquivalenceClass->Set(dst);
                floatSymEquivalenceMap->Add(dst->m_id, srcEquivalenceClass);
                return;
            }

            // Neither sym has a class yet: create one for the dst.
            dstEquivalenceClass = JitAnew(tempAlloc, FloatSymEquivalenceClass, tempAlloc);
            dstEquivalenceClass->Set(dst);
            floatSymEquivalenceMap->Add(dst->m_id, dstEquivalenceClass);
        }

        if(!srcHasEquivalenceClass)
        {
            // Just add the source into the destination's equivalence class
            dstEquivalenceClass->Set(src);
            floatSymEquivalenceMap->Add(src->m_id, dstEquivalenceClass);
            return;
        }

        if(dstEquivalenceClass == srcEquivalenceClass)
        {
            // Already in the same class; nothing to merge.
            return;
        }

        Assert(!dstEquivalenceClass->Bv()->Test(src->m_id));
        Assert(!srcEquivalenceClass->Bv()->Test(dst->m_id));

        // Merge the two equivalence classes. The source's equivalence class is typically smaller, so it's merged into the
        // destination's equivalence class. To save space and prevent a potential explosion of bit vector size,
        // 'floatSymEquivalenceMap' is updated for syms in the source's equivalence class to map to the destination's now merged
        // equivalence class, and the source's equivalence class is discarded.
        dstEquivalenceClass->Or(srcEquivalenceClass);
        FOREACH_BITSET_IN_SPARSEBV(id, srcEquivalenceClass->Bv())
        {
            floatSymEquivalenceMap->Item(id, dstEquivalenceClass);
        } NEXT_BITSET_IN_SPARSEBV;
        JitAdelete(tempAlloc, srcEquivalenceClass);

        return;
    }

    // Not a float transfer, and non-prepass (not necessarily in a loop)
    if(!instr->HasBailOutInfo() || instr->GetBailOutKind() != IR::BailOutPrimitiveButString)
    {
        return;
    }
    Assert(instr->m_opcode == Js::OpCode::FromVar);

    // If either the destination or its equivalence class says it requires bailout on any non-number value, adjust the bailout
    // kind on the instruction. Both are checked because in functions without loops, equivalence tracking is not done and only
    // the sym's non-number bailout bit will have the information, and in functions with loops, equivalence tracking is done
    // throughout the function and checking just the sym's non-number bailout bit is insufficient.
    FloatSymEquivalenceClass *dstEquivalenceClass = nullptr;
    if(dst->m_requiresBailOnNotNumber ||
        (floatSymEquivalenceMap->TryGetValue(dst->m_id, &dstEquivalenceClass) && dstEquivalenceClass->RequiresBailOnNotNumber()))
    {
        instr->SetBailOutKind(IR::BailOutNumberOnly);
    }
}
  6937. bool
  6938. BackwardPass::SymIsIntconstOrSelf(Sym *sym, IR::Opnd *opnd)
  6939. {
  6940. Assert(sym->IsStackSym());
  6941. if (!opnd->IsRegOpnd())
  6942. {
  6943. return false;
  6944. }
  6945. StackSym *opndSym = opnd->AsRegOpnd()->m_sym;
  6946. if (sym == opndSym)
  6947. {
  6948. return true;
  6949. }
  6950. if (!opndSym->IsSingleDef())
  6951. {
  6952. return false;
  6953. }
  6954. if (opndSym->GetInstrDef()->m_opcode == Js::OpCode::LdC_A_I4)
  6955. {
  6956. return true;
  6957. }
  6958. return false;
  6959. }
  6960. bool
  6961. BackwardPass::InstrPreservesNumberValues(IR::Instr *instr, Sym *defSym)
  6962. {
  6963. if (instr->m_opcode == Js::OpCode::Ld_A)
  6964. {
  6965. if (instr->GetSrc1()->IsRegOpnd())
  6966. {
  6967. IR::RegOpnd *src1 = instr->GetSrc1()->AsRegOpnd();
  6968. if (src1->m_sym->IsSingleDef())
  6969. {
  6970. instr = src1->m_sym->GetInstrDef();
  6971. }
  6972. }
  6973. }
  6974. return (OpCodeAttr::ProducesNumber(instr->m_opcode) ||
  6975. (instr->m_opcode == Js::OpCode::Add_A && this->SymIsIntconstOrSelf(defSym, instr->GetSrc1()) && this->SymIsIntconstOrSelf(defSym, instr->GetSrc2())));
  6976. }
bool
BackwardPass::ProcessDef(IR::Opnd * opnd)
{
    // Processes the def operand of the current instruction: updates liveness
    // (upward-exposed uses/fields), byte-code-restore tracking, slot dead-store
    // candidates, and per-loop "preserves number value" info; finally attempts
    // to dead-store the whole instruction if the defined sym is unused.
    // Returns true iff the instruction was removed as a dead store.
    BOOLEAN isJITOptimizedReg = false;
    Sym * sym;
    if (opnd->IsRegOpnd())
    {
        sym = opnd->AsRegOpnd()->m_sym;
        isJITOptimizedReg = opnd->GetIsJITOptimizedReg();
        if (!IsCollectionPass())
        {
            this->InvalidateCloneStrCandidate(opnd);
            if ((tag == Js::BackwardPhase) && IsPrePass())
            {
                // Loop prepass: track whether every def of this sym in the loop
                // preserves number values (used by the forward pass).
                bool firstDef = !this->currentPrePassLoop->symsAssignedToInLoop->TestAndSet(sym->m_id);
                if (firstDef)
                {
                    if (this->InstrPreservesNumberValues(this->currentInstr, sym))
                    {
                        this->currentPrePassLoop->preservesNumberValue->Set(sym->m_id);
                    }
                }
                else if (!this->InstrPreservesNumberValues(this->currentInstr, sym))
                {
                    // A later-seen def that doesn't preserve numbers invalidates the sym.
                    this->currentPrePassLoop->preservesNumberValue->Clear(sym->m_id);
                }
            }
        }
    }
    else if (opnd->IsSymOpnd())
    {
        sym = opnd->AsSymOpnd()->m_sym;
        isJITOptimizedReg = opnd->GetIsJITOptimizedReg();
    }
    else
    {
        if (opnd->IsIndirOpnd())
        {
            // An indirect dst reads its base/index operands; process as a use.
            this->ProcessUse(opnd);
        }
        return false;
    }

    BasicBlock * block = this->currentBlock;
    BOOLEAN isUsed = true;
    BOOLEAN keepSymLiveForException = false;
    BOOLEAN keepVarSymLiveForException = false;
    IR::Instr * instr = this->currentInstr;
    Assert(!instr->IsByteCodeUsesInstr());
    if (sym->IsPropertySym())
    {
        PropertySym *propertySym = sym->AsPropertySym();
        // The base object of the property is used by this def.
        ProcessStackSymUse(propertySym->m_stackSym, isJITOptimizedReg);
        if (IsCollectionPass())
        {
            return false;
        }

        if (this->DoDeadStoreSlots())
        {
            if (propertySym->m_fieldKind == PropertyKindLocalSlots || propertySym->m_fieldKind == PropertyKindSlots)
            {
                // First store seen (walking backward) for this slot makes later
                // (earlier in program order) stores dead-store candidates.
                BOOLEAN isPropertySymUsed = !block->slotDeadStoreCandidates->TestAndSet(propertySym->m_id);
                // we should not do any dead slots in asmjs loop body
                Assert(isPropertySymUsed || !block->upwardExposedUses->Test(propertySym->m_id));
                isUsed = isPropertySymUsed || block->upwardExposedUses->Test(propertySym->m_stackSym->m_id);
            }
        }
        this->DoSetDead(opnd, !block->upwardExposedFields->TestAndClear(propertySym->m_id));

        if (tag == Js::BackwardPhase)
        {
            if (opnd->AsSymOpnd()->IsPropertySymOpnd())
            {
                this->globOpt->PreparePropertySymOpndForTypeCheckSeq(opnd->AsPropertySymOpnd(), instr, this->currentBlock->loop);
            }
        }
        if (opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->ProcessPropertySymOpndUse(opnd->AsPropertySymOpnd());
        }
    }
    else
    {
        Assert(!instr->IsByteCodeUsesInstr());

        if (this->DoByteCodeUpwardExposedUsed())
        {
            if (sym->AsStackSym()->HasByteCodeRegSlot())
            {
                StackSym * varSym = sym->AsStackSym();
                if (varSym->IsTypeSpec())
                {
                    // It has to have a var version for byte code regs
                    varSym = varSym->GetVarEquivSym(nullptr);
                }

                if (this->currentRegion)
                {
                    // Write-through syms in try regions must stay restorable for bailouts.
                    keepSymLiveForException = this->CheckWriteThroughSymInRegion(this->currentRegion, sym->AsStackSym());
                    keepVarSymLiveForException = this->CheckWriteThroughSymInRegion(this->currentRegion, varSym);
                }

                if (!isJITOptimizedReg)
                {
                    if (!DoDeadStore(this->func, sym->AsStackSym()))
                    {
                        // Don't deadstore the bytecodereg sym, so that we could do write to get the locals inspection
                        if (opnd->IsRegOpnd())
                        {
                            opnd->AsRegOpnd()->m_dontDeadStore = true;
                        }
                    }

                    // write through symbols should not be cleared from the byteCodeUpwardExposedUsed BV upon defs in the Try region:
                    //      try
                    //          x =
                    //          <bailout> <-- this bailout should restore x from its first def. This would not happen if x is cleared
                    //                        from byteCodeUpwardExposedUsed when we process its second def
                    //          <exception>
                    //          x =
                    //      catch
                    //          = x
                    if (!keepVarSymLiveForException)
                    {
                        // Always track the sym use on the var sym.
                        block->byteCodeUpwardExposedUsed->Clear(varSym->m_id);
#if DBG
                        // TODO: We can only track first level function stack syms right now
                        if (varSym->GetByteCodeFunc() == this->func)
                        {
                            block->byteCodeRestoreSyms[varSym->GetByteCodeRegSlot()] = nullptr;
                        }
#endif
                    }
                }
            }
        }

        if (IsCollectionPass())
        {
            return false;
        }

        // Don't care about property sym for mark temps
        if (opnd->IsRegOpnd())
        {
            this->MarkTemp(sym->AsStackSym());
        }

        if (this->tag == Js::BackwardPhase &&
            instr->m_opcode == Js::OpCode::Ld_A &&
            instr->GetSrc1()->IsRegOpnd() &&
            block->upwardExposedFields->Test(sym->m_id))
        {
            // A copy transfers field-liveness from the dst sym to the src sym.
            block->upwardExposedFields->Set(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id);
        }

        if (!keepSymLiveForException)
        {
            // The def kills the sym's liveness; whether it had a pending use
            // decides if this instruction is a dead-store candidate.
            isUsed = block->upwardExposedUses->TestAndClear(sym->m_id);
        }
    }

    if (isUsed || !this->DoDeadStore())
    {
        return false;
    }

    // FromVar on a primitive value has no side-effects
    // TODO: There may be more cases where FromVars can be dead-stored, such as cases where they have a bailout that would bail
    // out on non-primitive vars, thereby causing no side effects anyway. However, it needs to be ensured that no assumptions
    // that depend on the bailout are made later in the function.

    // Special case StFld for trackable fields
    bool hasSideEffects = instr->HasAnySideEffects()
        && instr->m_opcode != Js::OpCode::StFld
        && instr->m_opcode != Js::OpCode::StRootFld
        && instr->m_opcode != Js::OpCode::StFldStrict
        && instr->m_opcode != Js::OpCode::StRootFldStrict;

    if (this->IsPrePass() || hasSideEffects)
    {
        return false;
    }

    if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_dontDeadStore)
    {
        return false;
    }

    if (instr->HasBailOutInfo())
    {
        // A bailout inserted for aggressive or lossy int type specialization causes assumptions to be made on the value of
        // the instruction's destination later on, as though the bailout did not happen. If the value is an int constant and
        // that value is propagated forward, it can cause the bailout instruction to become a dead store and be removed,
        // thereby invalidating the assumptions made. Or for lossy int type specialization, the lossy conversion to int32
        // may have side effects and so cannot be dead-store-removed. As one way of solving that problem, bailout
        // instructions resulting from aggressive or lossy int type spec are not dead-stored.

        const auto bailOutKind = instr->GetBailOutKind();
        if(bailOutKind & IR::BailOutOnResultConditions)
        {
            return false;
        }
        switch(bailOutKind & ~IR::BailOutKindBits)
        {
            case IR::BailOutIntOnly:
            case IR::BailOutNumberOnly:
            case IR::BailOutExpectingInteger:
            case IR::BailOutPrimitiveButString:
            case IR::BailOutExpectingString:
            case IR::BailOutOnNotPrimitive:
            case IR::BailOutFailedInlineTypeCheck:
            case IR::BailOutOnFloor:
            case IR::BailOnModByPowerOf2:
            case IR::BailOnDivResultNotInt:
            case IR::BailOnIntMin:
                return false;
        }
    }

    // Dead store
    DeadStoreInstr(instr);
    return true;
}
bool
BackwardPass::DeadStoreInstr(IR::Instr *instr)
{
    // Removes 'instr' from the current block as a dead store, after recording
    // the byte-code syms it used (so bailouts can still restore them) and
    // updating related bookkeeping. Always returns true.
    BasicBlock * block = this->currentBlock;

#if DBG_DUMP
    if (this->IsTraceEnabled())
    {
        Output::Print(_u("Deadstore instr: "));
        instr->Dump();
    }
    this->numDeadStore++;
#endif

    // Before we remove the dead store, we need to track the byte code uses
    if (this->DoByteCodeUpwardExposedUsed())
    {
#if DBG
        // Snapshot the BV so the newly-added syms can be identified below.
        BVSparse<JitArenaAllocator> tempBv(this->tempAlloc);
        tempBv.Copy(this->currentBlock->byteCodeUpwardExposedUsed);
#endif
        PropertySym *unusedPropertySym = nullptr;
        GlobOpt::TrackByteCodeSymUsed(instr, this->currentBlock->byteCodeUpwardExposedUsed, &unusedPropertySym);
#if DBG
        // For each sym that just became byte-code upward-exposed, record it as
        // the active restore sym for its byte code register slot.
        BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
        tempBv2.Copy(this->currentBlock->byteCodeUpwardExposedUsed);
        tempBv2.Minus(&tempBv);
        FOREACH_BITSET_IN_SPARSEBV(symId, &tempBv2)
        {
            StackSym * stackSym = this->func->m_symTable->FindStackSym(symId);
            Assert(stackSym->GetType() == TyVar);
            // TODO: We can only track first level function stack syms right now
            if (stackSym->GetByteCodeFunc() == this->func)
            {
                Js::RegSlot byteCodeRegSlot = stackSym->GetByteCodeRegSlot();
                Assert(byteCodeRegSlot != Js::Constants::NoRegister);
                if (this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] != stackSym)
                {
                    AssertMsg(this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] == nullptr,
                        "Can't have two active lifetime for the same byte code register");
                    this->currentBlock->byteCodeRestoreSyms[byteCodeRegSlot] = stackSym;
                }
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
#endif
    }

    // If this is a pre-op bailout instruction, we may have saved it for bailout info processing. It's being removed now, so no
    // need to process the bailout info anymore.
    Assert(!preOpBailOutInstrToProcess || preOpBailOutInstrToProcess == instr);
    preOpBailOutInstrToProcess = nullptr;

#if DBG
    if (this->DoMarkTempObjectVerify())
    {
        this->currentBlock->tempObjectVerifyTracker->NotifyDeadStore(instr, this);
    }
#endif

    if (instr->m_opcode == Js::OpCode::ArgIn_A)
    {
        // Ignore tracking ArgIn for "this" as argInsCount only tracks other
        // params, unless it is a AsmJS function (which doesn't have a "this").
        if (instr->GetSrc1()->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum() != 1 || func->GetJITFunctionBody()->IsAsmJsMode())
        {
            Assert(this->func->argInsCount > 0);
            this->func->argInsCount--;
        }
    }

    TraceDeadStoreOfInstrsForScopeObjectRemoval();

    block->RemoveInstr(instr);
    return true;
}
void
BackwardPass::ProcessTransfers(IR::Instr * instr)
{
    // For a var-to-var copy (Ld_A) in the dead-store phase, transfer
    // upward-exposed field liveness from the dst sym's property syms to the
    // equivalent property syms of the src sym, since after the copy a field of
    // the dst aliases the corresponding field of the src.
    if (this->tag == Js::DeadStorePhase &&
        this->currentBlock->upwardExposedFields &&
        instr->m_opcode == Js::OpCode::Ld_A &&
        instr->GetDst()->GetStackSym() &&
        !instr->GetDst()->GetStackSym()->IsTypeSpec() &&
        instr->GetDst()->GetStackSym()->HasObjectInfo() &&
        instr->GetSrc1() &&
        instr->GetSrc1()->GetStackSym() &&
        !instr->GetSrc1()->GetStackSym()->IsTypeSpec() &&
        instr->GetSrc1()->GetStackSym()->HasObjectInfo())
    {
        StackSym * dstStackSym = instr->GetDst()->GetStackSym();
        PropertySym * dstPropertySym = dstStackSym->GetObjectInfo()->m_propertySymList;
        // Collect the ids of all property syms hanging off the dst stack sym.
        BVSparse<JitArenaAllocator> transferFields(this->tempAlloc);
        while (dstPropertySym != nullptr)
        {
            Assert(dstPropertySym->m_stackSym == dstStackSym);
            transferFields.Set(dstPropertySym->m_id);
            dstPropertySym = dstPropertySym->m_nextInStackSymList;
        }

        // Walk the src's property syms; for each one whose equiv-set overlaps
        // the remaining dst fields, mark it upward-exposed and remove the
        // matched fields from the pending transfer set.
        StackSym * srcStackSym = instr->GetSrc1()->GetStackSym();
        PropertySym * srcPropertySym = srcStackSym->GetObjectInfo()->m_propertySymList;
        BVSparse<JitArenaAllocator> equivFields(this->tempAlloc);

        while (srcPropertySym != nullptr && !transferFields.IsEmpty())
        {
            Assert(srcPropertySym->m_stackSym == srcStackSym);
            if (srcPropertySym->m_propertyEquivSet)
            {
                equivFields.And(&transferFields, srcPropertySym->m_propertyEquivSet);
                if (!equivFields.IsEmpty())
                {
                    transferFields.Minus(&equivFields);
                    this->currentBlock->upwardExposedFields->Set(srcPropertySym->m_id);
                }
            }
            srcPropertySym = srcPropertySym->m_nextInStackSymList;
        }
    }
}
  7294. void
  7295. BackwardPass::ProcessFieldKills(IR::Instr * instr)
  7296. {
  7297. if (this->currentBlock->upwardExposedFields)
  7298. {
  7299. this->globOpt->ProcessFieldKills(instr, this->currentBlock->upwardExposedFields, false);
  7300. }
  7301. this->ClearBucketsOnFieldKill(instr, currentBlock->stackSymToFinalType);
  7302. this->ClearBucketsOnFieldKill(instr, currentBlock->stackSymToGuardedProperties);
  7303. }
  7304. template<typename T>
  7305. void
  7306. BackwardPass::ClearBucketsOnFieldKill(IR::Instr *instr, HashTable<T> *table)
  7307. {
  7308. if (table)
  7309. {
  7310. if (instr->UsesAllFields())
  7311. {
  7312. table->ClearAll();
  7313. }
  7314. else
  7315. {
  7316. IR::Opnd *dst = instr->GetDst();
  7317. if (dst && dst->IsRegOpnd())
  7318. {
  7319. table->Clear(dst->AsRegOpnd()->m_sym->m_id);
  7320. }
  7321. }
  7322. }
  7323. }
  7324. bool
  7325. BackwardPass::TrackNoImplicitCallInlinees(IR::Instr *instr)
  7326. {
  7327. if (this->tag != Js::DeadStorePhase || this->IsPrePass())
  7328. {
  7329. return false;
  7330. }
  7331. if (instr->HasBailOutInfo()
  7332. || OpCodeAttr::CallInstr(instr->m_opcode)
  7333. || instr->CallsAccessor()
  7334. || GlobOpt::MayNeedBailOnImplicitCall(instr, nullptr, nullptr)
  7335. || instr->HasAnyLoadHeapArgsOpCode()
  7336. || instr->m_opcode == Js::OpCode::LdFuncExpr)
  7337. {
  7338. // This func has instrs with bailouts or implicit calls
  7339. Assert(instr->m_opcode != Js::OpCode::InlineeStart);
  7340. instr->m_func->SetHasImplicitCallsOnSelfAndParents();
  7341. return false;
  7342. }
  7343. if (instr->m_opcode == Js::OpCode::InlineeStart)
  7344. {
  7345. if (!instr->GetSrc1())
  7346. {
  7347. Assert(instr->m_func->m_hasInlineArgsOpt);
  7348. return false;
  7349. }
  7350. return this->ProcessInlineeStart(instr);
  7351. }
  7352. return false;
  7353. }
bool
BackwardPass::ProcessInlineeStart(IR::Instr* inlineeStart)
{
    // Tries to strip the inlining overhead for this inlinee: the ArgOut chain,
    // the meta args, and the InlineeStart's own operands. Returns true when the
    // overhead was removed, false when it must be kept.
    inlineeStart->m_func->SetFirstArgOffset(inlineeStart);

    IR::Instr* startCallInstr = nullptr;
    // NOTE(review): startCallInstr and noImplicitCallsInInlinee are written but
    // never read in this block; they appear to be retained for historical reasons.
    bool noImplicitCallsInInlinee = false;

    // Inlinee has no bailouts or implicit calls. Get rid of the inline overhead.
    auto removeInstr = [&](IR::Instr* argInstr)
    {
        Assert(argInstr->m_opcode == Js::OpCode::InlineeStart || argInstr->m_opcode == Js::OpCode::ArgOut_A || argInstr->m_opcode == Js::OpCode::ArgOut_A_Inline);
        IR::Opnd *opnd = argInstr->GetSrc1();
        StackSym *sym = opnd->GetStackSym();
        if (!opnd->GetIsJITOptimizedReg() && sym && sym->HasByteCodeRegSlot())
        {
            // Replace instrs with bytecodeUses so bailout can still restore the
            // bytecode register the removed instruction referenced.
            IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(argInstr);
            bytecodeUse->Set(opnd);
            argInstr->InsertBefore(bytecodeUse);
        }
        startCallInstr = argInstr->GetSrc2()->GetStackSym()->m_instrDef;
        FlowGraph::SafeRemoveInstr(argInstr);
        return false;
    };

    // If there are no implicit calls - bailouts/throws - we can remove all inlining overhead.
    if (!inlineeStart->m_func->GetHasImplicitCalls())
    {
        noImplicitCallsInInlinee = true;
        inlineeStart->IterateArgInstrs(removeInstr);

        // All meta args can go too: nothing can bail out to need them.
        inlineeStart->IterateMetaArgs([](IR::Instr* metArg)
        {
            FlowGraph::SafeRemoveInstr(metArg);
            return false;
        });
        inlineeStart->m_func->m_hasInlineArgsOpt = false;
        removeInstr(inlineeStart);
        return true;
    }

    if (!inlineeStart->m_func->m_hasInlineArgsOpt)
    {
        // Inline-args opt is off for this inlinee; trace why and keep overhead.
        PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, _u("%s[%d]: Skipping inline args optimization: %s[%d] HasCalls: %s, 'arguments' access: %s, stackArgs enabled: %s, Can do inlinee args opt: %s\n"),
            func->GetJITFunctionBody()->GetDisplayName(), func->GetJITFunctionBody()->GetFunctionNumber(),
            inlineeStart->m_func->GetJITFunctionBody()->GetDisplayName(), inlineeStart->m_func->GetJITFunctionBody()->GetFunctionNumber(),
            IsTrueOrFalse(inlineeStart->m_func->GetHasCalls()),
            IsTrueOrFalse(inlineeStart->m_func->GetHasUnoptimizedArgumentsAccess()),
            IsTrueOrFalse(inlineeStart->m_func->IsStackArgsEnabled()),
            IsTrueOrFalse(inlineeStart->m_func->m_canDoInlineArgsOpt));
        return false;
    }

    if (!inlineeStart->m_func->frameInfo->isRecorded)
    {
        // Without a recorded frame (no InlineeEnd reached) bailout couldn't
        // reconstruct the inlinee frame, so the optimization must be disabled.
        PHASE_PRINT_TESTTRACE(Js::InlineArgsOptPhase, func, _u("%s[%d]: InlineeEnd not found - usually due to a throw or a BailOnNoProfile (stressed, most likely)\n"),
            func->GetJITFunctionBody()->GetDisplayName(), func->GetJITFunctionBody()->GetFunctionNumber());
        inlineeStart->m_func->DisableCanDoInlineArgOpt();
        return false;
    }

    // Frame info is recorded: the ArgOuts can be replaced by bytecode uses.
    inlineeStart->IterateArgInstrs(removeInstr);

    int i = 0;
    inlineeStart->IterateMetaArgs([&](IR::Instr* metaArg)
    {
        if (i == Js::Constants::InlineeMetaArgIndex_ArgumentsObject &&
            inlineeStart->m_func->GetJITFunctionBody()->UsesArgumentsObject())
        {
            Assert(!inlineeStart->m_func->GetHasUnoptimizedArgumentsAccess());
            // Do not remove arguments object meta arg if there is a reference to arguments object
        }
        else
        {
            FlowGraph::SafeRemoveInstr(metaArg);
        }
        i++;
        return false;
    });

    IR::Opnd *src1 = inlineeStart->GetSrc1();
    StackSym *sym = src1->GetStackSym();
    if (!src1->GetIsJITOptimizedReg() && sym && sym->HasByteCodeRegSlot())
    {
        // Replace instrs with bytecodeUses
        IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(inlineeStart);
        bytecodeUse->Set(src1);
        inlineeStart->InsertBefore(bytecodeUse);
    }

    // This indicates to the lowerer that this inlinee has been optimized
    // and it should not be lowered - Now this instruction is used to mark inlineeStart
    inlineeStart->FreeSrc1();
    inlineeStart->FreeSrc2();
    inlineeStart->FreeDst();
    return true;
}
void
BackwardPass::ProcessInlineeEnd(IR::Instr* instr)
{
    // Records the uses an InlineeEnd must keep alive. In the Backward phase the
    // arg syms are marked upward-exposed so copy-prop tracking doesn't drop
    // them; in the DeadStore phase the recorded frame syms are kept alive for
    // bailout restoration.
    if (this->IsPrePass())
    {
        return;
    }
    if (this->tag == Js::BackwardPhase)
    {
        // Commenting out to allow for argument length and argument[constant] optimization
        // Will revisit in phase two
        /*if (!GlobOpt::DoInlineArgsOpt(instr->m_func))
        {
            return;
        }*/

        // This adds a use for function sym as part of InlineeStart & all the syms referenced by the args.
        // It ensures they do not get cleared from the copy prop sym map.
        instr->IterateArgInstrs([=](IR::Instr* argInstr){
            if (argInstr->GetSrc1()->IsRegOpnd())
            {
                this->currentBlock->upwardExposedUses->Set(argInstr->GetSrc1()->AsRegOpnd()->m_sym->m_id);
            }
            return false;
        });
    }
    else if (this->tag == Js::DeadStorePhase)
    {
        // An inlinee that still touches the arguments object without stack args
        // cannot have its inline args optimized away.
        if (instr->m_func->GetJITFunctionBody()->UsesArgumentsObject() && !instr->m_func->IsStackArgsEnabled())
        {
            instr->m_func->DisableCanDoInlineArgOpt();
        }
        if (instr->m_func->m_hasInlineArgsOpt)
        {
            Assert(instr->m_func->frameInfo);
            // Keep every recorded frame sym upward-exposed so bailout can
            // reconstruct the inlinee frame.
            instr->m_func->frameInfo->IterateSyms([=](StackSym* argSym)
            {
                this->currentBlock->upwardExposedUses->Set(argSym->m_id);
            });
        }
    }
}
bool
BackwardPass::ProcessBailOnNoProfile(IR::Instr *instr, BasicBlock *block)
{
    // Hoists a BailOnNoProfile to the top of its block (deleting redundant ones
    // along the way) and, where every other successor of a predecessor also
    // begins with a BailOnNoProfile, propagates a copy into that predecessor.
    // Returns true when 'instr' was removed/freed, false when it was
    // re-inserted into the block.
    Assert(this->tag == Js::BackwardPhase);
    Assert(instr->m_opcode == Js::OpCode::BailOnNoProfile);
    Assert(!instr->HasBailOutInfo());
    AnalysisAssert(block);

    if (this->IsPrePass())
    {
        return false;
    }

    // Never hoist bailouts inside catch/finally regions.
    if (this->currentRegion && (this->currentRegion->GetType() == RegionTypeCatch || this->currentRegion->GetType() == RegionTypeFinally))
    {
        return false;
    }

    IR::Instr *curInstr = instr->m_prev;

    if (curInstr->IsLabelInstr() && curInstr->AsLabelInstr()->isOpHelper)
    {
        // Already processed

        if (this->DoMarkTempObjects())
        {
            block->tempObjectTracker->ProcessBailOnNoProfile(instr);
        }
        return false;
    }

    // For generator functions, we don't want to move the BailOutOnNoProfile above
    // certain instructions such as ResumeYield/ResumeYieldStar/CreateInterpreterStackFrameForGenerator
    // This indicates the insertion point for the BailOutOnNoProfile in such cases.
    IR::Instr *insertionPointForGenerator = nullptr;

    // Don't hoist if we see calls with profile data (recursive calls)
    while(!curInstr->StartsBasicBlock())
    {
        if (curInstr->DontHoistBailOnNoProfileAboveInGeneratorFunction())
        {
            Assert(insertionPointForGenerator == nullptr);
            insertionPointForGenerator = curInstr;
        }

        // If a function was inlined, it must have had profile info.
        if (curInstr->m_opcode == Js::OpCode::InlineeEnd || curInstr->m_opcode == Js::OpCode::InlineBuiltInEnd || curInstr->m_opcode == Js::OpCode::InlineNonTrackingBuiltInEnd
            || curInstr->m_opcode == Js::OpCode::InlineeStart || curInstr->m_opcode == Js::OpCode::EndCallForPolymorphicInlinee)
        {
            break;
        }
        else if (OpCodeAttr::CallInstr(curInstr->m_opcode))
        {
            // A call not immediately preceded by a BailOnNoProfile stops hoisting.
            if (curInstr->m_prev->m_opcode != Js::OpCode::BailOnNoProfile)
            {
                break;
            }
        }
        curInstr = curInstr->m_prev;
    }

    // Didn't get to the top of the block, delete this BailOnNoProfile.
    if (!curInstr->IsLabelInstr())
    {
        block->RemoveInstr(instr);
        return true;
    }

    // Save the head instruction for later use.
    IR::LabelInstr *blockHeadInstr = curInstr->AsLabelInstr();

    // We can't bail in the middle of a "tmp = CmEq s1, s2; BrTrue tmp" turned into a "BrEq s1, s2",
    // because the bailout wouldn't be able to restore tmp.
    IR::Instr *curNext = curInstr->GetNextRealInstrOrLabel();
    IR::Instr *instrNope = nullptr;
    if (curNext->m_opcode == Js::OpCode::Ld_A && curNext->GetDst()->IsRegOpnd() && curNext->GetDst()->AsRegOpnd()->m_fgPeepTmp)
    {
        block->RemoveInstr(instr);
        return true;
        /*while (curNext->m_opcode == Js::OpCode::Ld_A && curNext->GetDst()->IsRegOpnd() && curNext->GetDst()->AsRegOpnd()->m_fgPeepTmp)
        {
            // Instead of just giving up, we can be a little trickier. We can instead treat the tmp declaration(s) as a
            // part of the block prefix, and put the bailonnoprofile immediately after them. This has the added benefit
            // that we can still merge up blocks beginning with bailonnoprofile, even if they would otherwise not allow
            // us to, due to the fact that these tmp declarations would be pre-empted by the higher-level bailout.
            instrNope = curNext;
            curNext = curNext->GetNextRealInstrOrLabel();
        }*/
    }

    curInstr = instr->m_prev;

    // Move to top of block (but just below any fgpeeptemp lds).
    while(!curInstr->StartsBasicBlock() && curInstr != instrNope)
    {
        // Delete redundant BailOnNoProfile
        if (curInstr->m_opcode == Js::OpCode::BailOnNoProfile)
        {
            Assert(!curInstr->HasBailOutInfo());
            curInstr = curInstr->m_next;
            curInstr->m_prev->Remove();
        }
        curInstr = curInstr->m_prev;
    }

    if (instr == block->GetLastInstr())
    {
        block->SetLastInstr(instr->m_prev);
    }

    instr->Unlink();

    // Now try to move this up the flowgraph to the predecessor blocks
    FOREACH_PREDECESSOR_BLOCK(pred, block)
    {
        // Don't hoist BailOnNoProfile up past blocks containing ResumeYield/ResumeYieldStar
        bool hoistBailToPred = (insertionPointForGenerator == nullptr);

        if (block->isLoopHeader && pred->loop == block->loop)
        {
            // Skip loop back-edges
            continue;
        }

        // Never hoist across an EH region boundary.
        if (pred->GetFirstInstr()->AsLabelInstr()->GetRegion() != this->currentRegion)
        {
            break;
        }

        // If all successors of this predecessor start with a BailOnNoProfile, we should be
        // okay to hoist this bail to the predecessor.
        FOREACH_SUCCESSOR_BLOCK(predSucc, pred)
        {
            if (predSucc == block)
            {
                continue;
            }
            if (!predSucc->beginsBailOnNoProfile)
            {
                hoistBailToPred = false;
                break;
            }
        } NEXT_SUCCESSOR_BLOCK;

        if (hoistBailToPred)
        {
            IR::Instr *predInstr = pred->GetLastInstr();
            IR::Instr *instrCopy = instr->Copy();

            if (predInstr->EndsBasicBlock())
            {
                if (predInstr->m_prev->m_opcode == Js::OpCode::BailOnNoProfile)
                {
                    // We already have one, we don't need a second.
                    instrCopy->Free();
                }
                else if (!predInstr->AsBranchInstr()->m_isSwitchBr)
                {
                    // Don't put a bailout in the middle of a switch dispatch sequence.
                    // The bytecode offsets are not in order, and it would lead to incorrect
                    // bailout info.
                    instrCopy->m_func = predInstr->m_func;
                    predInstr->InsertBefore(instrCopy);
                }
            }
            else
            {
                if (predInstr->m_opcode == Js::OpCode::BailOnNoProfile)
                {
                    // We already have one, we don't need a second.
                    instrCopy->Free();
                }
                else
                {
                    instrCopy->m_func = predInstr->m_func;
                    predInstr->InsertAfter(instrCopy);
                    pred->SetLastInstr(instrCopy);
                }
            }
        }
    } NEXT_PREDECESSOR_BLOCK;

    // If we have a BailOnNoProfile in the first block, there must have been at least one path out of this block that always throws.
    // Don't bother keeping the bailout in the first block as there are some issues in restoring the ArgIn bytecode registers on bailout
    // and throw case should be rare enough that it won't matter for perf.
    if (block->GetBlockNum() != 0)
    {
        blockHeadInstr->isOpHelper = true;
#if DBG
        blockHeadInstr->m_noHelperAssert = true;
#endif
        instr->m_func = curInstr->m_func;

        if (insertionPointForGenerator != nullptr)
        {
            // Keep the bail below the resume-point instruction in generators.
            insertionPointForGenerator->InsertAfter(instr);
            block->beginsBailOnNoProfile = false;
        }
        else
        {
            curInstr->InsertAfter(instr);
            block->beginsBailOnNoProfile = true;
        }

        bool setLastInstr = (curInstr == block->GetLastInstr());
        if (setLastInstr)
        {
            block->SetLastInstr(instr);
        }

        if (this->DoMarkTempObjects())
        {
            block->tempObjectTracker->ProcessBailOnNoProfile(instr);
        }
        return false;
    }
    else
    {
        instr->Free();
        return true;
    }
}
  7680. bool
  7681. BackwardPass::ReverseCopyProp(IR::Instr *instr)
  7682. {
  7683. // Look for :
  7684. //
  7685. // t1 = instr
  7686. // [bytecodeuse t1]
  7687. // t2 = Ld_A t1 >> t1 !upwardExposed
  7688. //
  7689. // Transform into:
  7690. //
  7691. // t2 = instr
  7692. //
  7693. if (PHASE_OFF(Js::ReverseCopyPropPhase, this->func))
  7694. {
  7695. return false;
  7696. }
  7697. if (this->tag != Js::DeadStorePhase || this->IsPrePass() || this->IsCollectionPass())
  7698. {
  7699. return false;
  7700. }
  7701. if (this->func->HasTry())
  7702. {
  7703. // UpwardExposedUsed info can't be relied on
  7704. return false;
  7705. }
  7706. // Find t2 = Ld_A t1
  7707. switch (instr->m_opcode)
  7708. {
  7709. case Js::OpCode::Ld_A:
  7710. case Js::OpCode::Ld_I4:
  7711. break;
  7712. default:
  7713. return false;
  7714. }
  7715. if (!instr->GetDst()->IsRegOpnd())
  7716. {
  7717. return false;
  7718. }
  7719. if (!instr->GetSrc1()->IsRegOpnd())
  7720. {
  7721. return false;
  7722. }
  7723. if (instr->HasBailOutInfo())
  7724. {
  7725. return false;
  7726. }
  7727. IR::RegOpnd *dst = instr->GetDst()->AsRegOpnd();
  7728. IR::RegOpnd *src = instr->GetSrc1()->AsRegOpnd();
  7729. IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
  7730. IR::ByteCodeUsesInstr *byteCodeUseInstr = nullptr;
  7731. StackSym *varSym = src->m_sym;
  7732. if (varSym->IsTypeSpec())
  7733. {
  7734. varSym = varSym->GetVarEquivSym(this->func);
  7735. }
  7736. // SKip ByteCodeUse instr if possible
  7737. // [bytecodeuse t1]
  7738. if (!instrPrev->GetDst())
  7739. {
  7740. if (instrPrev->m_opcode == Js::OpCode::ByteCodeUses)
  7741. {
  7742. byteCodeUseInstr = instrPrev->AsByteCodeUsesInstr();
  7743. const BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = byteCodeUseInstr->GetByteCodeUpwardExposedUsed();
  7744. if (byteCodeUpwardExposedUsed && byteCodeUpwardExposedUsed->Test(varSym->m_id) && byteCodeUpwardExposedUsed->Count() == 1)
  7745. {
  7746. instrPrev = byteCodeUseInstr->GetPrevRealInstrOrLabel();
  7747. if (!instrPrev->GetDst())
  7748. {
  7749. return false;
  7750. }
  7751. }
  7752. else
  7753. {
  7754. return false;
  7755. }
  7756. }
  7757. else
  7758. {
  7759. return false;
  7760. }
  7761. }
  7762. // The fast-path for these doesn't handle dst == src.
  7763. // REVIEW: I believe the fast-path for LdElemI_A has been fixed... Nope, still broken for "i = A[i]" for prejit
  7764. switch (instrPrev->m_opcode)
  7765. {
  7766. case Js::OpCode::LdElemI_A:
  7767. case Js::OpCode::IsInst:
  7768. case Js::OpCode::ByteCodeUses:
  7769. return false;
  7770. }
  7771. // Can't do it if post-op bailout would need result
  7772. // REVIEW: enable for pre-opt bailout?
  7773. if (instrPrev->HasBailOutInfo() && instrPrev->GetByteCodeOffset() != instrPrev->GetBailOutInfo()->bailOutOffset)
  7774. {
  7775. return false;
  7776. }
  7777. // Make sure src of Ld_A == dst of instr
  7778. // t1 = instr
  7779. if (!instrPrev->GetDst()->IsEqual(src))
  7780. {
  7781. return false;
  7782. }
  7783. // Make sure t1 isn't used later
  7784. if (this->currentBlock->upwardExposedUses->Test(src->m_sym->m_id))
  7785. {
  7786. return false;
  7787. }
  7788. if (this->currentBlock->byteCodeUpwardExposedUsed && this->currentBlock->byteCodeUpwardExposedUsed->Test(varSym->m_id))
  7789. {
  7790. return false;
  7791. }
  7792. // Make sure we can dead-store this sym (debugger mode?)
  7793. if (!this->DoDeadStore(this->func, src->m_sym))
  7794. {
  7795. return false;
  7796. }
  7797. StackSym *const dstSym = dst->m_sym;
  7798. if(instrPrev->HasBailOutInfo() && dstSym->IsInt32() && dstSym->IsTypeSpec())
  7799. {
  7800. StackSym *const prevDstSym = IR::RegOpnd::TryGetStackSym(instrPrev->GetDst());
  7801. if(instrPrev->GetBailOutKind() & IR::BailOutOnResultConditions &&
  7802. prevDstSym &&
  7803. prevDstSym->IsInt32() &&
  7804. prevDstSym->IsTypeSpec() &&
  7805. instrPrev->GetSrc1() &&
  7806. !instrPrev->GetDst()->IsEqual(instrPrev->GetSrc1()) &&
  7807. !(instrPrev->GetSrc2() && instrPrev->GetDst()->IsEqual(instrPrev->GetSrc2())))
  7808. {
  7809. // The previous instruction's dst value may be trashed by the time of the pre-op bailout. Skip reverse copy-prop if
  7810. // it would replace the previous instruction's dst with a sym that bailout had decided to use to restore a value for
  7811. // the pre-op bailout, which can't be trashed before bailout. See big comment in ProcessBailOutCopyProps for the
  7812. // reasoning behind the tests above.
  7813. FOREACH_SLISTBASE_ENTRY(
  7814. CopyPropSyms,
  7815. usedCopyPropSym,
  7816. &instrPrev->GetBailOutInfo()->usedCapturedValues->copyPropSyms)
  7817. {
  7818. if(dstSym == usedCopyPropSym.Value())
  7819. {
  7820. return false;
  7821. }
  7822. } NEXT_SLISTBASE_ENTRY;
  7823. }
  7824. }
  7825. if (byteCodeUseInstr)
  7826. {
  7827. if (this->currentBlock->byteCodeUpwardExposedUsed && instrPrev->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg() && varSym->HasByteCodeRegSlot())
  7828. {
  7829. if(varSym->HasByteCodeRegSlot())
  7830. {
  7831. this->currentBlock->byteCodeUpwardExposedUsed->Set(varSym->m_id);
  7832. }
  7833. if (src->IsEqual(dst) && instrPrev->GetDst()->GetIsJITOptimizedReg())
  7834. {
  7835. // s2(s1).i32 = FromVar s1.var #0000 Bailout: #0000 (BailOutIntOnly)
  7836. // ByteCodeUses s1
  7837. // s2(s1).i32 = Ld_A s2(s1).i32
  7838. //
  7839. // Since the dst on the FromVar is marked JITOptimized, we need to set it on the new dst as well,
  7840. // or we'll change the bytecode liveness of s1
  7841. dst->SetIsJITOptimizedReg(true);
  7842. }
  7843. }
  7844. byteCodeUseInstr->Remove();
  7845. }
  7846. else if (instrPrev->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg() && !src->GetIsJITOptimizedReg() && varSym->HasByteCodeRegSlot())
  7847. {
  7848. this->currentBlock->byteCodeUpwardExposedUsed->Set(varSym->m_id);
  7849. }
  7850. #if DBG
  7851. if (this->DoMarkTempObjectVerify())
  7852. {
  7853. this->currentBlock->tempObjectVerifyTracker->NotifyReverseCopyProp(instrPrev);
  7854. }
  7855. #endif
  7856. dst->SetValueType(instrPrev->GetDst()->GetValueType());
  7857. instrPrev->ReplaceDst(dst);
  7858. instr->Remove();
  7859. return true;
  7860. }
bool
BackwardPass::FoldCmBool(IR::Instr *instr)
{
    // Folds "tmp.i32 = Cm* a, b; dst = Conv_Bool tmp" into "dst = Cm* a, b"
    // when tmp is single-def, defined by the immediately preceding Cm
    // instruction, and dead after this Conv_Bool. Returns true when 'instr'
    // (the Conv_Bool) was removed.
    Assert(instr->m_opcode == Js::OpCode::Conv_Bool);

    if (this->tag != Js::DeadStorePhase || this->IsPrePass() || this->IsCollectionPass())
    {
        return false;
    }
    if (this->func->HasTry())
    {
        // UpwardExposedUsed info can't be relied on
        return false;
    }

    IR::RegOpnd *intOpnd = instr->GetSrc1()->AsRegOpnd();
    Assert(intOpnd->m_sym->IsInt32());

    if (!intOpnd->m_sym->IsSingleDef())
    {
        return false;
    }

    IR::Instr *cmInstr = intOpnd->m_sym->GetInstrDef();

    // Should be a Cm instr...
    if (!cmInstr->GetSrc2())
    {
        return false;
    }

    // The Cm must be the instruction immediately before this Conv_Bool.
    IR::Instr *instrPrev = instr->GetPrevRealInstrOrLabel();
    if (instrPrev != cmInstr)
    {
        return false;
    }

    switch (cmInstr->m_opcode)
    {
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmGe_A:
    case Js::OpCode::CmUnGe_A:
    case Js::OpCode::CmGt_A:
    case Js::OpCode::CmUnGt_A:
    case Js::OpCode::CmLt_A:
    case Js::OpCode::CmUnLt_A:
    case Js::OpCode::CmLe_A:
    case Js::OpCode::CmUnLe_A:
    case Js::OpCode::CmNeq_A:
    case Js::OpCode::CmSrEq_A:
    case Js::OpCode::CmSrNeq_A:
    case Js::OpCode::CmEq_I4:
    case Js::OpCode::CmNeq_I4:
    case Js::OpCode::CmLt_I4:
    case Js::OpCode::CmLe_I4:
    case Js::OpCode::CmGt_I4:
    case Js::OpCode::CmGe_I4:
    case Js::OpCode::CmUnLt_I4:
    case Js::OpCode::CmUnLe_I4:
    case Js::OpCode::CmUnGt_I4:
    case Js::OpCode::CmUnGe_I4:
        break;

    default:
        return false;
    }

    IR::RegOpnd *varDst = instr->GetDst()->AsRegOpnd();

    // The int temp must die here, and the var dst must actually be needed,
    // otherwise the fold would change liveness.
    if (this->currentBlock->upwardExposedUses->Test(intOpnd->m_sym->m_id) || !this->currentBlock->upwardExposedUses->Test(varDst->m_sym->m_id))
    {
        return false;
    }

    // Retarget the Cm to produce the var result directly and drop the Conv_Bool.
    varDst = instr->UnlinkDst()->AsRegOpnd();

    cmInstr->ReplaceDst(varDst);

    this->currentBlock->RemoveInstr(instr);

    return true;
}
void
BackwardPass::SetWriteThroughSymbolsSetForRegion(BasicBlock * catchOrFinallyBlock, Region * tryRegion)
{
    // Builds the try region's write-through symbol set: the bytecode
    // upward-exposed syms at the head of the corresponding catch/finally block,
    // i.e. syms whose stores inside the try must be visible to the handler.
    tryRegion->writeThroughSymbolsSet = JitAnew(this->func->m_alloc, BVSparse<JitArenaAllocator>, this->func->m_alloc);

    if (this->DoByteCodeUpwardExposedUsed())
    {
        Assert(catchOrFinallyBlock->byteCodeUpwardExposedUsed);
        if (!catchOrFinallyBlock->byteCodeUpwardExposedUsed->IsEmpty())
        {
            // Copy each upward-exposed sym id into the write-through set.
            FOREACH_BITSET_IN_SPARSEBV(id, catchOrFinallyBlock->byteCodeUpwardExposedUsed)
            {
                tryRegion->writeThroughSymbolsSet->Set(id);
            }
            NEXT_BITSET_IN_SPARSEBV
        }
#if DBG
        // Symbols write-through in the parent try region should be marked as write-through in the current try region as well.
        // x =
        // try{
        //      try{
        //          x = <-- x needs to be write-through here. With the current mechanism of not clearing a write-through
        //                  symbol from the bytecode upward-exposed on a def, x should be marked as write-through as
        //                  write-through symbols for a try are basically the bytecode upward exposed symbols at the
        //                  beginning of the corresponding catch block).
        //                  Verify that it still holds.
        //          <exception>
        //      }
        //      catch(){}
        //      x =
        // }
        // catch(){}
        // = x
        if (tryRegion->GetParent()->GetType() == RegionTypeTry)
        {
            Region * parentTry = tryRegion->GetParent();
            Assert(parentTry->writeThroughSymbolsSet);
            FOREACH_BITSET_IN_SPARSEBV(id, parentTry->writeThroughSymbolsSet)
            {
                Assert(tryRegion->writeThroughSymbolsSet->Test(id));
            }
            NEXT_BITSET_IN_SPARSEBV
        }
#endif
    }
    else
    {
        // this can happen with -off:globopt; the set stays empty.
        return;
    }
}
  7979. bool
  7980. BackwardPass::CheckWriteThroughSymInRegion(Region* region, StackSym* sym)
  7981. {
  7982. if (region->GetType() == RegionTypeRoot)
  7983. {
  7984. return false;
  7985. }
  7986. // if the current region is a try region, check in its write-through set,
  7987. // otherwise (current = catch region) look in the first try ancestor's write-through set
  7988. Region * selfOrFirstTryAncestor = region->GetSelfOrFirstTryAncestor();
  7989. if (!selfOrFirstTryAncestor)
  7990. {
  7991. return false;
  7992. }
  7993. Assert(selfOrFirstTryAncestor->GetType() == RegionTypeTry);
  7994. return selfOrFirstTryAncestor->writeThroughSymbolsSet && selfOrFirstTryAncestor->writeThroughSymbolsSet->Test(sym->m_id);
  7995. }
  7996. #if DBG
  7997. void
  7998. BackwardPass::VerifyByteCodeUpwardExposed(BasicBlock* block, Func* func, BVSparse<JitArenaAllocator>* trackingByteCodeUpwardExposedUsed, IR::Instr* instr, uint32 bytecodeOffset)
  7999. {
  8000. Assert(instr);
  8001. Assert(bytecodeOffset != Js::Constants::NoByteCodeOffset);
  8002. Assert(this->tag == Js::DeadStorePhase);
  8003. // The calculated bytecode upward exposed should be the same between Backward and DeadStore passes
  8004. if (trackingByteCodeUpwardExposedUsed && !trackingByteCodeUpwardExposedUsed->IsEmpty())
  8005. {
  8006. // We don't need to track bytecodeUpwardExposeUses if we don't have bailout
  8007. // We've collected the Backward bytecodeUpwardExposeUses for nothing, oh well.
  8008. if (this->func->hasBailout)
  8009. {
  8010. BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = GetByteCodeRegisterUpwardExposed(block, func, this->tempAlloc);
  8011. BVSparse<JitArenaAllocator>* notInDeadStore = trackingByteCodeUpwardExposedUsed->MinusNew(byteCodeUpwardExposedUsed, this->tempAlloc);
  8012. if (!notInDeadStore->IsEmpty())
  8013. {
  8014. Output::Print(_u("\n\nByteCode Updward Exposed mismatch after DeadStore\n"));
  8015. Output::Print(_u("Mismatch Instr:\n"));
  8016. instr->Dump();
  8017. Output::Print(_u(" ByteCode Register list present before Backward pass missing in DeadStore pass:\n"));
  8018. FOREACH_BITSET_IN_SPARSEBV(bytecodeReg, notInDeadStore)
  8019. {
  8020. Output::Print(_u(" R%u\n"), bytecodeReg);
  8021. }
  8022. NEXT_BITSET_IN_SPARSEBV;
  8023. AssertMsg(false, "ByteCode Updward Exposed Used Mismatch");
  8024. }
  8025. JitAdelete(this->tempAlloc, notInDeadStore);
  8026. JitAdelete(this->tempAlloc, byteCodeUpwardExposedUsed);
  8027. }
  8028. }
  8029. }
void
BackwardPass::CaptureByteCodeUpwardExposed(BasicBlock* block, Func* func, Js::OpCode opcode, uint32 offset)
{
    // Records, per bytecode offset, the set of bytecode registers that are
    // upward-exposed at this point after the Backward pass, so the DeadStore
    // pass can later verify it computes the same set
    // (see VerifyByteCodeUpwardExposed).
    Assert(this->DoCaptureByteCodeUpwardExposedUsed());
    // Keep track of all the bytecode upward exposed after Backward's pass
    BVSparse<JitArenaAllocator>* byteCodeUpwardExposedUsed = GetByteCodeRegisterUpwardExposed(block, func, this->globOpt->alloc);
    // Registers explicitly excluded from tracking are dropped.
    byteCodeUpwardExposedUsed->Minus(block->excludeByteCodeUpwardExposedTracking);
    if (func->GetJITFunctionBody()->GetEnvReg() != Js::Constants::NoByteCodeOffset)
    {
        // No need to restore the environment so don't track it
        // NOTE(review): GetEnvReg() yields a register slot, yet it is compared
        // against NoByteCodeOffset rather than NoRegister - presumably both
        // sentinels share the same bit pattern; confirm and prefer NoRegister.
        byteCodeUpwardExposedUsed->Clear(func->GetJITFunctionBody()->GetEnvReg());
    }
    if (!func->byteCodeRegisterUses)
    {
        // Lazily allocate the per-func offset -> register-uses map.
        func->byteCodeRegisterUses = JitAnew(this->globOpt->alloc, Func::ByteCodeRegisterUses, this->globOpt->alloc);
    }
    Func::InstrByteCodeRegisterUses instrUses;
    if (func->byteCodeRegisterUses->TryGetValueAndRemove(offset, &instrUses))
    {
        if (instrUses.capturingOpCode == Js::OpCode::Leave)
        {
            // Do not overwrite in the case of Leave
            JitAdelete(this->globOpt->alloc, byteCodeUpwardExposedUsed);
            func->byteCodeRegisterUses->Add(offset, instrUses);
            return;
        }
        // Multiple captures at the same offset: merge the register sets.
        byteCodeUpwardExposedUsed->Or(instrUses.bv);
        JitAdelete(this->globOpt->alloc, instrUses.bv);
    }
    instrUses.capturingOpCode = opcode;
    instrUses.bv = byteCodeUpwardExposedUsed;
    func->byteCodeRegisterUses->Add(offset, instrUses);
}
  8063. BVSparse<JitArenaAllocator>*
  8064. BackwardPass::GetByteCodeRegisterUpwardExposed(BasicBlock* block, Func* func, JitArenaAllocator* alloc)
  8065. {
  8066. BVSparse<JitArenaAllocator>* byteCodeRegisterUpwardExposed = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  8067. // Convert the sym to the corresponding bytecode register
  8068. FOREACH_BITSET_IN_SPARSEBV(symID, block->byteCodeUpwardExposedUsed)
  8069. {
  8070. Sym* sym = func->m_symTable->Find(symID);
  8071. if (sym && sym->IsStackSym())
  8072. {
  8073. StackSym* stackSym = sym->AsStackSym();
  8074. // Make sure we only look at bytecode from the func we're interested in
  8075. if (stackSym->GetByteCodeFunc() == func && stackSym->HasByteCodeRegSlot())
  8076. {
  8077. Js::RegSlot bytecode = stackSym->GetByteCodeRegSlot();
  8078. byteCodeRegisterUpwardExposed->Set(bytecode);
  8079. }
  8080. }
  8081. }
  8082. NEXT_BITSET_IN_SPARSEBV;
  8083. return byteCodeRegisterUpwardExposed;
  8084. }
  8085. #endif
bool
BackwardPass::DoDeadStoreLdStForMemop(IR::Instr *instr)
{
    // Returns true when 'instr' is the StElemI_A store (memset/memcopy) or
    // LdElemI_A load (memcopy) of one of the current loop's memop candidates,
    // i.e. the access will be replaced by the memop and may be dead-stored.
    Assert(this->tag == Js::DeadStorePhase && this->currentBlock->loop != nullptr);

    Loop *loop = this->currentBlock->loop;

    if (globOpt->HasMemOp(loop))
    {
        if (instr->m_opcode == Js::OpCode::StElemI_A && instr->GetDst()->IsIndirOpnd())
        {
            // Match the store's base/index var syms against each candidate.
            SymID base = this->globOpt->GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym());
            SymID index = this->globOpt->GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym());

            FOREACH_MEMOP_CANDIDATES(candidate, loop)
            {
                if (base == candidate->base && index == candidate->index)
                {
                    return true;
                }
            } NEXT_MEMOP_CANDIDATE
        }
        else if (instr->m_opcode == Js::OpCode::LdElemI_A && instr->GetSrc1()->IsIndirOpnd())
        {
            // Loads only feed memcopy candidates; match against their load base.
            SymID base = this->globOpt->GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym());
            SymID index = this->globOpt->GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym());

            FOREACH_MEMCOPY_CANDIDATES(candidate, loop)
            {
                if (base == candidate->ldBase && index == candidate->index)
                {
                    return true;
                }
            } NEXT_MEMCOPY_CANDIDATE
        }
    }
    return false;
}
void
BackwardPass::RestoreInductionVariableValuesAfterMemOp(Loop *loop)
{
    // After a loop is turned into a memop (memset/memcopy), the loop body no
    // longer executes, so each induction variable that is read after the loop
    // must be advanced by the amount the loop would have changed it.
    const auto RestoreInductionVariable = [&](SymID symId, Loop::InductionVariableChangeInfo inductionVariableChangeInfo, Loop *loop)
    {
        // Pick the opcode matching the direction the induction variable moved.
        Js::OpCode opCode = Js::OpCode::Add_I4;
        if (!inductionVariableChangeInfo.isIncremental)
        {
            opCode = Js::OpCode::Sub_I4;
        }
        Func *localFunc = loop->GetFunc();
        StackSym *sym = localFunc->m_symTable->FindStackSym(symId);
        if (!sym->IsInt32())
        {
            // Operate on the int32-specialized equivalent of the sym.
            sym = sym->GetInt32EquivSym(localFunc);
        }

        IR::Opnd *inductionVariableOpnd = IR::RegOpnd::New(sym, IRType::TyInt32, localFunc);
        IR::Opnd *tempInductionVariableOpnd = IR::RegOpnd::New(IRType::TyInt32, localFunc);

        // The induction variable is restored to a temp register before the MemOp occurs. Once the MemOp is
        // complete, the induction variable's register is set to the value of the temp register. This is done
        // in order to avoid overwriting the induction variable's value after a bailout on the MemOp.
        IR::Instr* restoreInductionVarToTemp = IR::Instr::New(opCode, tempInductionVariableOpnd, inductionVariableOpnd, loop->GetFunc());

        // The IR that restores the induction variable's value is placed before the MemOp. Since this IR can
        // bailout to the loop's landing pad, placing this IR before the MemOp avoids performing the MemOp,
        // bailing out because of this IR, and then performing the effects of the loop again.
        loop->landingPad->InsertInstrBefore(restoreInductionVarToTemp, loop->memOpInfo->instr);

        // The amount to be added or subtracted (depends on opCode) to the induction variable after the MemOp.
        IR::Opnd *sizeOpnd = globOpt->GenerateInductionVariableChangeForMemOp(loop, inductionVariableChangeInfo.unroll, restoreInductionVarToTemp);
        restoreInductionVarToTemp->SetSrc2(sizeOpnd);
        IR::Instr* restoreInductionVar = IR::Instr::New(Js::OpCode::Ld_A, inductionVariableOpnd, tempInductionVariableOpnd, loop->GetFunc());

        // If restoring an induction variable results in an overflow, bailout to the loop's landing pad.
        restoreInductionVarToTemp->ConvertToBailOutInstr(loop->bailOutInfo, IR::BailOutOnOverflow);

        // Restore the induction variable's actual register once all bailouts have been passed.
        loop->landingPad->InsertAfter(restoreInductionVar);
    };

    for (auto it = loop->memOpInfo->inductionVariableChangeInfoMap->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Loop::InductionVariableChangeInfo iv = it.CurrentValue();
        SymID sym = it.CurrentKey();
        if (iv.unroll != Js::Constants::InvalidLoopUnrollFactor)
        {
            // if the variable is being used after the loop restore it
            if (loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(sym))
            {
                RestoreInductionVariable(sym, iv, loop);
            }
        }
    }
}
bool
BackwardPass::IsEmptyLoopAfterMemOp(Loop *loop)
{
    // Returns true when, once the MemOp has replaced the loop's memory operations,
    // every remaining instruction in the loop body is discardable: induction
    // variable updates, the loop branch itself, labels, and no-ops.
    if (globOpt->HasMemOp(loop))
    {
        // An operand "is an induction variable use" when its sym is tracked in the
        // loop's induction-variable map with a valid (non-invalid) unroll factor.
        const auto IsInductionVariableUse = [&](IR::Opnd *opnd) -> bool
        {
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
            return (opnd &&
                opnd->GetStackSym() &&
                loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(this->globOpt->GetVarSymID(opnd->GetStackSym())) &&
                (((Loop::InductionVariableChangeInfo)
                loop->memOpInfo->inductionVariableChangeInfoMap->
                LookupWithKey(this->globOpt->GetVarSymID(opnd->GetStackSym()), inductionVariableChangeInfo)).unroll != Js::Constants::InvalidLoopUnrollFactor));
        };
        // MemOp-eligible loops are expected to consist of exactly two blocks (head + tail).
        Assert(loop->blockList.HasTwo());
        FOREACH_BLOCK_IN_LOOP(bblock, loop)
        {
            FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrPrev, bblock)
            {
                // Labels, pseudo (non-real) instructions, loop-body counters, and
                // unconditional branches never block removal of the loop.
                if (instr->IsLabelInstr() || !instr->IsRealInstr() || instr->m_opcode == Js::OpCode::IncrLoopBodyCount || instr->m_opcode == Js::OpCode::StLoopBodyCount
                    || (instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional()))
                {
                    continue;
                }
                else
                {
                    switch (instr->m_opcode)
                    {
                    case Js::OpCode::Nop:
                        break;
                    case Js::OpCode::Ld_I4:
                    case Js::OpCode::Add_I4:
                    case Js::OpCode::Sub_I4:
                        // Int32 moves/adds/subs that define an induction variable are
                        // discardable. A def of any other sym is tolerated only if that
                        // sym is not live after the loop.
                        if (!IsInductionVariableUse(instr->GetDst()))
                        {
                            Assert(instr->GetDst());
                            if (instr->GetDst()->GetStackSym()
                                && loop->memOpInfo->inductionVariablesUsedAfterLoop->Test(instr->GetDst()->GetStackSym()->m_id))
                            {
                                // We have use after the loop for a variable defined inside the loop. So the loop can't be removed.
                                return false;
                            }
                        }
                        break;
                    case Js::OpCode::Decr_A:
                    case Js::OpCode::Incr_A:
                        // Increment/decrement is only discardable on an induction variable.
                        if (!IsInductionVariableUse(instr->GetSrc1()))
                        {
                            return false;
                        }
                        break;
                    default:
                        // The conditional loop-back branch is acceptable when it only
                        // tests induction variables; any other real instruction means
                        // the loop still does observable work.
                        if (instr->IsBranchInstr())
                        {
                            if (IsInductionVariableUse(instr->GetSrc1()) || IsInductionVariableUse(instr->GetSrc2()))
                            {
                                break;
                            }
                        }
                        return false;
                    }
                }
            }
            NEXT_INSTR_IN_BLOCK_EDITING;
        }NEXT_BLOCK_IN_LIST;
        return true;
    }
    return false;
}
  8239. void
  8240. BackwardPass::RemoveEmptyLoops()
  8241. {
  8242. if (PHASE_OFF(Js::MemOpPhase, this->func))
  8243. {
  8244. return;
  8245. }
  8246. const auto DeleteMemOpInfo = [&](Loop *loop)
  8247. {
  8248. JitArenaAllocator *alloc = this->func->GetTopFunc()->m_fg->alloc;
  8249. if (!loop->memOpInfo)
  8250. {
  8251. return;
  8252. }
  8253. if (loop->memOpInfo->candidates)
  8254. {
  8255. loop->memOpInfo->candidates->Clear();
  8256. JitAdelete(alloc, loop->memOpInfo->candidates);
  8257. }
  8258. if (loop->memOpInfo->inductionVariableChangeInfoMap)
  8259. {
  8260. loop->memOpInfo->inductionVariableChangeInfoMap->Clear();
  8261. JitAdelete(alloc, loop->memOpInfo->inductionVariableChangeInfoMap);
  8262. }
  8263. if (loop->memOpInfo->inductionVariableOpndPerUnrollMap)
  8264. {
  8265. loop->memOpInfo->inductionVariableOpndPerUnrollMap->Clear();
  8266. JitAdelete(alloc, loop->memOpInfo->inductionVariableOpndPerUnrollMap);
  8267. }
  8268. if (loop->memOpInfo->inductionVariablesUsedAfterLoop)
  8269. {
  8270. JitAdelete(this->tempAlloc, loop->memOpInfo->inductionVariablesUsedAfterLoop);
  8271. }
  8272. JitAdelete(alloc, loop->memOpInfo);
  8273. };
  8274. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  8275. {
  8276. if (IsEmptyLoopAfterMemOp(loop))
  8277. {
  8278. RestoreInductionVariableValuesAfterMemOp(loop);
  8279. RemoveEmptyLoopAfterMemOp(loop);
  8280. }
  8281. // Remove memop info as we don't need them after this point.
  8282. DeleteMemOpInfo(loop);
  8283. } NEXT_LOOP_IN_FUNC_EDITING;
  8284. }
void
BackwardPass::RemoveEmptyLoopAfterMemOp(Loop *loop)
{
    // Unlinks an empty (post-MemOp) loop from the flow graph: the landing pad is
    // re-routed directly to the block that followed the loop, and the loop's head
    // and tail blocks are deleted.
    BasicBlock *head = loop->GetHeadBlock();
    BasicBlock *tail = head->next;
    BasicBlock *landingPad = loop->landingPad;
    BasicBlock *outerBlock = nullptr;
    SListBaseCounted<FlowEdge *> *succList = head->GetSuccList();
    Assert(succList->HasTwo());
    // Between the two successors of head, one is tail and the other one is the outerBlock
    SListBaseCounted<FlowEdge *>::Iterator iter(succList);
    iter.Next();
    if (iter.Data()->GetSucc() == tail)
    {
        iter.Next();
        outerBlock = iter.Data()->GetSucc();
    }
    else
    {
        outerBlock = iter.Data()->GetSucc();
#ifdef DBG
        // Sanity check: the remaining successor must be the loop tail.
        iter.Next();
        Assert(iter.Data()->GetSucc() == tail);
#endif
    }
    // Detach the loop from the surrounding flow graph; after this the landing pad
    // has no successors at all until the bypass edge is added below.
    outerBlock->RemovePred(head, this->func->m_fg);
    landingPad->RemoveSucc(head, this->func->m_fg);
    Assert(landingPad->GetSuccList()->Count() == 0);
    IR::Instr* firstOuterInstr = outerBlock->GetFirstInstr();
    AssertOrFailFast(firstOuterInstr->IsLabelInstr() && !landingPad->GetLastInstr()->EndsBasicBlock());
    IR::LabelInstr* label = firstOuterInstr->AsLabelInstr();
    // Add br to Outer block to keep coherence between branches and flow graph
    IR::BranchInstr *outerBr = IR::BranchInstr::New(Js::OpCode::Br, label, this->func);
    landingPad->InsertAfter(outerBr);
    this->func->m_fg->AddEdge(landingPad, outerBlock);
    // Finally drop the now-unreachable loop blocks (head may equal tail for a
    // single-block loop, in which case only one removal is needed).
    this->func->m_fg->RemoveBlock(head, nullptr);
    if (head != tail)
    {
        this->func->m_fg->RemoveBlock(tail, nullptr);
    }
}
  8326. #if DBG_DUMP
  8327. bool
  8328. BackwardPass::IsTraceEnabled() const
  8329. {
  8330. return
  8331. Js::Configuration::Global.flags.Trace.IsEnabled(tag, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()) &&
  8332. (PHASE_TRACE(Js::SimpleJitPhase, func) || !func->IsSimpleJit());
  8333. }
  8334. #endif