2
0

GlobOpt.cpp 650 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
7422174231742417425174261742717428174291743017431174321743317434174351743617437174381743917440174411744217443174441744517446174471744817449174501745117452174531745417455174561745717458174591746017461174621746317464174651746617467174681746917470174711747217473174741747517476174771747817479174801748117482174831748417485174861748717488174891749017491174921749317494174951749617497174981749917500175011750217503175041750517506175071750817509175101751117512175131751417515175161751717518175191752017521175221752317524175251752617527175281752917530175311753217533175341753517536175371753817539175401754117542175431754417545175461754717548175491755017551175521755317554175551755617557175581755917560175611756217563175641756517566175671756817569175701757117572175731757417575175761757717578175791758017581175821758317584175851758617587175881758917590175911759217593175941759517596175971759817599176001760117602176031760417605176061760717608176091761017611176121761317614176151761617617176181761917620176211762217623176241762517626176271762817629176301763117632176331763417635176361763717638176391764017641176421764317644176451764617647176481764917650176511765217653176541765517656176571765817659176601766117662176631766417665176661766717668176691767017671176721767317674176751767617677176781767917680176811768217683176841768517686176871768817689176901769117692176931769417695176961769717698176991770017701177021770317704177051770617707177081770917710177111771217713177141771517716177171771817719177201772117722177231772417725177261772717728
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
#include "Backend.h"

#include <cstdint>  // INT32_MIN (used in GlobOpt::GetDivValueType)
// Debug-only tracing helpers for the MemOp optimization (memset/memcopy loop
// rewriting). Everything below compiles away to nothing when DBG_DUMP is not
// defined (see the #else branch at the bottom).
#if DBG_DUMP
// True when any MemOp-family trace phase (MemOp, MemSet, MemCopy) is enabled
// for the current function.
#define DO_MEMOP_TRACE() (PHASE_TRACE(Js::MemOpPhase, this->func) ||\
PHASE_TRACE(Js::MemSetPhase, this->func) ||\
PHASE_TRACE(Js::MemCopyPhase, this->func))
// True when the umbrella MemOp phase or the given sub-phase (token-pasted
// into Js::<phase>Phase) is being traced.
#define DO_MEMOP_TRACE_PHASE(phase) (PHASE_TRACE(Js::MemOpPhase, this->func) || PHASE_TRACE(Js::phase ## Phase, this->func))
// Prints the function/loop context, the caller-formatted message, and — when
// 'instr' is non-null — the instruction's byte-code offset and opcode name,
// then flushes the output.
#define OUTPUT_MEMOP_TRACE(loop, instr, ...) {\
char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];\
Output::Print(15, _u("Function: %s%s, Loop: %u: "), this->func->GetJITFunctionBody()->GetDisplayName(), this->func->GetDebugNumberSet(debugStringBuffer), loop->GetLoopNumber());\
Output::Print(__VA_ARGS__);\
IR::Instr* __instr__ = instr;\
if(__instr__) __instr__->DumpByteCodeOffset();\
if(__instr__) Output::Print(_u(" (%s)"), Js::OpCodeUtil::GetOpCodeName(__instr__->m_opcode));\
Output::Print(_u("\n"));\
Output::Flush(); \
}
// Trace gated on the combined MemOp phases (see DO_MEMOP_TRACE).
#define TRACE_MEMOP(loop, instr, ...) \
if (DO_MEMOP_TRACE()) {\
Output::Print(_u("TRACE MemOp:"));\
OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
}
// Same as TRACE_MEMOP, but additionally gated on the Verbose config flag.
#define TRACE_MEMOP_VERBOSE(loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP(loop, instr, __VA_ARGS__)}
// Trace gated on a specific MemOp sub-phase; the phase name is also stringized
// into the trace prefix.
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...) \
if (DO_MEMOP_TRACE_PHASE(phase))\
{\
Output::Print(_u("TRACE ") _u(#phase) _u(":"));\
OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
}
// Same as TRACE_MEMOP_PHASE, but additionally gated on the Verbose config flag.
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP_PHASE(phase, loop, instr, __VA_ARGS__)}
#else
// Non-DBG_DUMP builds: all MemOp tracing macros expand to nothing.
#define DO_MEMOP_TRACE()
#define DO_MEMOP_TRACE_PHASE(phase)
#define OUTPUT_MEMOP_TRACE(loop, instr, ...)
#define TRACE_MEMOP(loop, instr, ...)
#define TRACE_MEMOP_VERBOSE(loop, instr, ...)
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...)
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...)
#endif
  43. class AutoRestoreVal
  44. {
  45. private:
  46. Value *const originalValue;
  47. Value *const tempValue;
  48. Value * *const valueRef;
  49. public:
  50. AutoRestoreVal(Value *const originalValue, Value * *const tempValueRef)
  51. : originalValue(originalValue), tempValue(*tempValueRef), valueRef(tempValueRef)
  52. {
  53. }
  54. ~AutoRestoreVal()
  55. {
  56. if(*valueRef == tempValue)
  57. {
  58. *valueRef = originalValue;
  59. }
  60. }
  61. PREVENT_COPY(AutoRestoreVal);
  62. };
// Constructs the global optimizer for 'func'. Most pointer members start out
// null and are wired to stack-local structures later, in ForwardPass. The
// trailing do* flags are computed once here and cache which optimizations are
// enabled for this run, combining phase on/off switches with profile-based
// disable bits. Note the dependency chains in the flags: doTypeSpec gates the
// aggressive/lossy/float specializations, doAggressiveIntTypeSpec gates the
// div/pow int specializations and relative int bounds tracking, and
// doArrayCheckHoist gates the whole array-hoisting family.
GlobOpt::GlobOpt(Func * func)
: func(func),
intConstantToStackSymMap(nullptr),
intConstantToValueMap(nullptr),
currentValue(FirstNewValueNumber),
prePassLoop(nullptr),
alloc(nullptr),
isCallHelper(false),
inInlinedBuiltIn(false),
rootLoopPrePass(nullptr),
noImplicitCallUsesToInsert(nullptr),
valuesCreatedForClone(nullptr),
valuesCreatedForMerge(nullptr),
instrCountSinceLastCleanUp(0),
isRecursiveCallOnLandingPad(false),
updateInductionVariableValueNumber(false),
isPerformingLoopBackEdgeCompensation(false),
currentRegion(nullptr),
auxSlotPtrSyms(nullptr),
changedSymsAfterIncBailoutCandidate(nullptr),
// --- Optimization-enabling flags (computed once, read-only afterwards) ---
doTypeSpec(
!IsTypeSpecPhaseOff(func)),
doAggressiveIntTypeSpec(
doTypeSpec &&
DoAggressiveIntTypeSpec(func)),
doAggressiveMulIntTypeSpec(
doTypeSpec &&
!PHASE_OFF(Js::AggressiveMulIntTypeSpecPhase, func) &&
(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsAggressiveMulIntTypeSpecDisabled(func->IsLoopBody()))),
doDivIntTypeSpec(
doAggressiveIntTypeSpec &&
(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsDivIntTypeSpecDisabled(func->IsLoopBody()))),
doLossyIntTypeSpec(
doTypeSpec &&
DoLossyIntTypeSpec(func)),
doFloatTypeSpec(
doTypeSpec &&
DoFloatTypeSpec(func)),
// Array-hoisting family: everything below requires doArrayCheckHoist.
doArrayCheckHoist(
DoArrayCheckHoist(func)),
doArrayMissingValueCheckHoist(
doArrayCheckHoist &&
DoArrayMissingValueCheckHoist(func)),
doArraySegmentHoist(
doArrayCheckHoist &&
DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array), func)),
doJsArraySegmentHoist(
doArraySegmentHoist &&
DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array), func)),
doArrayLengthHoist(
doArrayCheckHoist &&
DoArrayLengthHoist(func)),
doEliminateArrayAccessHelperCall(
doArrayCheckHoist &&
!PHASE_OFF(Js::EliminateArrayAccessHelperCallPhase, func)),
// Bound-check elimination/hoisting builds on relative int bounds tracking.
doTrackRelativeIntBounds(
doAggressiveIntTypeSpec &&
DoPathDependentValues() &&
!PHASE_OFF(Js::Phase::TrackRelativeIntBoundsPhase, func)),
doBoundCheckElimination(
doTrackRelativeIntBounds &&
!PHASE_OFF(Js::Phase::BoundCheckEliminationPhase, func)),
doBoundCheckHoist(
doEliminateArrayAccessHelperCall &&
doBoundCheckElimination &&
DoConstFold() &&
!PHASE_OFF(Js::Phase::BoundCheckHoistPhase, func) &&
(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsBoundCheckHoistDisabled(func->IsLoopBody()))),
doLoopCountBasedBoundCheckHoist(
doBoundCheckHoist &&
!PHASE_OFF(Js::Phase::LoopCountBasedBoundCheckHoistPhase, func) &&
(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLoopCountBasedBoundCheckHoistDisabled(func->IsLoopBody()))),
doPowIntIntTypeSpec(
doAggressiveIntTypeSpec &&
(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsPowIntIntTypeSpecDisabled())),
doTagChecks(
(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTagCheckDisabled())),
isAsmJSFunc(func->GetJITFunctionBody()->IsAsmJsMode())
{
}
  143. void
  144. GlobOpt::BackwardPass(Js::Phase tag)
  145. {
  146. BEGIN_CODEGEN_PHASE(this->func, tag);
  147. ::BackwardPass backwardPass(this->func, this, tag);
  148. backwardPass.Optimize();
  149. END_CODEGEN_PHASE(this->func, tag);
  150. }
// Top-level driver for the global optimizer: backward pass, forward (glob opt)
// pass, dead-store pass, then tail duplication. When glob opt is disabled for
// this function, only the dead-store pass runs (it is still needed to compute
// live registers on back edges).
void
GlobOpt::Optimize()
{
this->objectTypeSyms = nullptr;
this->func->argInsCount = this->func->GetInParamsCount() - 1; //Don't include "this" pointer in the count.
if (!func->DoGlobOpt())
{
this->lengthEquivBv = nullptr;
this->argumentsEquivBv = nullptr;
this->callerEquivBv = nullptr;
// Still need to run the dead store phase to calculate the live reg on back edge
this->BackwardPass(Js::DeadStorePhase);
CannotAllocateArgumentsObjectOnStack(nullptr);
return;
}
// Scope block for the per-run arena allocators: localAlloc/localTempAlloc are
// destroyed (releasing all glob opt allocations) when this block exits, before
// TailDupPass runs.
{
this->lengthEquivBv = this->func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::length, nullptr); // Used to kill live "length" properties
this->argumentsEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::arguments, nullptr); // Used to kill live "arguments" properties
this->callerEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::caller, nullptr); // Used to kill live "caller" properties
// The backward phase needs the glob opt's allocator to allocate the propertyTypeValueMap
// in GlobOpt::EnsurePropertyTypeValue and ranges of instructions where int overflow may be ignored.
// (see BackwardPass::TrackIntUsage)
PageAllocator * pageAllocator = this->func->m_alloc->GetPageAllocator();
NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-GlobOpt"), pageAllocator, Js::Throw::OutOfMemory);
this->alloc = &localAlloc;
NoRecoverMemoryJitArenaAllocator localTempAlloc(_u("BE-GlobOpt temp"), pageAllocator, Js::Throw::OutOfMemory);
this->tempAlloc = &localTempAlloc;
// The forward passes use info (upwardExposedUses) from the backward pass. This info
// isn't available for some of the symbols created during the backward pass, or the forward pass.
// Keep track of the last symbol for which we're guaranteed to have data.
this->maxInitialSymID = this->func->m_symTable->GetMaxSymID();
#if DBG
this->BackwardPass(Js::CaptureByteCodeRegUsePhase);
#endif
this->BackwardPass(Js::BackwardPhase);
this->ForwardPass();
this->BackwardPass(Js::DeadStorePhase);
}
this->TailDupPass();
}
  191. bool GlobOpt::ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd)
  192. {
  193. Assert(indirOpnd);
  194. if(!indirOpnd->GetIndexOpnd())
  195. {
  196. return indirOpnd->GetOffset() >= 0;
  197. }
  198. IR::RegOpnd *const indexOpnd = indirOpnd->GetIndexOpnd();
  199. if(indexOpnd->m_sym->m_isNotNumber)
  200. {
  201. // Typically, single-def or any sym-specific information for type-specialized syms should not be used because all of
  202. // their defs will not have been accounted for until after the forward pass. But m_isNotNumber is only ever changed from
  203. // false to true, so it's okay in this case.
  204. return false;
  205. }
  206. StackSym *indexVarSym = indexOpnd->m_sym;
  207. if(indexVarSym->IsTypeSpec())
  208. {
  209. indexVarSym = indexVarSym->GetVarEquivSym(nullptr);
  210. Assert(indexVarSym);
  211. }
  212. else if(!IsLoopPrePass())
  213. {
  214. // Don't use single-def info or const flags for type-specialized syms, as all of their defs will not have been accounted
  215. // for until after the forward pass. Also, don't use the const flags in a loop prepass because the const flags may not
  216. // be up-to-date.
  217. if (indexOpnd->IsNotInt())
  218. {
  219. return false;
  220. }
  221. StackSym *const indexSym = indexOpnd->m_sym;
  222. if(indexSym->IsIntConst())
  223. {
  224. return indexSym->GetIntConstValue() >= 0;
  225. }
  226. }
  227. Value *const indexValue = CurrentBlockData()->FindValue(indexVarSym);
  228. if(!indexValue)
  229. {
  230. // Treat it as Uninitialized, assume it's going to be valid
  231. return true;
  232. }
  233. ValueInfo *const indexValueInfo = indexValue->GetValueInfo();
  234. int32 indexConstantValue;
  235. if(indexValueInfo->TryGetIntConstantValue(&indexConstantValue))
  236. {
  237. return indexConstantValue >= 0;
  238. }
  239. if(indexValueInfo->IsUninitialized())
  240. {
  241. // Assume it's going to be valid
  242. return true;
  243. }
  244. return indexValueInfo->HasBeenNumber() && !indexValueInfo->HasBeenFloat();
  245. }
  246. //
  247. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  248. //
  249. ValueType GlobOpt::GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize)
  250. {
  251. ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
  252. ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
  253. if (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo())
  254. {
  255. ValueType resultType = instr->m_func->GetReadOnlyProfileInfo()->GetDivProfileInfo(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));
  256. if (resultType.IsLikelyInt())
  257. {
  258. if (specialize && src1ValueInfo && src2ValueInfo
  259. && ((src1ValueInfo->IsInt() && src2ValueInfo->IsInt()) ||
  260. (this->DoDivIntTypeSpec() && src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())))
  261. {
  262. return ValueType::GetInt(true);
  263. }
  264. return resultType;
  265. }
  266. // Consider: Checking that the sources are numbers.
  267. if (resultType.IsLikelyFloat())
  268. {
  269. return ValueType::Float;
  270. }
  271. return resultType;
  272. }
  273. int32 src1IntConstantValue;
  274. if(!src1ValueInfo || !src1ValueInfo->TryGetIntConstantValue(&src1IntConstantValue))
  275. {
  276. return ValueType::Number;
  277. }
  278. if (src1IntConstantValue == 1)
  279. {
  280. return ValueType::Float;
  281. }
  282. int32 src2IntConstantValue;
  283. if(!src2Val || !src2ValueInfo->TryGetIntConstantValue(&src2IntConstantValue))
  284. {
  285. return ValueType::Number;
  286. }
  287. if (src2IntConstantValue // Avoid divide by zero
  288. && !(src1IntConstantValue == 0x80000000 && src2IntConstantValue == -1) // Avoid integer overflow
  289. && (src1IntConstantValue % src2IntConstantValue) != 0)
  290. {
  291. return ValueType::Float;
  292. }
  293. return ValueType::Number;
  294. }
// The forward (main) pass of the global optimizer: sets up all per-run maps
// and bit vectors on this->alloc (wiring 'this' members to stack-local
// structures, so the pointers must be nulled before returning), optimizes each
// basic block in order, runs MemOp, and then releases per-block data.
void
GlobOpt::ForwardPass()
{
BEGIN_CODEGEN_PHASE(this->func, Js::ForwardPhase);
#if DBG_DUMP
if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
{
this->func->DumpHeader();
}
if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::GlobOptPhase))
{
this->TraceSettings();
}
#endif
// GetConstantCount() gives us the right size to pick for the SparseArray, but we may need more if we've inlined
// functions with constants. There will be a gap in the symbol numbering between the main constants and
// the inlined ones, so we'll most likely need a new array chunk. Make the min size of the array chunks be 64
// in case we have a main function with very few constants and a bunch of constants from inlined functions.
this->byteCodeConstantValueArray = SparseArray<Value>::New(this->alloc, max(this->func->GetJITFunctionBody()->GetConstCount(), 64U));
this->byteCodeConstantValueNumbersBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
this->tempBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
this->prePassCopyPropSym = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
this->slotSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
this->byteCodeUses = nullptr;
this->propertySymUse = nullptr;
// changedSymsAfterIncBailoutCandidate helps track building incremental bailout in ForwardPass
this->changedSymsAfterIncBailoutCandidate = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
this->auxSlotPtrSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
#if DBG
this->byteCodeUsesBeforeOpt = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) && this->DoFunctionFieldCopyProp())
{
Output::Print(_u("TRACE: CanDoFieldCopyProp Func: "));
this->func->DumpFullFunctionName();
Output::Print(_u("\n"));
}
#endif
// The maps below live on this function's stack frame; 'this' only borrows
// them for the duration of the pass, and the pointers are reset to null
// before this function returns.
OpndList localNoImplicitCallUsesToInsert(alloc);
this->noImplicitCallUsesToInsert = &localNoImplicitCallUsesToInsert;
IntConstantToStackSymMap localIntConstantToStackSymMap(alloc);
this->intConstantToStackSymMap = &localIntConstantToStackSymMap;
IntConstantToValueMap localIntConstantToValueMap(alloc);
this->intConstantToValueMap = &localIntConstantToValueMap;
Int64ConstantToValueMap localInt64ConstantToValueMap(alloc);
this->int64ConstantToValueMap = &localInt64ConstantToValueMap;
AddrConstantToValueMap localAddrConstantToValueMap(alloc);
this->addrConstantToValueMap = &localAddrConstantToValueMap;
StringConstantToValueMap localStringConstantToValueMap(alloc);
this->stringConstantToValueMap = &localStringConstantToValueMap;
SymIdToInstrMap localPrePassInstrMap(alloc);
this->prePassInstrMap = &localPrePassInstrMap;
ValueSetByValueNumber localValuesCreatedForClone(alloc, 64);
this->valuesCreatedForClone = &localValuesCreatedForClone;
ValueNumberPairToValueMap localValuesCreatedForMerge(alloc, 64);
this->valuesCreatedForMerge = &localValuesCreatedForMerge;
#if DBG
BVSparse<JitArenaAllocator> localFinishedStackLiteralInitFld(alloc);
this->finishedStackLiteralInitFld = &localFinishedStackLiteralInitFld;
#endif
// Main loop: optimize each block in function order (editing — blocks may be
// removed along the way).
FOREACH_BLOCK_IN_FUNC_EDITING(block, this->func)
{
this->OptBlock(block);
} NEXT_BLOCK_IN_FUNC_EDITING;
if (!PHASE_OFF(Js::MemOpPhase, this->func))
{
ProcessMemOp();
}
// Detach the stack-local structures set up above before they go out of scope.
this->noImplicitCallUsesToInsert = nullptr;
this->intConstantToStackSymMap = nullptr;
this->intConstantToValueMap = nullptr;
this->int64ConstantToValueMap = nullptr;
this->addrConstantToValueMap = nullptr;
this->stringConstantToValueMap = nullptr;
#if DBG
this->finishedStackLiteralInitFld = nullptr;
uint freedCount = 0;
uint spilledCount = 0;
#endif
// Release per-block data; in DBG, count how many blocks already had their
// data freed versus still held ("spilled") to sanity-check below.
FOREACH_BLOCK_IN_FUNC(block, this->func)
{
#if DBG
if (block->GetDataUseCount() == 0)
{
freedCount++;
}
else
{
spilledCount++;
}
#endif
block->SetDataUseCount(0);
if (block->cloneStrCandidates)
{
JitAdelete(this->alloc, block->cloneStrCandidates);
block->cloneStrCandidates = nullptr;
}
} NEXT_BLOCK_IN_FUNC;
// Make sure we free most of them.
Assert(freedCount >= spilledCount);
// this->alloc will be freed right after return, no need to free it here
this->changedSymsAfterIncBailoutCandidate = nullptr;
this->auxSlotPtrSyms = nullptr;
END_CODEGEN_PHASE(this->func, Js::ForwardPhase);
}
// Optimize a single basic block in the forward pass: merge the predecessors'
// value maps, run per-instruction optimization, and — when the block is a
// loop tail — compensate type-specialized symbols so the back-edge state
// matches the loop header. Loop headers trigger the loop pre-pass (OptLoops)
// before the block itself is processed.
void
GlobOpt::OptBlock(BasicBlock *block)
{
    // Unreachable blocks are deleted outright; nothing to optimize.
    if (this->func->m_fg->RemoveUnreachableBlock(block, this))
    {
        GOPT_TRACE(_u("Removing unreachable block #%d\n"), block->GetBlockNum());
        return;
    }

    Loop * loop = block->loop;
    if (loop && block->isLoopHeader)
    {
        // First encounter of this loop header (we're not already inside its
        // own pre-pass): run the pre-pass over the whole loop.
        if (loop != this->prePassLoop)
        {
            OptLoops(loop);
            if (!IsLoopPrePass() && loop->parent)
            {
                // Propagate the parent loop's PRE sym stores so this loop
                // sees the fields the outer loop preloaded.
                loop->fieldPRESymStores->Or(loop->parent->fieldPRESymStores);
            }
            if (!this->IsLoopPrePass() && DoFieldPRE(loop))
            {
                // Note: !IsLoopPrePass means this was a root loop pre-pass. FieldPre() is called once per loop.
                this->FieldPRE(loop);

                // Re-optimize the landing pad
                BasicBlock *landingPad = loop->landingPad;
                this->isRecursiveCallOnLandingPad = true;
                this->OptBlock(landingPad);
                this->isRecursiveCallOnLandingPad = false;
            }
        }
    }

    this->currentBlock = block;
    PrepareLoopArrayCheckHoist();

    // Flow the predecessors' GlobOpt data (values, liveness) into this block.
    block->MergePredBlocksValueMaps(this);

    this->intOverflowCurrentlyMattersInRange = true;
    this->intOverflowDoesNotMatterRange = this->currentBlock->intOverflowDoesNotMatterRange;

    if (!DoFieldCopyProp() && !DoFieldRefOpts())
    {
        // No field optimizations at all in this function: field values
        // cannot be trusted across instructions, so kill them up front.
        this->KillAllFields(CurrentBlockData()->liveFields);
    }

    this->tempAlloc->Reset();

    if(loop && block->isLoopHeader)
    {
        // Remember where this loop's value numbers start; values numbered
        // before this point are invariant with respect to the loop.
        loop->firstValueNumberInLoop = this->currentValue;
    }

    GOPT_TRACE_BLOCK(block, true);

    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        GOPT_TRACE_INSTRTRACE(instr);

        BailOutInfo* oldBailOutInfo = nullptr;
        // In debug-mode JIT (outside the pre-pass), instructions may carry
        // only auxiliary bail-out data; capture it so it can be promoted to
        // a regular bail-out after OptInstr runs.
        bool isCheckAuxBailoutNeeded = this->func->IsJitInDebugMode() && !this->IsLoopPrePass();
        if (isCheckAuxBailoutNeeded && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            oldBailOutInfo = instr->GetBailOutInfo();
            Assert(oldBailOutInfo);
        }
        bool isInstrRemoved = false;
        instrNext = this->OptInstr(instr, &isInstrRemoved);

        // If we still have instrs with only aux bail out, convert aux bail out back to regular bail out and fill it.
        // During OptInstr some instr can be moved out to a different block, in this case bailout info is going to be replaced
        // with e.g. loop bailout info which is filled as part of processing that block, thus we don't need to fill it here.
        if (isCheckAuxBailoutNeeded && !isInstrRemoved && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            if (instr->GetBailOutInfo() == oldBailOutInfo)
            {
                instr->PromoteAuxBailOut();
                FillBailOutInfo(block, instr);
            }
            else
            {
                AssertMsg(instr->GetBailOutInfo(), "With aux bailout, the bailout info should not be removed by OptInstr.");
            }
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;

    GOPT_TRACE_BLOCK(block, false);
    if (block->loop)
    {
        if (IsLoopPrePass())
        {
            if (DoBoundCheckHoist())
            {
                DetectUnknownChangesToInductionVariables(&block->globOptData);
            }
        }
        else
        {
            isPerformingLoopBackEdgeCompensation = true;

            Assert(this->tempBv->IsEmpty());
            BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);

            // On loop back-edges, we need to restore the state of the type specialized
            // symbols to that of the loop header.
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                // A successor that is a header of a loop containing this
                // block means this block is a loop tail (back-edge source).
                if (succ->isLoopHeader && succ->loop->IsDescendentOrSelf(block->loop))
                {
                    BVSparse<JitArenaAllocator> *liveOnBackEdge = block->loop->regAlloc.liveOnBackEdgeSyms;
                    liveOnBackEdge->Or(block->loop->fieldPRESymStores);

                    // Var on entry to the loop but not var here: convert back to var.
                    this->tempBv->Minus(block->loop->varSymsOnEntry, block->globOptData.liveVarSyms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToVar(this->tempBv, block);

                    // Lossy int in the loop header, and no int on the back-edge - need a lossy conversion to int
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int on the back-edge - need a lossless conversion to int
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, false /* lossy */);

                    // Float on entry but not float here: convert back to float64.
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block);

                    // For ints and floats, go aggressive and type specialize in the landing pad any symbol which was specialized on
                    // entry to the loop body (in the loop header), and is still specialized on this tail, but wasn't specialized in
                    // the landing pad.

                    // Lossy int in the loop header and no int in the landing pad - need a lossy conversion to int
                    // (entry.lossyInt32 - landingPad.int32)
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->loop->landingPad->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int in the landing pad - need a lossless conversion to int
                    // ((entry.int32 - entry.lossyInt32) - (landingPad.int32 - landingPad.lossyInt32))
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(
                        block->loop->landingPad->globOptData.liveInt32Syms,
                        block->loop->landingPad->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, false /* lossy */);

                    // ((entry.float64 - landingPad.float64) & block.float64)
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->loop->landingPad->globOptData.liveFloat64Syms);
                    this->tempBv->And(block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block->loop->landingPad);

                    // Now that we're done with the liveFields within this loop, trim the set to those syms
                    // that the backward pass told us were live out of the loop.
                    // This assumes we have no further need of the liveFields within the loop.
                    if (block->loop->liveOutFields)
                    {
                        block->globOptData.liveFields->And(block->loop->liveOutFields);
                    }
                }
            } NEXT_SUCCESSOR_BLOCK;

            this->tempBv->ClearAll();
            isPerformingLoopBackEdgeCompensation = false;
        }
    }

    block->PathDepBranchFolding(this);

#if DBG
    // The set of live lossy int32 syms should be a subset of all live int32 syms
    this->tempBv->And(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    // The set of live lossy int32 syms should be a subset of live var or float syms (var or float sym containing the lossless
    // value of the sym should be live)
    this->tempBv->Or(block->globOptData.liveVarSyms, block->globOptData.liveFloat64Syms);
    this->tempBv->And(block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    this->tempBv->ClearAll();
    Assert(this->currentBlock == block);
#endif
}
// Run the loop pre-pass over 'loop': set up the prePassLoop/rootLoopPrePass
// state, allocate or reset the per-loop tracking bit-vectors, then walk every
// block in the loop through OptBlock (which recurses into nested loops).
void
GlobOpt::OptLoops(Loop *loop)
{
    Assert(loop != nullptr);

#if DBG
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) &&
        !DoFunctionFieldCopyProp() && DoFieldCopyProp(loop))
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Loop: "));
        this->func->DumpFullFunctionName();
        uint loopNumber = loop->GetLoopNumber();
        Assert(loopNumber != Js::LoopHeader::NoLoop);
        Output::Print(_u(" Loop: %d\n"), loopNumber);
    }
#endif

    // Save/restore the current pre-pass loop so nested calls nest correctly.
    Loop *previousLoop = this->prePassLoop;
    this->prePassLoop = loop;

    if (previousLoop == nullptr)
    {
        // This is the root loop of the current pre-pass chain.
        Assert(this->rootLoopPrePass == nullptr);
        this->rootLoopPrePass = loop;
        this->prePassInstrMap->Clear();
        if (loop->parent == nullptr)
        {
            // Outer most loop...
            this->prePassCopyPropSym->ClearAll();
        }
    }

    Assert(loop->symsAssignedToInLoop != nullptr);

    // First visit allocates the per-loop tracking sets; later visits reset
    // them (a loop may be pre-passed again, e.g. from an outer loop's pre-pass).
    if (loop->symsUsedBeforeDefined == nullptr)
    {
        loop->symsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyIntSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyNumberSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceFloat64SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->symsDefInLoop = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldKilled = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldPRESymStores = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->allFieldsKilled = false;
    }
    else
    {
        loop->symsUsedBeforeDefined->ClearAll();
        loop->likelyIntSymsUsedBeforeDefined->ClearAll();
        loop->likelyNumberSymsUsedBeforeDefined->ClearAll();
        loop->forceFloat64SymsOnEntry->ClearAll();
        loop->symsDefInLoop->ClearAll();
        loop->fieldKilled->ClearAll();
        loop->allFieldsKilled = false;
        loop->initialValueFieldMap.Reset();
    }

    FOREACH_BLOCK_IN_LOOP(block, loop)
    {
        // Seed the data-use count so merged GlobOpt data can be released
        // once all successors have consumed it.
        block->SetDataUseCount(block->GetSuccList()->Count());
        OptBlock(block);
    } NEXT_BLOCK_IN_LOOP;

    if (previousLoop == nullptr)
    {
        Assert(this->rootLoopPrePass == loop);
        this->rootLoopPrePass = nullptr;
    }

    this->prePassLoop = previousLoop;
}
  623. void
  624. GlobOpt::TailDupPass()
  625. {
  626. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  627. {
  628. BasicBlock* header = loop->GetHeadBlock();
  629. BasicBlock* loopTail = nullptr;
  630. FOREACH_PREDECESSOR_BLOCK(pred, header)
  631. {
  632. if (loop->IsDescendentOrSelf(pred->loop))
  633. {
  634. loopTail = pred;
  635. break;
  636. }
  637. } NEXT_PREDECESSOR_BLOCK;
  638. if (loopTail)
  639. {
  640. AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
  641. if (!loopTail->GetPredList()->HasOne())
  642. {
  643. TryTailDup(loopTail->GetLastInstr()->AsBranchInstr());
  644. }
  645. }
  646. } NEXT_LOOP_IN_FUNC_EDITING;
  647. }
// Attempt tail duplication at 'tailBranch': if the block ending in this
// unconditional branch is a short tail (at most one CSE-safe instruction)
// entered through a merge label, copy the tail's instructions into each
// unconditional predecessor branch and retarget those branches past the
// merge. Returns false when the phase is off or the tail is not duplicable;
// returns true once duplication has been attempted (even if no predecessor
// qualified).
bool
GlobOpt::TryTailDup(IR::BranchInstr *tailBranch)
{
    if (PHASE_OFF(Js::TailDupPhase, tailBranch->m_func->GetTopFunc()))
    {
        return false;
    }
    // Only duplicate past unconditional branches.
    if (tailBranch->IsConditional())
    {
        return false;
    }

    IR::Instr *instr;
    uint instrCount = 0;
    // Walk backwards from the branch to the block's label, giving up if the
    // tail contains anything unsafe (bail-out, non-CSE-able op) or more than
    // one instruction.
    for (instr = tailBranch->GetPrevRealInstrOrLabel(); !instr->IsLabelInstr(); instr = instr->GetPrevRealInstrOrLabel())
    {
        if (instr->HasBailOutInfo())
        {
            break;
        }
        if (!OpCodeAttr::CanCSE(instr->m_opcode))
        {
            // Consider: We could be more aggressive here
            break;
        }

        instrCount++;
        if (instrCount > 1)
        {
            // Consider: If copy handled single-def tmps renaming, we could do more instrs
            break;
        }
    }
    if (!instr->IsLabelInstr())
    {
        // The backward walk stopped before reaching the label: not duplicable.
        return false;
    }

    IR::LabelInstr *mergeLabel = instr->AsLabelInstr();
    IR::Instr *mergeLabelPrev = mergeLabel->m_prev;

    // Skip unreferenced labels
    while (mergeLabelPrev->IsLabelInstr() && mergeLabelPrev->AsLabelInstr()->labelRefs.Empty())
    {
        mergeLabelPrev = mergeLabelPrev->m_prev;
    }

    BasicBlock* labelBlock = mergeLabel->GetBasicBlock();
    uint origPredCount = labelBlock->GetPredList()->Count();
    uint dupCount = 0;

    // We are good to go. Let's do the tail duplication.
    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr*, branchEntry, &mergeLabel->labelRefs, iter)
    {
        // Only duplicate into unconditional, single-target branches that are
        // neither the fall-through predecessor nor the tail branch itself.
        if (branchEntry->IsUnconditional() && !branchEntry->IsMultiBranch() && branchEntry != mergeLabelPrev && branchEntry != tailBranch)
        {
            // Copy the tail's instructions in front of the predecessor branch.
            for (instr = mergeLabel->m_next; instr != tailBranch; instr = instr->m_next)
            {
                branchEntry->InsertBefore(instr->Copy());
            }
            instr = branchEntry;
            branchEntry->ReplaceTarget(mergeLabel, tailBranch->GetTarget());

            // Walk back to the predecessor block's own label to identify its block.
            while(!instr->IsLabelInstr())
            {
                instr = instr->m_prev;
            }
            BasicBlock* branchBlock = instr->AsLabelInstr()->GetBasicBlock();

            // Update the flow graph: the predecessor now jumps straight to
            // the tail branch's target instead of through the merge block.
            labelBlock->RemovePred(branchBlock, func->m_fg);
            func->m_fg->AddEdge(branchBlock, tailBranch->GetTarget()->GetBasicBlock());
            dupCount++;
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;

    // If we've duplicated everywhere, tail block is dead and should be removed.
    if (dupCount == origPredCount)
    {
        AssertMsg(mergeLabel->labelRefs.Empty(), "Should not remove block with referenced label.");
        func->m_fg->RemoveBlock(labelBlock, nullptr, true);
    }

    return true;
}
// Emit ToVar (un-specializing) loads at the end of 'block' for every stack
// sym in 'bv'. If the block ends in a branch (or BailTarget), the loads are
// inserted before that instruction, and any branch operand that reads one of
// the syms is hoisted first so the branch keeps reading the pre-conversion
// value; otherwise the loads go after the block's last instruction.
void
GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
{
    FOREACH_BITSET_IN_SPARSEBV(id, bv)
    {
        StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
        IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, TyVar, this->func);
        IR::Instr *lastInstr = block->GetLastInstr();
        if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
        {
            // If branch is using this symbol, hoist the operand as the ToVar load will get
            // inserted right before the branch.
            IR::Opnd *src1 = lastInstr->GetSrc1();
            if (src1)
            {
                if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
                {
                    lastInstr->HoistSrc1(Js::OpCode::Ld_A);
                }
                // NOTE(review): src2 is only examined when src1 is present;
                // an instruction with src2 but no src1 is presumably not
                // expected here — confirm before restructuring.
                IR::Opnd *src2 = lastInstr->GetSrc2();
                if (src2)
                {
                    if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
                    {
                        lastInstr->HoistSrc2(Js::OpCode::Ld_A);
                    }
                }
            }
            this->ToVar(lastInstr, newOpnd, block, nullptr, false);
        }
        else
        {
            // No terminating branch: insert after the last instruction.
            IR::Instr *lastNextInstr = lastInstr->m_next;
            this->ToVar(lastNextInstr, newOpnd, block, nullptr, false);
        }
    } NEXT_BITSET_IN_SPARSEBV;
}
  759. void
  760. GlobOpt::ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr)
  761. {
  762. return this->ToTypeSpec(bv, block, TyInt32, IR::BailOutIntOnly, lossy, insertBeforeInstr);
  763. }
  764. void
  765. GlobOpt::ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
  766. {
  767. return this->ToTypeSpec(bv, block, TyFloat64, IR::BailOutNumberOnly);
  768. }
// Emit a type conversion for every sym in 'bv' from its currently-live
// representation to 'toType'. The source representation is chosen by probing
// the block's liveness sets in order: var, lossless int32, float64 — exactly
// one of these must be live for each sym. 'bailOutKind' is attached to
// conversions that can fail at runtime.
void
GlobOpt::ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
{
    FOREACH_BITSET_IN_SPARSEBV(id, bv)
    {
        StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
        IRType fromType = TyIllegal;

        // Win8 bug: 757126. If we are trying to type specialize the arguments object,
        // let's make sure stack args optimization is not enabled. This is a problem, particularly,
        // if the instruction comes from an unreachable block. In other cases, the pass on the
        // instruction itself should disable arguments object optimization.
        if(block->globOptData.argObjSyms && block->globOptData.IsArgumentsSymID(id))
        {
            CannotAllocateArgumentsObjectOnStack(nullptr);
        }

        if (block->globOptData.liveVarSyms->Test(id))
        {
            fromType = TyVar;
        }
        else if (block->globOptData.liveInt32Syms->Test(id) && !block->globOptData.liveLossyInt32Syms->Test(id))
        {
            // Only a lossless int32 may serve as a conversion source; a lossy
            // int32 does not carry the full value.
            fromType = TyInt32;
            stackSym = stackSym->GetInt32EquivSym(this->func);
        }
        else if (block->globOptData.liveFloat64Syms->Test(id))
        {
            fromType = TyFloat64;
            stackSym = stackSym->GetFloat64EquivSym(this->func);
        }
        else
        {
            // Caller guarantees some representation is live for each sym.
            Assert(UNREACHED);
        }

        IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, fromType, this->func);
        this->ToTypeSpecUse(nullptr, newOpnd, block, nullptr, nullptr, toType, bailOutKind, lossy, insertBeforeInstr);
    } NEXT_BITSET_IN_SPARSEBV;
}
  806. void GlobOpt::PRE::FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc)
  807. {
  808. // Find the set of PRE candidates
  809. BasicBlock *loopHeader = loop->GetHeadBlock();
  810. PRECandidates *candidates = nullptr;
  811. bool firstBackEdge = true;
  812. FOREACH_PREDECESSOR_BLOCK(blockPred, loopHeader)
  813. {
  814. if (!loop->IsDescendentOrSelf(blockPred->loop))
  815. {
  816. // Not a loop back-edge
  817. continue;
  818. }
  819. if (firstBackEdge)
  820. {
  821. candidates = this->globOpt->FindBackEdgePRECandidates(blockPred, alloc);
  822. }
  823. else
  824. {
  825. blockPred->globOptData.RemoveUnavailableCandidates(candidates);
  826. }
  827. } NEXT_PREDECESSOR_BLOCK;
  828. this->candidates = candidates;
  829. }
// Try to preload one field-PRE candidate: insert a load of the property into
// the loop's landing pad so the loop body can reuse the hoisted value.
// Returns true when the load was inserted; false when the candidate had
// already been processed or cannot be preloaded (object sym not live in the
// landing pad and not single-def, sym store already live, etc.).
BOOL GlobOpt::PRE::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
{
    // Insert a load for each field PRE candidate.
    PropertySym *propertySym = candidate->value->AsPropertySym();
    // Each candidate is processed at most once across the fixed-point sweeps.
    if (!candidates->candidatesToProcess->TestAndClear(propertySym->m_id))
    {
        return false;
    }
    Value * propSymValueOnBackEdge = candidate->element;
    StackSym *objPtrSym = propertySym->m_stackSym;
    Sym * objPtrCopyPropSym = nullptr;
    if (!loop->landingPad->globOptData.IsLive(objPtrSym))
    {
        if (PHASE_OFF(Js::MakeObjSymLiveInLandingPadPhase, this->globOpt->func))
        {
            return false;
        }
        if (objPtrSym->IsSingleDef())
        {
            // We can still try to do PRE if the object sym is single def, even if its not live in the landing pad.
            // We'll have to add a def instruction for the object sym in the landing pad, and then we can continue
            // pre-loading the current PRE candidate.
            // Case in point:
            // $L1
            //          value|symStore
            // t1 = o.x (v1|t3)
            // t2 = t1.y (v2|t4) <-- t1 is not live in the loop landing pad
            // jmp $L1
            if (!InsertSymDefinitionInLandingPad(objPtrSym, loop, &objPtrCopyPropSym))
            {
#if DBG_DUMP
                TraceFailedPreloadInLandingPad(loop, propertySym, _u("Failed to insert load of object sym in landing pad"));
#endif
                return false;
            }
        }
        else
        {
#if DBG_DUMP
            TraceFailedPreloadInLandingPad(loop, propertySym, _u("Object sym not live in landing pad and not single-def"));
#endif
            return false;
        }
    }
    Assert(loop->landingPad->globOptData.IsLive(objPtrSym));

    BasicBlock *landingPad = loop->landingPad;
    Sym *symStore = propSymValueOnBackEdge->GetValueInfo()->GetSymStore();

    // The symStore can't be live into the loop
    // The symStore needs to still have the same value
    Assert(symStore && symStore->IsStackSym());

    if (loop->landingPad->globOptData.IsLive(symStore))
    {
        // May have already been hoisted:
        //  o.x = t1;
        //  o.y = t1;
        return false;
    }

    Value *landingPadValue = landingPad->globOptData.FindValue(propertySym);

    // Value should be added as initial value or already be there.
    Assert(landingPadValue);

    IR::Instr * ldInstrInLoop = this->globOpt->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
    Assert(ldInstrInLoop);
    Assert(ldInstrInLoop->GetDst() == nullptr);

    // Create instr to put in landing pad for compensation
    Assert(IsPREInstrCandidateLoad(ldInstrInLoop->m_opcode));
    IR::Instr * ldInstr = InsertPropertySymPreloadInLandingPad(ldInstrInLoop, loop, propertySym);
    if (!ldInstr)
    {
        return false;
    }

    // The preloaded value is defined into the sym store seen on the back-edge.
    Assert(ldInstr->GetDst() == nullptr);
    ldInstr->SetDst(IR::RegOpnd::New(symStore->AsStackSym(), TyVar, this->globOpt->func));
    loop->fieldPRESymStores->Set(symStore->m_id);
    landingPad->globOptData.liveVarSyms->Set(symStore->m_id);

    Value * objPtrValue = landingPad->globOptData.FindValue(objPtrSym);
    objPtrCopyPropSym = objPtrCopyPropSym ? objPtrCopyPropSym : objPtrValue ? landingPad->globOptData.GetCopyPropSym(objPtrSym, objPtrValue) : nullptr;
    if (objPtrCopyPropSym)
    {
        // If we inserted T4 = T1.y, and T3 is the copy prop sym for T1 in the landing pad, we need T3.y
        // to be live on back edges to have the merge produce a value for T3.y. Having a value for T1.y
        // produced from the merge is not enough as the T1.y in the loop will get obj-ptr-copy-propped to
        // T3.y

        // T3.y
        PropertySym *newPropSym = PropertySym::FindOrCreate(
            objPtrCopyPropSym->m_id, propertySym->m_propertyId, propertySym->GetPropertyIdIndex(), propertySym->GetInlineCacheIndex(), propertySym->m_fieldKind, this->globOpt->func);
        if (!landingPad->globOptData.FindValue(newPropSym))
        {
            landingPad->globOptData.SetValue(landingPadValue, newPropSym);
            landingPad->globOptData.liveFields->Set(newPropSym->m_id);
            MakePropertySymLiveOnBackEdges(newPropSym, loop, propSymValueOnBackEdge);
        }
    }

    // Derive the value type for the preloaded value. When the loop carries an
    // initial value for this property, a profiled load's FldInfo is updated
    // to reflect the back-edge value (or reset to Uninitialized on mismatch).
    ValueType valueType(ValueType::Uninitialized);
    Value *initialValue = nullptr;
    if (loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue))
    {
        if (ldInstr->IsProfiledInstr())
        {
            if (initialValue->GetValueNumber() == propSymValueOnBackEdge->GetValueNumber())
            {
                if (propSymValueOnBackEdge->GetValueInfo()->IsUninitialized())
                {
                    valueType = ldInstr->AsProfiledInstr()->u.FldInfo().valueType;
                }
                else
                {
                    valueType = propSymValueOnBackEdge->GetValueInfo()->Type();
                }
            }
            else
            {
                valueType = ValueType::Uninitialized;
            }
            ldInstr->AsProfiledInstr()->u.FldInfo().valueType = valueType;
        }
    }
    else
    {
        valueType = landingPadValue->GetValueInfo()->Type();
    }

    // The sym store now counts as used-before-defined inside the loop, which
    // feeds the type-specialization decisions for the loop entry.
    loop->symsUsedBeforeDefined->Set(symStore->m_id);

    if (valueType.IsLikelyNumber())
    {
        loop->likelyNumberSymsUsedBeforeDefined->Set(symStore->m_id);
        if (globOpt->DoAggressiveIntTypeSpec() ? valueType.IsLikelyInt() : valueType.IsInt())
        {
            // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
            // specialization is enabled
            loop->likelyIntSymsUsedBeforeDefined->Set(symStore->m_id);
        }
    }

#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->globOpt->func->GetSourceContextId(), this->globOpt->func->GetLocalFunctionId()))
    {
        Output::Print(_u("** TRACE: Field PRE: field pre-loaded in landing pad of loop head #%-3d: "), loop->GetHeadBlock()->GetBlockNum());
        ldInstr->Dump();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif

    return true;
}
  972. void GlobOpt::PRE::PreloadPRECandidates(Loop *loop)
  973. {
  974. // Insert loads in landing pad for field PRE candidates. Iterate while(changed)
  975. // for the o.x.y cases.
  976. BOOL changed = true;
  977. if (!candidates || !candidates->candidatesList)
  978. {
  979. return;
  980. }
  981. Assert(loop->landingPad->GetFirstInstr() == loop->landingPad->GetLastInstr());
  982. while (changed)
  983. {
  984. changed = false;
  985. FOREACH_SLIST_ENTRY_EDITING(GlobHashBucket*, candidate, (SList<GlobHashBucket*>*)candidates->candidatesList, iter)
  986. {
  987. if (this->PreloadPRECandidate(loop, candidate))
  988. {
  989. changed = true;
  990. iter.RemoveCurrent();
  991. }
  992. if (PHASE_TRACE(Js::FieldPREPhase, this->globOpt->func))
  993. {
  994. Output::Print(_u("============================\n"));
  995. Output::Flush();
  996. }
  997. } NEXT_SLIST_ENTRY_EDITING;
  998. }
  999. }
  1000. void GlobOpt::FieldPRE(Loop *loop)
  1001. {
  1002. if (!DoFieldPRE(loop))
  1003. {
  1004. return;
  1005. }
  1006. GlobOpt::PRE pre(this);
  1007. pre.FieldPRE(loop);
  1008. }
// Insert compensation code in 'predecessor' for array value infos whose
// tracking syms (head segment, head segment length, length) differ between
// the predecessor's state and the merged successor state. For each differing
// sym a copy instruction is inserted at the end of the predecessor, and the
// predecessor/successor liveness and value maps are updated to match.
void GlobOpt::InsertValueCompensation(
    BasicBlock *const predecessor,
    const SymToValueInfoMap &symsRequiringCompensationToMergedValueInfoMap)
{
    Assert(predecessor);
    Assert(symsRequiringCompensationToMergedValueInfoMap.Count() != 0);

    // Pick the insertion point: before the terminating branch (and its
    // ByteCodeUses) or after the last instruction of the block.
    IR::Instr *insertBeforeInstr = predecessor->GetLastInstr();
    Func *const func = insertBeforeInstr->m_func;
    bool setLastInstrInPredecessor;
    if(insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
    {
        // Don't insert code between the branch and the corresponding ByteCodeUses instructions
        while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
        {
            insertBeforeInstr = insertBeforeInstr->m_prev;
        }
        setLastInstrInPredecessor = false;
    }
    else
    {
        // Insert at the end of the block and set the last instruction
        Assert(insertBeforeInstr->m_next);
        insertBeforeInstr = insertBeforeInstr->m_next; // Instruction after the last instruction in the predecessor
        setLastInstrInPredecessor = true;
    }

    GlobOptBlockData &predecessorBlockData = predecessor->globOptData;
    GlobOptBlockData &successorBlockData = *CurrentBlockData();

    // Records a ChangeValueInfo call to be replayed after all syms have been
    // processed, so later iterations don't observe partially-updated infos.
    struct DelayChangeValueInfo
    {
        Value* predecessorValue;
        ArrayValueInfo* valueInfo;
        void ChangeValueInfo(BasicBlock* predecessor, GlobOpt* g)
        {
            g->ChangeValueInfo(
                predecessor,
                predecessorValue,
                valueInfo,
                false /*allowIncompatibleType*/,
                true /*compensated*/);
        }
    };
    JsUtil::List<DelayChangeValueInfo, ArenaAllocator> delayChangeValueInfo(alloc);
    for(auto it = symsRequiringCompensationToMergedValueInfoMap.GetIterator(); it.IsValid(); it.MoveNext())
    {
        const auto &entry = it.Current();
        Sym *const sym = entry.Key();
        Value *const predecessorValue = predecessorBlockData.FindValue(sym);
        Assert(predecessorValue);
        ValueInfo *const predecessorValueInfo = predecessorValue->GetValueInfo();

        // Currently, array value infos are the only ones that require compensation based on values
        Assert(predecessorValueInfo->IsAnyOptimizedArray());
        const ArrayValueInfo *const predecessorArrayValueInfo = predecessorValueInfo->AsArrayValueInfo();
        StackSym *const predecessorHeadSegmentSym = predecessorArrayValueInfo->HeadSegmentSym();
        StackSym *const predecessorHeadSegmentLengthSym = predecessorArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const predecessorLengthSym = predecessorArrayValueInfo->LengthSym();
        ValueInfo *const mergedValueInfo = entry.Value();
        const ArrayValueInfo *const mergedArrayValueInfo = mergedValueInfo->AsArrayValueInfo();
        StackSym *const mergedHeadSegmentSym = mergedArrayValueInfo->HeadSegmentSym();
        StackSym *const mergedHeadSegmentLengthSym = mergedArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const mergedLengthSym = mergedArrayValueInfo->LengthSym();
        Assert(!mergedHeadSegmentSym || predecessorHeadSegmentSym);
        Assert(!mergedHeadSegmentLengthSym || predecessorHeadSegmentLengthSym);
        Assert(!mergedLengthSym || predecessorLengthSym);

        bool compensated = false;
        // Head segment pointer differs: copy the predecessor's sym into the
        // merged sym (pointer-sized, Ld_A).
        if(mergedHeadSegmentSym && predecessorHeadSegmentSym != mergedHeadSegmentSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_A,
                    IR::RegOpnd::New(mergedHeadSegmentSym, mergedHeadSegmentSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentSym, predecessorHeadSegmentSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;
        }

        // Head segment length differs: copy the sym and merge its value into
        // both predecessor and successor data.
        if(mergedHeadSegmentLengthSym && predecessorHeadSegmentLengthSym != mergedHeadSegmentLengthSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedHeadSegmentLengthSym, mergedHeadSegmentLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentLengthSym, predecessorHeadSegmentLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the head segment length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorHeadSegmentLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            Value *const predecessorHeadSegmentLengthValue =
                predecessorBlockData.FindValue(predecessorHeadSegmentLengthSym);
            Assert(predecessorHeadSegmentLengthValue);
            predecessorBlockData.SetValue(predecessorHeadSegmentLengthValue, mergedHeadSegmentLengthSym);
            Value *const mergedHeadSegmentLengthValue = successorBlockData.FindValue(mergedHeadSegmentLengthSym);
            if(mergedHeadSegmentLengthValue)
            {
                Assert(mergedHeadSegmentLengthValue->GetValueNumber() != predecessorHeadSegmentLengthValue->GetValueNumber());
                if(predecessorHeadSegmentLengthValue->GetValueInfo() != mergedHeadSegmentLengthValue->GetValueInfo())
                {
                    mergedHeadSegmentLengthValue->SetValueInfo(
                        ValueInfo::MergeLikelyIntValueInfo(
                            this->alloc,
                            mergedHeadSegmentLengthValue,
                            predecessorHeadSegmentLengthValue,
                            mergedHeadSegmentLengthValue->GetValueInfo()->Type()
                                .Merge(predecessorHeadSegmentLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                successorBlockData.SetValue(CopyValue(predecessorHeadSegmentLengthValue), mergedHeadSegmentLengthSym);
            }
        }

        // Array length differs: same treatment as the head segment length.
        if(mergedLengthSym && predecessorLengthSym != mergedLengthSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedLengthSym, mergedLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorLengthSym, predecessorLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            Value *const predecessorLengthValue = predecessorBlockData.FindValue(predecessorLengthSym);
            Assert(predecessorLengthValue);
            predecessorBlockData.SetValue(predecessorLengthValue, mergedLengthSym);
            Value *const mergedLengthValue = successorBlockData.FindValue(mergedLengthSym);
            if(mergedLengthValue)
            {
                Assert(mergedLengthValue->GetValueNumber() != predecessorLengthValue->GetValueNumber());
                if(predecessorLengthValue->GetValueInfo() != mergedLengthValue->GetValueInfo())
                {
                    mergedLengthValue->SetValueInfo(
                        ValueInfo::MergeLikelyIntValueInfo(
                            this->alloc,
                            mergedLengthValue,
                            predecessorLengthValue,
                            mergedLengthValue->GetValueInfo()->Type().Merge(predecessorLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                successorBlockData.SetValue(CopyValue(predecessorLengthValue), mergedLengthSym);
            }
        }

        if(compensated)
        {
            // Save the new ValueInfo for later.
            // We don't want other symbols needing compensation to see this new one
            delayChangeValueInfo.Add({
                predecessorValue,
                ArrayValueInfo::New(
                    alloc,
                    predecessorValueInfo->Type(),
                    mergedHeadSegmentSym ? mergedHeadSegmentSym : predecessorHeadSegmentSym,
                    mergedHeadSegmentLengthSym ? mergedHeadSegmentLengthSym : predecessorHeadSegmentLengthSym,
                    mergedLengthSym ? mergedLengthSym : predecessorLengthSym,
                    predecessorValueInfo->GetSymStore())
            });
        }
    }

    // Once we've compensated all the symbols, update the new ValueInfo.
    delayChangeValueInfo.Map([predecessor, this](int, DelayChangeValueInfo d) { d.ChangeValueInfo(predecessor, this); });

    if(setLastInstrInPredecessor)
    {
        predecessor->SetLastInstr(insertBeforeInstr->m_prev);
    }
}
  1190. bool
  1191. GlobOpt::AreFromSameBytecodeFunc(IR::RegOpnd const* src1, IR::RegOpnd const* dst) const
  1192. {
  1193. Assert(this->func->m_symTable->FindStackSym(src1->m_sym->m_id) == src1->m_sym);
  1194. Assert(this->func->m_symTable->FindStackSym(dst->m_sym->m_id) == dst->m_sym);
  1195. if (dst->m_sym->HasByteCodeRegSlot() && src1->m_sym->HasByteCodeRegSlot())
  1196. {
  1197. return src1->m_sym->GetByteCodeFunc() == dst->m_sym->GetByteCodeFunc();
  1198. }
  1199. return false;
  1200. }
/*
 * This is for scope object removal along with Heap Arguments optimization.
 * We track several instructions to facilitate the removal of scope object.
 * - LdSlotArr - This instr is tracked to keep track of the formals array (the dest)
 * - InlineeStart - To keep track of the stack syms for the formals of the inlinee.
 */
void
GlobOpt::TrackInstrsForScopeObjectRemoval(IR::Instr * instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();

    if (instr->m_opcode == Js::OpCode::Ld_A && src1->IsRegOpnd())
    {
        // Copying the scope object would create an alias; the stack-args optimization
        // relies on the scope object never being aliased.
        AssertMsg(!instr->m_func->IsStackArgsEnabled() || !src1->IsScopeObjOpnd(instr->m_func), "There can be no aliasing for scope object.");
    }

    // The following is to track formals array for Stack Arguments optimization with Formals
    if (instr->m_func->IsStackArgsEnabled() && !this->IsLoopPrePass())
    {
        if (instr->m_opcode == Js::OpCode::LdSlotArr)
        {
            if (instr->GetSrc1()->IsScopeObjOpnd(instr->m_func))
            {
                AssertMsg(!instr->m_func->GetJITFunctionBody()->HasImplicitArgIns(), "No mapping is required in this case. So it should already be generating ArgIns.");
                // Remember the destination sym as the formals array loaded from the scope object.
                instr->m_func->TrackFormalsArraySym(dst->GetStackSym()->m_id);
            }
        }
        else if (instr->m_opcode == Js::OpCode::InlineeStart)
        {
            Assert(instr->m_func->IsInlined());
            // Counts include the "this" argument; subtract 1 to get user-visible args.
            Js::ArgSlot actualsCount = instr->m_func->actualCount - 1;
            Js::ArgSlot formalsCount = instr->m_func->GetJITFunctionBody()->GetInParamsCount() - 1;

            Func * func = instr->m_func;
            Func * inlinerFunc = func->GetParentFunc(); //Inliner's func

            // Walk the ArgOut chain backwards, starting from the link held by InlineeStart's src2.
            IR::Instr * argOutInstr = instr->GetSrc2()->GetStackSym()->GetInstrDef();

            //The argout immediately before the InlineeStart will be the ArgOut for NewScObject
            //So we don't want to track the stack sym for this argout.- Skipping it here.
            if (instr->m_func->IsInlinedConstructor())
            {
                //PRE might introduce a second defintion for the Src1. So assert for the opcode only when it has single definition.
                Assert(argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef() == nullptr ||
                    argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::NewScObjectNoCtor);
                argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
            }
            if (formalsCount < actualsCount)
            {
                Js::ArgSlot extraActuals = actualsCount - formalsCount;

                //Skipping extra actuals passed
                for (Js::ArgSlot i = 0; i < extraActuals; i++)
                {
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
            }

            StackSym * undefinedSym = nullptr;
            // Map each formal to a stack sym, highest param index first to match the
            // (backwards) order in which the ArgOut chain is being walked.
            for (Js::ArgSlot param = formalsCount; param > 0; param--)
            {
                StackSym * argOutSym = nullptr;

                if (argOutInstr->GetSrc1())
                {
                    if (argOutInstr->GetSrc1()->IsRegOpnd())
                    {
                        argOutSym = argOutInstr->GetSrc1()->GetStackSym();
                    }
                    else
                    {
                        // We will always have ArgOut instr - so the source operand will not be removed.
                        // Copy the non-register source into a fresh sym so the formal has a
                        // register-backed home.
                        argOutSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = argOutInstr->GetSrc1();
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(argOutSym, TyVar, inlinerFunc);
                        IR::Instr * assignInstr = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignInstr);
                    }
                }

                Assert(!func->HasStackSymForFormal(param - 1));

                if (param <= actualsCount)
                {
                    // This formal has a matching actual; track its sym and step to the next ArgOut.
                    Assert(argOutSym);
                    func->TrackStackSymForFormalIndex(param - 1, argOutSym);
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
                else
                {
                    /*When param is out of range of actuals count, load undefined*/
                    // TODO: saravind: This will insert undefined for each of the param not having an actual. - Clean up this by having a sym for undefined on func ?
                    Assert(formalsCount > actualsCount);
                    if (undefinedSym == nullptr)
                    {
                        // Materialize undefined once; reuse the same sym for every missing actual.
                        undefinedSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = IR::AddrOpnd::New(inlinerFunc->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicMisc, inlinerFunc);
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(undefinedSym, TyVar, inlinerFunc);
                        IR::Instr * assignUndefined = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignUndefined);
                    }
                    func->TrackStackSymForFormalIndex(param - 1, undefinedSym);
                }
            }
        }
    }
}
// Forward-pass tracking of the arguments object and its aliases, used to decide
// whether the arguments object can be allocated on the stack. Any use that could
// let the arguments object escape disables the optimization via
// CannotAllocateArgumentsObjectOnStack.
void
GlobOpt::OptArguments(IR::Instr *instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();

    TrackInstrsForScopeObjectRemoval(instr);

    if (!TrackArgumentsObject())
    {
        return;
    }

    if (instr->HasAnyLoadHeapArgsOpCode())
    {
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
        if (instr->m_func->IsStackArgsEnabled())
        {
            if (instr->GetSrc1()->IsRegOpnd() && instr->m_func->GetJITFunctionBody()->GetInParamsCount() > 1)
            {
                // Debug-only sanity checks: the source must be the scope object created by
                // InitCachedScope/NewScopeObject and already registered on the func.
                StackSym * scopeObjSym = instr->GetSrc1()->GetStackSym();
                Assert(scopeObjSym);
                Assert(scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::InitCachedScope || scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::NewScopeObject);
                Assert(instr->m_func->GetScopeObjSym() == scopeObjSym);
                if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase))
                {
                    Output::Print(_u("StackArgFormals : %s (%d) :Setting scopeObjSym in forward pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetJITFunctionBody()->GetFunctionNumber());
                    Output::Flush();
                }
            }
        }
#endif

        // Functions with formals can only use the stack-args opt when it is explicitly
        // enabled; otherwise the heap arguments load disqualifies the function.
        if (instr->m_func->GetJITFunctionBody()->GetInParamsCount() != 1 && !instr->m_func->IsStackArgsEnabled())
        {
            CannotAllocateArgumentsObjectOnStack(instr->m_func);
        }
        else
        {
            CurrentBlockData()->TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    // Keep track of arguments objects and its aliases
    // LdHeapArguments loads the arguments object and Ld_A tracks the aliases.
    if ((instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::BytecodeArgOutCapture) && (src1->IsRegOpnd() && CurrentBlockData()->IsArgumentsOpnd(src1)))
    {
        // In the debug mode, we don't want to optimize away the aliases. Since we may have to show them on the inspection.
        if (((!AreFromSameBytecodeFunc(src1->AsRegOpnd(), dst->AsRegOpnd()) || this->currentBlock->loop) && instr->m_opcode != Js::OpCode::BytecodeArgOutCapture) || this->func->IsJitInDebugMode())
        {
            // Alias crosses a function boundary or a loop (or we're debugging): give up.
            CannotAllocateArgumentsObjectOnStack(instr->m_func);
            return;
        }
        if(!dst->AsRegOpnd()->GetStackSym()->m_nonEscapingArgObjAlias)
        {
            CurrentBlockData()->TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    if (!CurrentBlockData()->TestAnyArgumentsSym())
    {
        // There are no syms to track yet, don't start tracking arguments sym.
        return;
    }

    // Avoid loop prepass
    if (this->currentBlock->loop && this->IsLoopPrePass())
    {
        return;
    }

    SymID id = 0;

    // Classify how this instruction uses the tracked arguments syms.
    switch(instr->m_opcode)
    {
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::TypeofElem:
    {
        Assert(src1->IsIndirOpnd());
        IR::RegOpnd *indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();

        if (indexOpnd && CurrentBlockData()->IsArgumentsSymID(indexOpnd->m_sym->m_id))
        {
            // Pathological test cases such as a[arguments]
            CannotAllocateArgumentsObjectOnStack(instr->m_func);
            return;
        }

        IR::RegOpnd *baseOpnd = src1->AsIndirOpnd()->GetBaseOpnd();
        id = baseOpnd->m_sym->m_id;
        if (CurrentBlockData()->IsArgumentsSymID(id))
        {
            // arguments[i] — a supported, non-escaping use.
            instr->usesStackArgumentsObject = true;
        }

        break;
    }
    case Js::OpCode::LdLen_A:
    {
        Assert(src1->IsRegOpnd());
        if(CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            // arguments.length — supported use.
            instr->usesStackArgumentsObject = true;
        }
        break;
    }
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    {
        if (CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
            instr->m_func->unoptimizableArgumentsObjReference++;
        }

        if (CurrentBlockData()->IsArgumentsOpnd(src1) &&
            src1->AsRegOpnd()->m_sym->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture)
        {
            // Apply inlining results in such usage - this is to ignore this sym that is def'd by ByteCodeArgOutCapture
            // It's needed because we do not have block level merging of arguments object and this def due to inlining can turn off stack args opt.
            IR::Instr* builtinStart = instr->GetNextRealInstr();
            if (builtinStart->m_opcode == Js::OpCode::InlineBuiltInStart)
            {
                IR::Opnd* builtinOpnd = builtinStart->GetSrc1();
                if (builtinStart->GetSrc1()->IsAddrOpnd())
                {
                    Assert(builtinOpnd->AsAddrOpnd()->m_isFunction);

                    Js::BuiltinFunction builtinFunction = Js::JavascriptLibrary::GetBuiltInForFuncInfo(((FixedFieldInfo*)builtinOpnd->AsAddrOpnd()->m_metadata)->GetLocalFuncId());
                    if (builtinFunction == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        // func.apply(..., arguments) with an inlined apply is benign:
                        // un-track the sym and undo the reference count bump above.
                        CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
                        instr->m_func->unoptimizableArgumentsObjReference--;
                    }
                }
                else if (builtinOpnd->IsRegOpnd())
                {
                    if (builtinOpnd->AsRegOpnd()->m_sym->m_builtInIndex == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
                        instr->m_func->unoptimizableArgumentsObjReference--;
                    }
                }
            }
        }
        break;
    }
    case Js::OpCode::BailOnNotStackArgs:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::BytecodeArgOutUse:
    {
        // These opcodes are part of the stack-args machinery itself; they consume the
        // arguments object without letting it escape.
        if (src1 && CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }

        break;
    }

    default:
        {
            // Super conservative here, if we see the arguments or any of its alias being used in any
            // other opcode just don't do this optimization. Revisit this to optimize further if we see any common
            // case is missed.

            if (src1)
            {
                if (src1->IsRegOpnd() || src1->IsSymOpnd() || src1->IsIndirOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(src1))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                }
            }

            if (src2)
            {
                if (src2->IsRegOpnd() || src2->IsSymOpnd() || src2->IsIndirOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(src2))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                }
            }

            // We should look at dst last to correctly handle cases where it's the same as one of the src operands.
            if (dst)
            {
                if (dst->IsIndirOpnd() || dst->IsSymOpnd())
                {
                    // Storing into/through the arguments object is a modification.
                    if (CurrentBlockData()->IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                }
                else if (dst->IsRegOpnd())
                {
                    // Redefining a tracked sym inside a loop could resurrect a stale alias
                    // on a back edge; outside a loop it simply stops being an alias.
                    if (this->currentBlock->loop && CurrentBlockData()->IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack(instr->m_func);
                        return;
                    }
                    CurrentBlockData()->ClearArgumentsSym(dst->AsRegOpnd());
                }
            }
        }
        break;
    }
    return;
}
  1521. void
  1522. GlobOpt::MarkArgumentsUsedForBranch(IR::Instr * instr)
  1523. {
  1524. // If it's a conditional branch instruction and the operand used for branching is one of the arguments
  1525. // to the function, tag the m_argUsedForBranch of the functionBody so that it can be used later for inlining decisions.
  1526. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsUnconditional())
  1527. {
  1528. IR::BranchInstr * bInstr = instr->AsBranchInstr();
  1529. IR::Opnd *src1 = bInstr->GetSrc1();
  1530. IR::Opnd *src2 = bInstr->GetSrc2();
  1531. // These are used because we don't want to rely on src1 or src2 to always be the register/constant
  1532. IR::RegOpnd *regOpnd = nullptr;
  1533. if (!src2 && (instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A) && src1->IsRegOpnd())
  1534. {
  1535. regOpnd = src1->AsRegOpnd();
  1536. }
  1537. // We need to check for (0===arg) and (arg===0); this is especially important since some minifiers
  1538. // change all instances of one to the other.
  1539. else if (src2 && src2->IsConstOpnd() && src1->IsRegOpnd())
  1540. {
  1541. regOpnd = src1->AsRegOpnd();
  1542. }
  1543. else if (src2 && src2->IsRegOpnd() && src1->IsConstOpnd())
  1544. {
  1545. regOpnd = src2->AsRegOpnd();
  1546. }
  1547. if (regOpnd != nullptr)
  1548. {
  1549. if (regOpnd->m_sym->IsSingleDef())
  1550. {
  1551. IR::Instr * defInst = regOpnd->m_sym->GetInstrDef();
  1552. IR::Opnd *defSym = defInst->GetSrc1();
  1553. if (defSym && defSym->IsSymOpnd() && defSym->AsSymOpnd()->m_sym->IsStackSym()
  1554. && defSym->AsSymOpnd()->m_sym->AsStackSym()->IsParamSlotSym())
  1555. {
  1556. uint16 param = defSym->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
  1557. // We only support functions with 13 arguments to ensure optimal size of callSiteInfo
  1558. if (param < Js::Constants::MaximumArgumentCountForConstantArgumentInlining)
  1559. {
  1560. this->func->GetJITOutput()->SetArgUsedForBranch((uint8)param);
  1561. }
  1562. }
  1563. }
  1564. }
  1565. }
  1566. }
  1567. const InductionVariable*
  1568. GlobOpt::GetInductionVariable(SymID sym, Loop *loop)
  1569. {
  1570. if (loop->inductionVariables)
  1571. {
  1572. for (auto it = loop->inductionVariables->GetIterator(); it.IsValid(); it.MoveNext())
  1573. {
  1574. InductionVariable* iv = &it.CurrentValueReference();
  1575. if (!iv->IsChangeDeterminate() || !iv->IsChangeUnidirectional())
  1576. {
  1577. continue;
  1578. }
  1579. if (iv->Sym()->m_id == sym)
  1580. {
  1581. return iv;
  1582. }
  1583. }
  1584. }
  1585. return nullptr;
  1586. }
  1587. bool
  1588. GlobOpt::IsSymIDInductionVariable(SymID sym, Loop *loop)
  1589. {
  1590. return GetInductionVariable(sym, loop) != nullptr;
  1591. }
  1592. SymID
  1593. GlobOpt::GetVarSymID(StackSym *sym)
  1594. {
  1595. if (sym && sym->m_type != TyVar)
  1596. {
  1597. sym = sym->GetVarEquivSym(nullptr);
  1598. }
  1599. if (!sym)
  1600. {
  1601. return Js::Constants::InvalidSymID;
  1602. }
  1603. return sym->m_id;
  1604. }
// Gatekeeper for the memset/memcopy loop optimizations: validates the base array and
// index operand of an element access inside the current loop. Returns false (with a
// memop trace when enabled) on any disqualifying condition.
// NOTE(review): the isMemset parameter is unused in this body — presumably kept for
// interface symmetry with the callers; confirm before removing.
bool
GlobOpt::IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd)
{
    Assert(instr);
    if (!baseOpnd || !indexOpnd)
    {
        return false;
    }
    Loop* loop = this->currentBlock->loop;

    const ValueType baseValueType(baseOpnd->GetValueType());
    const ValueType indexValueType(indexOpnd->GetValueType());

    // Validate the array and index types
    if (
        !indexValueType.IsInt() ||
            !(
                baseValueType.IsTypedIntOrFloatArray() ||
                baseValueType.IsArray()
            )
        )
    {
#if DBG_DUMP
        wchar indexValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        indexValueType.ToString(indexValueTypeStr);
        wchar baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index[%s] or Array[%s] value type is invalid"), indexValueTypeStr, baseValueTypeStr);
#endif
        return false;
    }

    // The following is conservative and works around a bug in induction variable analysis.
    if (baseOpnd->IsArrayRegOpnd())
    {
        IR::ArrayRegOpnd *baseArrayOp = baseOpnd->AsArrayRegOpnd();
        // Require that bounds checks were fully eliminated (not merely hoisted/extracted).
        bool hasBoundChecksRemoved = (
            baseArrayOp->EliminatedLowerBoundCheck() &&
            baseArrayOp->EliminatedUpperBoundCheck() &&
            !instr->extractedUpperBoundCheckWithoutHoisting &&
            !instr->loadedArrayHeadSegment &&
            !instr->loadedArrayHeadSegmentLength
            );
        if (!hasBoundChecksRemoved)
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("Missing bounds check optimization"));
            return false;
        }
    }

    if (!baseValueType.IsTypedArray())
    {
        // Check if the instr can kill the value type of the array
        JsArrayKills arrayKills = CheckJsArrayKills(instr);
        if (arrayKills.KillsValueType(baseValueType))
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The array (s%d) can lose its value type"), GetVarSymID(baseOpnd->GetStackSym()));
            return false;
        }
    }

    // The base array must be invariant across the loop.
    if (!this->OptIsInvariant(baseOpnd, this->currentBlock, loop, CurrentBlockData()->FindValue(baseOpnd->m_sym), false, true))
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Base (s%d) is not invariant"), GetVarSymID(baseOpnd->GetStackSym()));
        return false;
    }

    // Validate the index
    Assert(indexOpnd->GetStackSym());
    SymID indexSymID = GetVarSymID(indexOpnd->GetStackSym());
    const InductionVariable* iv = GetInductionVariable(indexSymID, loop);
    if (!iv)
    {
        // If the index is not an induction variable return
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index (s%d) is not an induction variable"), indexSymID);
        return false;
    }

    Assert(iv->IsChangeDeterminate() && iv->IsChangeUnidirectional());
    const IntConstantBounds & bounds = iv->ChangeBounds();

    if (loop->memOpInfo)
    {
        // Only accept induction variables that increments by 1
        Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
        inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(indexSymID, inductionVariableChangeInfo);

        if (
            (bounds.LowerBound() != 1 && bounds.LowerBound() != -1) ||
            (bounds.UpperBound() != bounds.LowerBound()) ||
            inductionVariableChangeInfo.unroll > 1 // Must be 0 (not seen yet) or 1 (already seen)
        )
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The index does not change by 1: %d><%d, unroll=%d"), bounds.LowerBound(), bounds.UpperBound(), inductionVariableChangeInfo.unroll);
            return false;
        }

        // Check if the index is the same in all MemOp optimization in this loop
        if (!loop->memOpInfo->candidates->Empty())
        {
            Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();

            // All MemOp operations within the same loop must use the same index
            if (previousCandidate->index != indexSymID)
            {
                TRACE_MEMOP_VERBOSE(loop, instr, _u("The index is not the same as other MemOp in the loop"));
                return false;
            }
        }
    }

    return true;
}
  1707. bool
  1708. GlobOpt::CollectMemcopyLdElementI(IR::Instr *instr, Loop *loop)
  1709. {
  1710. Assert(instr->GetSrc1()->IsIndirOpnd());
  1711. IR::IndirOpnd *src1 = instr->GetSrc1()->AsIndirOpnd();
  1712. IR::Opnd *indexOpnd = src1->GetIndexOpnd();
  1713. IR::RegOpnd *baseOpnd = src1->GetBaseOpnd()->AsRegOpnd();
  1714. SymID baseSymID = GetVarSymID(baseOpnd->GetStackSym());
  1715. if (!IsAllowedForMemOpt(instr, false, baseOpnd, indexOpnd))
  1716. {
  1717. return false;
  1718. }
  1719. SymID inductionSymID = GetVarSymID(indexOpnd->GetStackSym());
  1720. Assert(IsSymIDInductionVariable(inductionSymID, loop));
  1721. loop->EnsureMemOpVariablesInitialized();
  1722. bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
  1723. IR::Opnd * dst = instr->GetDst();
  1724. if (!dst->IsRegOpnd() || !dst->AsRegOpnd()->GetStackSym()->IsSingleDef())
  1725. {
  1726. return false;
  1727. }
  1728. Loop::MemCopyCandidate* memcopyInfo = memcopyInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemCopyCandidate);
  1729. memcopyInfo->ldBase = baseSymID;
  1730. memcopyInfo->ldCount = 1;
  1731. memcopyInfo->count = 0;
  1732. memcopyInfo->bIndexAlreadyChanged = isIndexPreIncr;
  1733. memcopyInfo->base = Js::Constants::InvalidSymID; //need to find the stElem first
  1734. memcopyInfo->index = inductionSymID;
  1735. memcopyInfo->transferSym = dst->AsRegOpnd()->GetStackSym();
  1736. loop->memOpInfo->candidates->Prepend(memcopyInfo);
  1737. return true;
  1738. }
// Records a StElemI as a potential memset candidate: the stored value must be either a
// loop-invariant register or a constant (float/int/address). Returns true when a
// MemSetCandidate was prepended to the loop's candidate list.
bool
GlobOpt::CollectMemsetStElementI(IR::Instr *instr, Loop *loop)
{
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();

    if (!IsAllowedForMemOpt(instr, true, baseOp, indexOp))
    {
        return false;
    }

    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    IR::Opnd *srcDef = instr->GetSrc1();
    StackSym *srcSym = nullptr;
    // Case 1: the source is a register that is invariant across the loop.
    if (srcDef->IsRegOpnd())
    {
        IR::RegOpnd* opnd = srcDef->AsRegOpnd();
        if (this->OptIsInvariant(opnd, this->currentBlock, loop, CurrentBlockData()->FindValue(opnd->m_sym), true, true))
        {
            srcSym = opnd->GetStackSym();
        }
    }

    // Case 2: the source is a constant; capture its value for the memset.
    BailoutConstantValue constant = {TyIllegal, 0};
    if (srcDef->IsFloatConstOpnd())
    {
        constant.InitFloatConstValue(srcDef->AsFloatConstOpnd()->m_value);
    }
    else if (srcDef->IsIntConstOpnd())
    {
        constant.InitIntConstValue(srcDef->AsIntConstOpnd()->GetValue(), srcDef->AsIntConstOpnd()->GetType());
    }
    else if (srcDef->IsAddrOpnd())
    {
        constant.InitVarConstValue(srcDef->AsAddrOpnd()->m_address);
    }
    else if(!srcSym)
    {
        // Neither an invariant register nor a recognized constant: not memset-able.
        TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Source is not an invariant"));
        return false;
    }

    // Process the Index Operand
    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));

    loop->EnsureMemOpVariablesInitialized();
    // Index already changed in this iteration iff its change info was recorded before this store.
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);

    Loop::MemSetCandidate* memsetInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemSetCandidate);
    memsetInfo->base = baseSymID;
    memsetInfo->index = inductionSymID;
    memsetInfo->constant = constant;
    memsetInfo->srcSym = srcSym;
    memsetInfo->count = 1;
    memsetInfo->bIndexAlreadyChanged = isIndexPreIncr;
    loop->memOpInfo->candidates->Prepend(memsetInfo);
    return true;
}
// Matches a StElemI against the most recent memcopy candidate created by
// CollectMemcopyLdElementI: the stored register must be the candidate's transfer sym,
// dying at this store, with the same induction variable in the same pre/post-increment
// state. On success the candidate's destination base is filled in and true is returned.
bool GlobOpt::CollectMemcopyStElementI(IR::Instr *instr, Loop *loop)
{
    if (!loop->memOpInfo || loop->memOpInfo->candidates->Empty())
    {
        // There is no ldElem matching this stElem
        return false;
    }
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();
    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    if (!instr->GetSrc1()->IsRegOpnd())
    {
        return false;
    }
    IR::RegOpnd* src1 = instr->GetSrc1()->AsRegOpnd();

    if (!src1->GetIsDead())
    {
        // This must be the last use of the register.
        // It will invalidate `var m = a[i]; b[i] = m;` but this is not a very interesting case.
        // NOTE(review): the trace prints baseSymID although the message refers to the
        // source register — confirm whether the source sym id was intended here.
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Source (s%d) is still alive after StElemI"), baseSymID);
        return false;
    }

    if (!IsAllowedForMemOpt(instr, false, baseOp, indexOp))
    {
        return false;
    }

    SymID srcSymID = GetVarSymID(src1->GetStackSym());

    // Prepare the memcopyCandidate entry
    Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();
    if (!previousCandidate->IsMemCopy())
    {
        return false;
    }
    Loop::MemCopyCandidate* memcopyInfo = previousCandidate->AsMemCopy();

    // The previous candidate has to have been created by the matching ldElem
    if (
        memcopyInfo->base != Js::Constants::InvalidSymID ||
        GetVarSymID(memcopyInfo->transferSym) != srcSymID
    )
    {
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("No matching LdElem found (s%d)"), baseSymID);
        return false;
    }

    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));
    // The pre/post-increment state must match the load's, otherwise the load and store
    // would address different elements.
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
    if (isIndexPreIncr != memcopyInfo->bIndexAlreadyChanged)
    {
        // The index changed between the load and the store
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Index value changed between ldElem and stElem"));
        return false;
    }

    // Consider: Can we remove the count field?
    memcopyInfo->count++;
    memcopyInfo->base = baseSymID;

    return true;
}
  1855. bool
  1856. GlobOpt::CollectMemOpLdElementI(IR::Instr *instr, Loop *loop)
  1857. {
  1858. Assert(instr->m_opcode == Js::OpCode::LdElemI_A);
  1859. return (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyLdElementI(instr, loop));
  1860. }
  1861. bool
  1862. GlobOpt::CollectMemOpStElementI(IR::Instr *instr, Loop *loop)
  1863. {
  1864. Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict);
  1865. Assert(instr->GetSrc1());
  1866. return (!PHASE_OFF(Js::MemSetPhase, this->func) && CollectMemsetStElementI(instr, loop)) ||
  1867. (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyStElementI(instr, loop));
  1868. }
  1869. bool
  1870. GlobOpt::CollectMemOpInfo(IR::Instr *instrBegin, IR::Instr *instr, Value *src1Val, Value *src2Val)
  1871. {
  1872. Assert(this->currentBlock->loop);
  1873. Loop *loop = this->currentBlock->loop;
  1874. if (!loop->blockList.HasTwo())
  1875. {
  1876. // We support memcopy and memset for loops which have only two blocks.
  1877. return false;
  1878. }
  1879. if (loop->GetLoopFlags().isInterpreted && !loop->GetLoopFlags().memopMinCountReached)
  1880. {
  1881. TRACE_MEMOP_VERBOSE(loop, instr, _u("minimum loop count not reached"))
  1882. loop->doMemOp = false;
  1883. return false;
  1884. }
  1885. Assert(loop->doMemOp);
  1886. bool isIncr = true, isChangedByOne = false;
  1887. switch (instr->m_opcode)
  1888. {
  1889. case Js::OpCode::StElemI_A:
  1890. case Js::OpCode::StElemI_A_Strict:
  1891. if (!CollectMemOpStElementI(instr, loop))
  1892. {
  1893. loop->doMemOp = false;
  1894. return false;
  1895. }
  1896. break;
  1897. case Js::OpCode::LdElemI_A:
  1898. if (!CollectMemOpLdElementI(instr, loop))
  1899. {
  1900. loop->doMemOp = false;
  1901. return false;
  1902. }
  1903. break;
  1904. case Js::OpCode::Decr_A:
  1905. isIncr = false;
  1906. case Js::OpCode::Incr_A:
  1907. isChangedByOne = true;
  1908. goto MemOpCheckInductionVariable;
  1909. case Js::OpCode::Sub_I4:
  1910. case Js::OpCode::Sub_A:
  1911. isIncr = false;
  1912. case Js::OpCode::Add_A:
  1913. case Js::OpCode::Add_I4:
  1914. {
  1915. MemOpCheckInductionVariable:
  1916. StackSym *sym = instr->GetSrc1()->GetStackSym();
  1917. if (!sym)
  1918. {
  1919. sym = instr->GetSrc2()->GetStackSym();
  1920. }
  1921. SymID inductionSymID = GetVarSymID(sym);
  1922. if (IsSymIDInductionVariable(inductionSymID, this->currentBlock->loop))
  1923. {
  1924. if (!isChangedByOne)
  1925. {
  1926. IR::Opnd *src1, *src2;
  1927. src1 = instr->GetSrc1();
  1928. src2 = instr->GetSrc2();
  1929. if (src2->IsRegOpnd())
  1930. {
  1931. Value *val = CurrentBlockData()->FindValue(src2->AsRegOpnd()->m_sym);
  1932. if (val)
  1933. {
  1934. ValueInfo *vi = val->GetValueInfo();
  1935. int constValue;
  1936. if (vi && vi->TryGetIntConstantValue(&constValue))
  1937. {
  1938. if (constValue == 1)
  1939. {
  1940. isChangedByOne = true;
  1941. }
  1942. }
  1943. }
  1944. }
  1945. else if (src2->IsIntConstOpnd())
  1946. {
  1947. if (src2->AsIntConstOpnd()->GetValue() == 1)
  1948. {
  1949. isChangedByOne = true;
  1950. }
  1951. }
  1952. }
  1953. loop->EnsureMemOpVariablesInitialized();
  1954. if (!isChangedByOne)
  1955. {
  1956. Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { Js::Constants::InvalidLoopUnrollFactor, 0 };
  1957. if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
  1958. {
  1959. loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
  1960. }
  1961. else
  1962. {
  1963. loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
  1964. }
  1965. }
  1966. else
  1967. {
  1968. if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
  1969. {
  1970. Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 1, isIncr };
  1971. loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
  1972. }
  1973. else
  1974. {
  1975. Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
  1976. inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(inductionSymID, inductionVariableChangeInfo);
  1977. inductionVariableChangeInfo.unroll++;
  1978. inductionVariableChangeInfo.isIncremental = isIncr;
  1979. loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
  1980. }
  1981. }
  1982. break;
  1983. }
  1984. // Fallthrough if not an induction variable
  1985. }
  1986. default:
  1987. FOREACH_INSTR_IN_RANGE(chkInstr, instrBegin->m_next, instr)
  1988. {
  1989. if (IsInstrInvalidForMemOp(chkInstr, loop, src1Val, src2Val))
  1990. {
  1991. loop->doMemOp = false;
  1992. return false;
  1993. }
  1994. // Make sure this instruction doesn't use the memcopy transfer sym before it is checked by StElemI
  1995. if (loop->memOpInfo && !loop->memOpInfo->candidates->Empty())
  1996. {
  1997. Loop::MemOpCandidate* prevCandidate = loop->memOpInfo->candidates->Head();
  1998. if (prevCandidate->IsMemCopy())
  1999. {
  2000. Loop::MemCopyCandidate* memcopyCandidate = prevCandidate->AsMemCopy();
  2001. if (memcopyCandidate->base == Js::Constants::InvalidSymID)
  2002. {
  2003. if (chkInstr->HasSymUse(memcopyCandidate->transferSym))
  2004. {
  2005. loop->doMemOp = false;
  2006. TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, chkInstr, _u("Found illegal use of LdElemI value(s%d)"), GetVarSymID(memcopyCandidate->transferSym));
  2007. return false;
  2008. }
  2009. }
  2010. }
  2011. }
  2012. }
  2013. NEXT_INSTR_IN_RANGE;
  2014. }
  2015. return true;
  2016. }
  2017. bool
  2018. GlobOpt::IsInstrInvalidForMemOp(IR::Instr *instr, Loop *loop, Value *src1Val, Value *src2Val)
  2019. {
  2020. // List of instruction that are valid with memop (ie: instr that gets removed if memop is emitted)
  2021. if (
  2022. this->currentBlock != loop->GetHeadBlock() &&
  2023. !instr->IsLabelInstr() &&
  2024. instr->IsRealInstr() &&
  2025. instr->m_opcode != Js::OpCode::IncrLoopBodyCount &&
  2026. instr->m_opcode != Js::OpCode::StLoopBodyCount &&
  2027. instr->m_opcode != Js::OpCode::Ld_A &&
  2028. instr->m_opcode != Js::OpCode::Ld_I4 &&
  2029. !(instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
  2030. )
  2031. {
  2032. TRACE_MEMOP_VERBOSE(loop, instr, _u("Instruction not accepted for memop"));
  2033. return true;
  2034. }
  2035. // Check prev instr because it could have been added by an optimization and we won't see it here.
  2036. if (OpCodeAttr::FastFldInstr(instr->m_opcode) || (instr->m_prev && OpCodeAttr::FastFldInstr(instr->m_prev->m_opcode)))
  2037. {
  2038. // Refuse any operations interacting with Fields
  2039. TRACE_MEMOP_VERBOSE(loop, instr, _u("Field interaction detected"));
  2040. return true;
  2041. }
  2042. if (Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementSlot)
  2043. {
  2044. // Refuse any operations interacting with slots
  2045. TRACE_MEMOP_VERBOSE(loop, instr, _u("Slot interaction detected"));
  2046. return true;
  2047. }
  2048. if (this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val))
  2049. {
  2050. TRACE_MEMOP_VERBOSE(loop, instr, _u("Implicit call bailout detected"));
  2051. return true;
  2052. }
  2053. return false;
  2054. }
  2055. void
  2056. GlobOpt::TryReplaceLdLen(IR::Instr *& instr)
  2057. {
  2058. // Change LdLen on objects other than arrays, strings, and 'arguments' to LdFld. Otherwise, convert the SymOpnd to a RegOpnd here.
  2059. if (instr->m_opcode == Js::OpCode::LdLen_A && instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd())
  2060. {
  2061. IR::SymOpnd * opnd = instr->GetSrc1()->AsSymOpnd();
  2062. Sym *sym = opnd->m_sym;
  2063. Assert(sym->IsPropertySym());
  2064. PropertySym *originalPropertySym = sym->AsPropertySym();
  2065. IR::RegOpnd* newopnd = IR::RegOpnd::New(originalPropertySym->m_stackSym, IRType::TyVar, instr->m_func);
  2066. ValueInfo *const objectValueInfo = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym)->GetValueInfo();
  2067. // things we'd emit a fast path for
  2068. if (
  2069. objectValueInfo->IsLikelyAnyArray() ||
  2070. objectValueInfo->HasHadStringTag() ||
  2071. objectValueInfo->IsLikelyString() ||
  2072. newopnd->IsArgumentsObject() ||
  2073. (CurrentBlockData()->argObjSyms && CurrentBlockData()->IsArgumentsOpnd(newopnd))
  2074. )
  2075. {
  2076. // We need to properly transfer over the information from the old operand, which is
  2077. // a SymOpnd, to the new one, which is a RegOpnd. Unfortunately, the types mean the
  2078. // normal copy methods won't work here, so we're going to directly copy data.
  2079. newopnd->SetIsJITOptimizedReg(opnd->GetIsJITOptimizedReg());
  2080. newopnd->SetValueType(objectValueInfo->Type());
  2081. newopnd->SetIsDead(opnd->GetIsDead());
  2082. instr->ReplaceSrc1(newopnd);
  2083. }
  2084. else
  2085. {
  2086. // otherwise, change the instruction to an LdFld here.
  2087. instr->m_opcode = Js::OpCode::LdFld;
  2088. }
  2089. }
  2090. }
// Per-instruction driver of the global optimizer's forward pass.
//
// Params:
//   instr          - in/out: the instruction to optimize. Helpers called here may
//                    replace it (the reference is updated in place).
//   isInstrRemoved - out: set to true when this routine removes 'instr' from the
//                    current block; the caller must not touch it afterwards.
// Returns the next instruction to process.
IR::Instr *
GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
{
    Assert(instr->m_func->IsTopFunc() || instr->m_func->isGetterSetter || instr->m_func->callSiteIdInParentFunc != UINT16_MAX);

    IR::Opnd *src1, *src2;
    Value *src1Val = nullptr, *src2Val = nullptr, *dstVal = nullptr;
    Value *src1IndirIndexVal = nullptr, *dstIndirIndexVal = nullptr;
    // Capture neighbors up front: 'instr' itself may be replaced or removed below.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *instrNext = instr->m_next;

    // When optimizing functions with try blocks, labels carry the EH region; keep
    // currentRegion in sync as we cross region boundaries.
    if (instr->IsLabelInstr() && this->func->HasTry() && this->func->DoOptimizeTry())
    {
        this->currentRegion = instr->AsLabelInstr()->GetRegion();
        Assert(this->currentRegion);
    }

    // Bookkeeping instructions for the int-overflow-ignoring range are consumed
    // here; outside the prepass they are removed from the block entirely.
    if(PrepareForIgnoringIntOverflow(instr))
    {
        if(!IsLoopPrePass())
        {
            *isInstrRemoved = true;
            currentBlock->RemoveInstr(instr);
        }
        return instrNext;
    }

    // Pseudo-instructions, byte-code-use markers, and Conv_Bool are not optimized.
    if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
    {
        return instrNext;
    }

    if (instr->m_opcode == Js::OpCode::Yield)
    {
        // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
        CurrentBlockData()->KillStateForGeneratorYield();
    }

    if (!IsLoopPrePass())
    {
        // Change LdLen on objects other than arrays, strings, and 'arguments' to LdFld.
        this->TryReplaceLdLen(instr);
    }

    // Consider: Do we ever get post-op bailout here, and if so is the FillBailOutInfo call in the right place?
    if (instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        this->FillBailOutInfo(this->currentBlock, instr);
    }

    this->instrCountSinceLastCleanUp++;

    instr = this->PreOptPeep(instr);

    this->OptArguments(instr);

    //StackArguments Optimization - We bail out if the index is out of range of actuals.
    if ((instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
        instr->DoStackArgsOpt() && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, IR::BailOnStackArgsOutOfActualsRange);
    }

#if DBG
    // Snapshot the byte-code syms used before optimization so we can verify after
    // optimizing that no byte-code uses were silently dropped.
    PropertySym *propertySymUseBefore = nullptr;
    Assert(this->byteCodeUses == nullptr);
    this->byteCodeUsesBeforeOpt->ClearAll();
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
    Assert(noImplicitCallUsesToInsert->Count() == 0);
#endif

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    // Value-number and copy-prop the sources. src1 must be processed before src2.
    src1 = instr->GetSrc1();
    src2 = instr->GetSrc2();

    if (src1)
    {
        src1Val = this->OptSrc(src1, &instr, &src1IndirIndexVal);
        GOPT_TRACE_VALUENUMBER(_u("[src1] "), instr->GetSrc1(), _u("%d"), src1Val ? src1Val->GetValueNumber() : -1);
        instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
        if (src2)
        {
            src2Val = this->OptSrc(src2, &instr);
            GOPT_TRACE_VALUENUMBER(_u("[src2] "), instr->GetSrc2(), _u("%d"), src2Val ? src2Val->GetValueNumber() : -1);
        }
    }
    // An indir dst has source-like base/index operands; optimize them too.
    if(instr->GetDst() && instr->GetDst()->IsIndirOpnd())
    {
        this->OptSrc(instr->GetDst(), &instr, &dstIndirIndexVal);
    }

    MarkArgumentsUsedForBranch(instr);
    CSEOptimize(this->currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal);
    OptimizeChecks(instr);
    OptArraySrc(&instr, &src1Val, &src2Val);
    OptNewScObject(&instr, src1Val);
    OptStackArgLenAndConst(instr, &src1Val);

    instr = this->OptPeep(instr, src1Val, src2Val);

    // Remove no-ops and CheckThis on a sym already known to be a safe 'this'.
    if (instr->m_opcode == Js::OpCode::Nop ||
        (instr->m_opcode == Js::OpCode::CheckThis &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc1()->AsRegOpnd()->m_sym->m_isSafeThis))
    {
        instrNext = instr->m_next;
        InsertNoImplicitCallUses(instr);
        if (this->byteCodeUses)
        {
            this->InsertByteCodeUses(instr);
        }
        *isInstrRemoved = true;
        this->currentBlock->RemoveInstr(instr);
        return instrNext;
    }
    else if (instr->m_opcode == Js::OpCode::GetNewScObject && !this->IsLoopPrePass() && src1Val->GetValueInfo()->IsPrimitive())
    {
        // Constructor returned (src1) a primitive value, so fold this into "dst = Ld_A src2", where src2 is the new object that
        // was passed into the constructor as its 'this' parameter
        instr->FreeSrc1();
        instr->SetSrc1(instr->UnlinkSrc2());
        instr->m_opcode = Js::OpCode::Ld_A;
        src1Val = src2Val;
        src2Val = nullptr;
    }
    else if ((instr->m_opcode == Js::OpCode::TryCatch && this->func->DoOptimizeTry()) || (instr->m_opcode == Js::OpCode::TryFinally && this->func->DoOptimizeTry()))
    {
        ProcessTryHandler(instr);
    }
    else if (instr->m_opcode == Js::OpCode::BrOnException || instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        if (this->ProcessExceptionHandlingEdges(instr))
        {
            *isInstrRemoved = true;
            return instrNext;
        }
    }

    bool isAlreadyTypeSpecialized = false;

    // Handle switch-opt bailouts: the value is expected to be a specific primitive
    // kind; if the profile contradicts that expectation, rejit without switch opt.
    if (!IsLoopPrePass() && instr->HasBailOutInfo())
    {
        if (instr->GetBailOutKind() == IR::BailOutExpectingInteger)
        {
            isAlreadyTypeSpecialized = TypeSpecializeBailoutExpectedInteger(instr, src1Val, &dstVal);
        }
        else if (instr->GetBailOutKind() == IR::BailOutExpectingString)
        {
            if (instr->GetSrc1()->IsRegOpnd())
            {
                if (!src1Val || !src1Val->GetValueInfo()->IsLikelyString())
                {
                    // Disable SwitchOpt if the source is definitely not a string - This may be realized only in Globopt
                    Assert(IsSwitchOptEnabled());
                    throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
                }
            }
        }
    }

    bool forceInvariantHoisting = false;
    const bool ignoreIntOverflowInRangeForInstr = instr->ignoreIntOverflowInRange; // Save it since the instr can change

    if (!isAlreadyTypeSpecialized)
    {
        bool redoTypeSpec;
        instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);

        // Type specialization may ask for a second attempt (e.g. after changing the
        // instruction's form); it must converge after one retry.
        if(redoTypeSpec && instr->m_opcode != Js::OpCode::Nop)
        {
            forceInvariantHoisting = false;
            instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);
            Assert(!redoTypeSpec);
        }
        if (instr->m_opcode == Js::OpCode::Nop)
        {
            InsertNoImplicitCallUses(instr);
            if (this->byteCodeUses)
            {
                this->InsertByteCodeUses(instr);
            }
            instrNext = instr->m_next;
            *isInstrRemoved = true;
            this->currentBlock->RemoveInstr(instr);
            return instrNext;
        }
    }

    if (ignoreIntOverflowInRangeForInstr)
    {
        VerifyIntSpecForIgnoringIntOverflow(instr);
    }

    // Track calls after any pre-op bailouts have been inserted before the call, because they will need to restore out params.
    this->TrackCalls(instr);

    if (instr->GetSrc1())
    {
        this->UpdateObjPtrValueType(instr->GetSrc1(), instr);
    }
    IR::Opnd *dst = instr->GetDst();

    if (dst)
    {
        // Copy prop dst uses and mark live/available type syms before tracking kills.
        CopyPropDstUses(dst, instr, src1Val);
    }

    // Track mark temp object before we process the dst so we can generate pre-op bailout
    instr = this->TrackMarkTempObject(instrPrev->m_next, instr);

    bool removed = OptTagChecks(instr);
    if (removed)
    {
        *isInstrRemoved = true;
        return instrNext;
    }

    dstVal = this->OptDst(&instr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);
    if (dst)
    {
        GOPT_TRACE_VALUENUMBER(_u("[dst] "), instr->GetDst(), _u("%d\n"), dstVal ? dstVal->GetValueNumber() : -1);
    }

    // Re-read dst/next: OptDst may have changed the instruction.
    dst = instr->GetDst();

    instrNext = instr->m_next;
    if (dst)
    {
        if (this->func->HasTry() && this->func->DoOptimizeTry())
        {
            this->InsertToVarAtDefInTryRegion(instr, dst);
        }
        instr = this->SetTypeCheckBailOut(dst, instr, nullptr);
        this->UpdateObjPtrValueType(dst, instr);
    }

    BVSparse<JitArenaAllocator> instrByteCodeStackSymUsedAfter(this->alloc);
    PropertySym *propertySymUseAfter = nullptr;
    if (this->byteCodeUses != nullptr)
    {
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
    }
#if DBG
    else
    {
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
        instrByteCodeStackSymUsedAfter.Equal(this->byteCodeUsesBeforeOpt);
        Assert(propertySymUseAfter == propertySymUseBefore);
    }
#endif

    bool isHoisted = false;
    if (this->currentBlock->loop && !this->IsLoopPrePass())
    {
        isHoisted = this->TryHoistInvariant(instr, this->currentBlock, dstVal, src1Val, src2Val, true, false, forceInvariantHoisting);
    }

    src1 = instr->GetSrc1();
    if (!this->IsLoopPrePass() && src1)
    {
        // instr const, nonConst => canonicalize by swapping operands
        // This simplifies lowering. (somewhat machine dependent)
        // Note that because of Var overflows, src1 may not have been constant prop'd to an IntConst
        this->PreLowerCanonicalize(instr, &src1Val, &src2Val);
    }

    // Collect memset/memcopy candidates, but only when the memop phase is enabled,
    // the instruction stayed in a loop body (not hoisted), and profiling allows it.
    if (!PHASE_OFF(Js::MemOpPhase, this->func) &&
        !isHoisted &&
        !(instr->IsJitProfilingInstr()) &&
        this->currentBlock->loop && !IsLoopPrePass() &&
        !func->IsJitInDebugMode() &&
        (func->HasProfileInfo() && !func->GetReadOnlyProfileInfo()->IsMemOpDisabled()) &&
        this->currentBlock->loop->doMemOp)
    {
        CollectMemOpInfo(instrPrev, instr, src1Val, src2Val);
    }

    InsertNoImplicitCallUses(instr);
    if (this->byteCodeUses != nullptr)
    {
        // Optimization removed some uses from the instruction.
        // Need to insert fake uses so we can get the correct live register to restore in bailout.
        this->byteCodeUses->Minus(&instrByteCodeStackSymUsedAfter);
        if (this->propertySymUse == propertySymUseAfter)
        {
            this->propertySymUse = nullptr;
        }
        this->InsertByteCodeUses(instr);
    }

    if (!this->IsLoopPrePass() && !isHoisted && this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val))
    {
        IR::BailOutKind kind = IR::BailOutOnImplicitCalls;
        if(instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if((bailOutKind & ~IR::BailOutKindBits) != IR::BailOutOnImplicitCallsPreOp)
            {
                Assert(!(bailOutKind & ~IR::BailOutKindBits));
                instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
            }
        }
        else if (instr->forcePreOpBailOutIfNeeded || this->isRecursiveCallOnLandingPad)
        {
            // We can't have a byte code reg slot as dst to generate a
            // pre-op implicit call after we have processed the dst.
            // Consider: This might miss an opportunity to use a copy prop sym to restore
            // some other byte code reg if the dst is that copy prop that we already killed.
            Assert(!instr->GetDst()
                || !instr->GetDst()->IsRegOpnd()
                || instr->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg()
                || !instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot());
            this->GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
        }
        else
        {
            // Capture value of the bailout after the operation is done.
            this->GenerateBailAfterOperation(&instr, kind);
        }
    }

    if (CurrentBlockData()->capturedValuesCandidate && !this->IsLoopPrePass())
    {
        this->CommitCapturedValuesCandidate();
    }

#if DBG
    if (CONFIG_FLAG(ValidateIntRanges) && !IsLoopPrePass())
    {
        if (instr->ShouldEmitIntRangeCheck())
        {
            this->EmitIntRangeChecks(instr);
        }
    }
#endif

    return instrNext;
}
// Optimize away redundant tag checks (checks that a Var is a real object pointer
// rather than a tagged value), or split a tag check out of an instruction as a
// separate hoistable BailOnNotObject.
//
// Params:
//   instr - the instruction to examine; may have a bailout instruction inserted
//           before it.
// Returns true when 'instr' itself was removed from the block (only possible for
// BailOnNotObject on a value already known to be untagged); false otherwise.
bool
GlobOpt::OptTagChecks(IR::Instr *instr)
{
    if (PHASE_OFF(Js::OptTagChecksPhase, this->func) || !this->DoTagChecks())
    {
        return false;
    }

    // Identify the stack sym whose tag state this instruction checks, and remember
    // which operand form it came from (symOpnd vs regOpnd) for later updates.
    StackSym *stackSym = nullptr;
    IR::SymOpnd *symOpnd = nullptr;
    IR::RegOpnd *regOpnd = nullptr;
    switch(instr->m_opcode)
    {
    case Js::OpCode::LdFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::CheckFixedFld:
    case Js::OpCode::CheckPropertyGuardAndLoadType:
        symOpnd = instr->GetSrc1()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;

    case Js::OpCode::BailOnNotObject:
    case Js::OpCode::BailOnNotArray:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            regOpnd = instr->GetSrc1()->AsRegOpnd();
            stackSym = regOpnd->m_sym;
        }
        break;

    case Js::OpCode::StFld:
        symOpnd = instr->GetDst()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;
    }

    if (stackSym)
    {
        Value *value = CurrentBlockData()->FindValue(stackSym);
        if (value)
        {
            ValueInfo *valInfo = value->GetValueInfo();
            // Skip syms backed by the byte-code constant table.
            if (valInfo->GetSymStore() && valInfo->GetSymStore()->IsStackSym() && valInfo->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable())
            {
                return false;
            }
            ValueType valueType = value->GetValueInfo()->Type();
            if (instr->m_opcode == Js::OpCode::BailOnNotObject)
            {
                if (valueType.CanBeTaggedValue())
                {
                    // We're not adding new information to the value other than changing the value type. Preserve any existing
                    // information and just change the value type.
                    ChangeValueType(nullptr, value, valueType.SetCanBeTaggedValue(false), true /*preserveSubClassInfo*/);
                    return false;
                }
                // The value is already known to be untagged: the check is redundant
                // and the instruction can be removed.
                if (this->byteCodeUses)
                {
                    this->InsertByteCodeUses(instr);
                }
                this->currentBlock->RemoveInstr(instr);
                return true;
            }
            // For the field-access cases: if the object might still be tagged (and
            // was never a number, so the check can't spuriously fail on numeric
            // paths), split out an explicit, separately hoistable tag check.
            if (valueType.CanBeTaggedValue() &&
                !valueType.HasBeenNumber() &&
                !this->IsLoopPrePass())
            {
                ValueType newValueType = valueType.SetCanBeTaggedValue(false);

                // Split out the tag check as a separate instruction.
                IR::Instr *bailOutInstr;
                bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, IR::BailOutOnTaggedValue, instr, instr->m_func);
                if (!this->IsLoopPrePass())
                {
                    FillBailOutInfo(this->currentBlock, bailOutInstr);
                }
                IR::RegOpnd *srcOpnd = regOpnd;
                if (!srcOpnd)
                {
                    // The checked sym came from a SymOpnd; materialize a RegOpnd for it.
                    srcOpnd = IR::RegOpnd::New(stackSym, stackSym->GetType(), instr->m_func);
                    AnalysisAssert(symOpnd);
                    if (symOpnd->GetIsJITOptimizedReg())
                    {
                        srcOpnd->SetIsJITOptimizedReg(true);
                    }
                }
                bailOutInstr->SetSrc1(srcOpnd);
                bailOutInstr->GetSrc1()->SetValueType(valueType);
                bailOutInstr->SetByteCodeOffset(instr);
                instr->InsertBefore(bailOutInstr);
                if (this->currentBlock->loop)
                {
                    // Try hoisting the BailOnNotObject instr.
                    // But since this isn't the current instr being optimized, we need to play tricks with
                    // the byteCodeUse fields...
                    TrackByteCodeUsesForInstrAddedInOptInstr(bailOutInstr, [&]()
                    {
                        TryHoistInvariant(bailOutInstr, this->currentBlock, nullptr, value, nullptr, true, false, false, IR::BailOutOnTaggedValue);
                    });
                }
                // Propagate the now-proven "not tagged" fact to the operand and the value.
                if (symOpnd)
                {
                    symOpnd->SetPropertyOwnerValueType(newValueType);
                }
                else
                {
                    regOpnd->SetValueType(newValueType);
                }
                ChangeValueType(nullptr, value, newValueType, false);
            }
        }
    }

    return false;
}
  2501. bool
  2502. GlobOpt::TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal)
  2503. {
  2504. bool isAlreadyTypeSpecialized = false;
  2505. if(instr->GetSrc1()->IsRegOpnd())
  2506. {
  2507. if (!src1Val || !src1Val->GetValueInfo()->IsLikelyInt() || instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber)
  2508. {
  2509. Assert(IsSwitchOptEnabledForIntTypeSpec());
  2510. throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
  2511. }
  2512. // Attach the BailOutExpectingInteger to FromVar and Remove the bail out info on the Ld_A (Begin Switch) instr.
  2513. this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, TyInt32, IR::BailOutExpectingInteger, false, instr);
  2514. //TypeSpecialize the dst of Ld_A
  2515. TypeSpecializeIntDst(instr, instr->m_opcode, src1Val, src1Val, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, dstVal);
  2516. isAlreadyTypeSpecialized = true;
  2517. }
  2518. instr->ClearBailOutInfo();
  2519. return isAlreadyTypeSpecialized;
  2520. }
// Process the destination operand of 'instr' in the forward pass: generate/adjust
// bailouts for native-array and non-SSE2 float typed-array stores, apply kills,
// value-number the dst, and record the instruction for CSE.
//
// Params:
//   pInstr            - in/out: the instruction; may be replaced by bailout generation.
//   dstVal            - value already computed for the dst by earlier phases, or nullptr.
//   src1Val/src2Val   - values of the sources (used for dst value numbering and CSE).
//   dstIndirIndexVal,
//   src1IndirIndexVal - values of indir index operands, forwarded to CSE.
// Returns the (possibly newly computed) value of the dst, or nullptr.
Value*
GlobOpt::OptDst(
    IR::Instr ** pInstr,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    Value *dstIndirIndexVal,
    Value *src1IndirIndexVal)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *opnd = instr->GetDst();

    if (opnd)
    {
        if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
        }
        else if (instr->m_opcode == Js::OpCode::StElemI_A ||
                 instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                 instr->m_opcode == Js::OpCode::InitComputedProperty)
        {
            // Element stores can add properties, invalidating object-header-inlined types.
            this->KillObjectHeaderInlinedTypeSyms(this->currentBlock, false);
        }

        if (opnd->IsIndirOpnd() && !this->IsLoopPrePass())
        {
            IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
            const ValueType baseValueType(baseOpnd->GetValueType());
            // Storing a Var into a likely-native array (or, on x86 without SSE2,
            // into a float typed array) has no fast path in the lowerer; adjust
            // bailouts accordingly.
            if ((
                    baseValueType.IsLikelyNativeArray() ||
#ifdef _M_IX86
                    (
                        !AutoSystemInfo::Data.SSE2Available() &&
                        baseValueType.IsLikelyObject() &&
                        (
                            baseValueType.GetObjectType() == ObjectType::Float32Array ||
                            baseValueType.GetObjectType() == ObjectType::Float64Array
                        )
                    )
#else
                    false
#endif
                ) &&
                instr->GetSrc1()->IsVar())
            {
                if(instr->m_opcode == Js::OpCode::StElemC)
                {
                    // StElemC has different code that handles native array conversion or missing value stores. Add a bailout
                    // for those cases.
                    Assert(baseValueType.IsLikelyNativeArray());
                    Assert(!instr->HasBailOutInfo());
                    GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
                }
                else if(instr->HasBailOutInfo())
                {
                    // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
                    // path. Note that the removed bailouts should not be necessary for correctness. Bailout on native array
                    // conversion will be handled automatically as normal.
                    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                    if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
                    {
                        bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
                    }
                    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
                    {
                        bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
                    }
                    if(bailOutKind)
                    {
                        instr->SetBailOutKind(bailOutKind);
                    }
                    else
                    {
                        instr->ClearBailOutInfo();
                    }
                }
            }
        }
    }

    this->ProcessKills(instr);

    if (opnd)
    {
        if (dstVal == nullptr)
        {
            dstVal = ValueNumberDst(pInstr, src1Val, src2Val);
        }
        if (this->IsLoopPrePass())
        {
            // Keep track of symbols defined in the loop.
            if (opnd->IsRegOpnd())
            {
                StackSym *symDst = opnd->AsRegOpnd()->m_sym;
                rootLoopPrePass->symsDefInLoop->Set(symDst->m_id);
            }
        }
        else if (dstVal)
        {
            opnd->SetValueType(dstVal->GetValueInfo()->Type());

            if(currentBlock->loop &&
               !IsLoopPrePass() &&
               (instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::Ld_I4) &&
               instr->GetSrc1()->IsRegOpnd() &&
               !func->IsJitInDebugMode() &&
               func->DoGlobOptsForGeneratorFunc())
            {
                // Look for the following patterns:
                //
                // Pattern 1:
                //     s1[liveOnBackEdge] = s3[dead]
                //
                // Pattern 2:
                //     s3 = operation(s1[liveOnBackEdge], s2)
                //     s1[liveOnBackEdge] = s3
                //
                // In both patterns, s1 and s3 have the same value by the end. Prefer to use s1 as the sym store instead of s3
                // since s1 is live on back-edge, as otherwise, their lifetimes overlap, requiring two registers to hold the
                // value instead of one.
                do
                {
                    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
                    StackSym *srcVarSym = src->m_sym;
                    if(srcVarSym->IsTypeSpec())
                    {
                        srcVarSym = srcVarSym->GetVarEquivSym(nullptr);
                        Assert(srcVarSym);
                    }
                    if(dstVal->GetValueInfo()->GetSymStore() != srcVarSym)
                    {
                        break;
                    }

                    IR::RegOpnd *const dst = opnd->AsRegOpnd();
                    StackSym *dstVarSym = dst->m_sym;
                    if(dstVarSym->IsTypeSpec())
                    {
                        dstVarSym = dstVarSym->GetVarEquivSym(nullptr);
                        Assert(dstVarSym);
                    }
                    if(!currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(dstVarSym->m_id))
                    {
                        break;
                    }

                    Value *const srcValue = CurrentBlockData()->FindValue(srcVarSym);
                    if(srcValue->GetValueNumber() != dstVal->GetValueNumber())
                    {
                        break;
                    }

                    if(!src->GetIsDead())
                    {
                        // For pattern 2: the previous real instruction must be the
                        // one that defined src from dst, or the lifetimes overlap.
                        IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                        IR::Opnd *const prevDst = prevInstr->GetDst();
                        if(!prevDst ||
                           !src->IsEqualInternal(prevDst) ||
                           !(
                               (prevInstr->GetSrc1() && dst->IsEqual(prevInstr->GetSrc1())) ||
                               (prevInstr->GetSrc2() && dst->IsEqual(prevInstr->GetSrc2()))
                           ))
                        {
                            break;
                        }
                    }

                    this->SetSymStoreDirect(dstVal->GetValueInfo(), dstVarSym);
                } while(false);
            }
        }

        this->ValueNumberObjectType(opnd, instr);
    }

    this->CSEAddInstr(this->currentBlock, *pInstr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);

    return dstVal;
}
  2689. void
  2690. GlobOpt::CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val)
  2691. {
  2692. if (opnd->IsSymOpnd())
  2693. {
  2694. IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
  2695. if (symOpnd->m_sym->IsPropertySym())
  2696. {
  2697. PropertySym * originalPropertySym = symOpnd->m_sym->AsPropertySym();
  2698. Value *const objectValue = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym);
  2699. symOpnd->SetPropertyOwnerValueType(objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
  2700. this->CopyPropPropertySymObj(symOpnd, instr);
  2701. }
  2702. }
  2703. }
  2704. void
  2705. GlobOpt::SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym)
  2706. {
  2707. Value *initialValue = nullptr;
  2708. StackSym *symStore;
  2709. if (loop->allFieldsKilled || loop->fieldKilled->Test(originalPropertySym->m_id) || loop->fieldKilled->Test(propertySym->m_id))
  2710. {
  2711. return;
  2712. }
  2713. // Value already exists
  2714. if (CurrentBlockData()->FindValue(propertySym))
  2715. {
  2716. return;
  2717. }
  2718. // If this initial value was already added, we would find in the current value table.
  2719. Assert(!loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue));
  2720. // If propertySym is live in landingPad, we don't need an initial value.
  2721. if (loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
  2722. {
  2723. return;
  2724. }
  2725. StackSym * objectSym = propertySym->m_stackSym;
  2726. Value *landingPadObjPtrVal, *currentObjPtrVal;
  2727. landingPadObjPtrVal = loop->landingPad->globOptData.FindValue(objectSym);
  2728. currentObjPtrVal = CurrentBlockData()->FindValue(objectSym);
  2729. auto CanSetInitialValue = [&]() -> bool {
  2730. if (!currentObjPtrVal)
  2731. {
  2732. return false;
  2733. }
  2734. if (landingPadObjPtrVal)
  2735. {
  2736. return currentObjPtrVal->GetValueNumber() == landingPadObjPtrVal->GetValueNumber();
  2737. }
  2738. else
  2739. {
  2740. if (!objectSym->IsSingleDef())
  2741. {
  2742. return false;
  2743. }
  2744. IR::Instr * defInstr = objectSym->GetInstrDef();
  2745. IR::Opnd * src1 = defInstr->GetSrc1();
  2746. while (!(src1 && src1->IsSymOpnd() && src1->AsSymOpnd()->m_sym->IsPropertySym()))
  2747. {
  2748. if (src1 && src1->IsRegOpnd() && src1->AsRegOpnd()->GetStackSym()->IsSingleDef())
  2749. {
  2750. defInstr = src1->AsRegOpnd()->GetStackSym()->GetInstrDef();
  2751. src1 = defInstr->GetSrc1();
  2752. }
  2753. else
  2754. {
  2755. return false;
  2756. }
  2757. }
  2758. return true;
  2759. // Todo: allow other kinds of operands as src1 of instr def of the object sym of the current propertySym
  2760. // SymOpnd, but not PropertySymOpnd - LdSlotArr, some LdSlots (?)
  2761. // nullptr - NewScObject
  2762. }
  2763. };
  2764. if (!CanSetInitialValue())
  2765. {
  2766. // objPtr has a different value in the landing pad.
  2767. return;
  2768. }
  2769. // The opnd's value type has not yet been initialized. Since the property sym doesn't have a value, it effectively has an
  2770. // Uninitialized value type. Use the profiled value type from the instruction.
  2771. const ValueType profiledValueType =
  2772. instr->IsProfiledInstr() ? instr->AsProfiledInstr()->u.FldInfo().valueType : ValueType::Uninitialized;
  2773. Assert(!profiledValueType.IsDefinite()); // Hence the values created here don't need to be tracked for kills
  2774. initialValue = this->NewGenericValue(profiledValueType, propertySym);
  2775. symStore = StackSym::New(this->func);
  2776. initialValue->GetValueInfo()->SetSymStore(symStore);
  2777. loop->initialValueFieldMap.Add(propertySym, initialValue->Copy(this->alloc, initialValue->GetValueNumber()));
  2778. // Copy the initial value into the landing pad, but without a symStore
  2779. Value *landingPadInitialValue = Value::New(this->alloc, initialValue->GetValueNumber(),
  2780. ValueInfo::New(this->alloc, initialValue->GetValueInfo()->Type()));
  2781. loop->landingPad->globOptData.SetValue(landingPadInitialValue, propertySym);
  2782. loop->landingPad->globOptData.liveFields->Set(propertySym->m_id);
  2783. #if DBG_DUMP
  2784. if (PHASE_TRACE(Js::FieldPREPhase, this->func))
  2785. {
  2786. Output::Print(_u("** TRACE: Field PRE initial value for loop head #%d. Val:%d symStore:"),
  2787. loop->GetHeadBlock()->GetBlockNum(), initialValue->GetValueNumber());
  2788. symStore->Dump();
  2789. Output::Print(_u("\n Instr: "));
  2790. instr->Dump();
  2791. Output::Flush();
  2792. }
  2793. #endif
  2794. // Add initial value to all the previous blocks in the loop.
  2795. FOREACH_BLOCK_BACKWARD_IN_RANGE(block, this->currentBlock->GetPrev(), loop->GetHeadBlock())
  2796. {
  2797. if (block->GetDataUseCount() == 0)
  2798. {
  2799. // All successor blocks have been processed, no point in adding the value.
  2800. continue;
  2801. }
  2802. Value *newValue = initialValue->Copy(this->alloc, initialValue->GetValueNumber());
  2803. block->globOptData.SetValue(newValue, propertySym);
  2804. block->globOptData.liveFields->Set(propertySym->m_id);
  2805. block->globOptData.SetValue(newValue, symStore);
  2806. block->globOptData.liveVarSyms->Set(symStore->m_id);
  2807. } NEXT_BLOCK_BACKWARD_IN_RANGE;
  2808. CurrentBlockData()->SetValue(initialValue, symStore);
  2809. CurrentBlockData()->liveVarSyms->Set(symStore->m_id);
  2810. CurrentBlockData()->liveFields->Set(propertySym->m_id);
  2811. }
  2812. // Examine src, apply copy prop and value number it
  2813. Value*
  2814. GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, IR::IndirOpnd *parentIndirOpnd)
  2815. {
  2816. IR::Instr * &instr = *pInstr;
  2817. Assert(!indirIndexValRef || !*indirIndexValRef);
  2818. Assert(
  2819. parentIndirOpnd
  2820. ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
  2821. : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
  2822. Sym *sym;
  2823. Value *val;
  2824. PropertySym *originalPropertySym = nullptr;
  2825. switch(opnd->GetKind())
  2826. {
  2827. case IR::OpndKindIntConst:
  2828. val = this->GetIntConstantValue(opnd->AsIntConstOpnd()->AsInt32(), instr);
  2829. opnd->SetValueType(val->GetValueInfo()->Type());
  2830. return val;
  2831. case IR::OpndKindInt64Const:
  2832. val = this->GetIntConstantValue(opnd->AsInt64ConstOpnd()->GetValue(), instr);
  2833. opnd->SetValueType(val->GetValueInfo()->Type());
  2834. return val;
  2835. case IR::OpndKindFloatConst:
  2836. {
  2837. const FloatConstType floatValue = opnd->AsFloatConstOpnd()->m_value;
  2838. int32 int32Value;
  2839. if(Js::JavascriptNumber::TryGetInt32Value(floatValue, &int32Value))
  2840. {
  2841. val = GetIntConstantValue(int32Value, instr);
  2842. }
  2843. else
  2844. {
  2845. val = NewFloatConstantValue(floatValue);
  2846. }
  2847. opnd->SetValueType(val->GetValueInfo()->Type());
  2848. return val;
  2849. }
  2850. case IR::OpndKindAddr:
  2851. {
  2852. IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
  2853. if (addrOpnd->m_isFunction)
  2854. {
  2855. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func), "Fixed function address operand with fixed method calls phase disabled?");
  2856. val = NewFixedFunctionValue((Js::JavascriptFunction *)addrOpnd->m_address, addrOpnd);
  2857. opnd->SetValueType(val->GetValueInfo()->Type());
  2858. return val;
  2859. }
  2860. else if (addrOpnd->IsVar() && Js::TaggedInt::Is(addrOpnd->m_address))
  2861. {
  2862. val = this->GetIntConstantValue(Js::TaggedInt::ToInt32(addrOpnd->m_address), instr);
  2863. opnd->SetValueType(val->GetValueInfo()->Type());
  2864. return val;
  2865. }
  2866. val = this->GetVarConstantValue(addrOpnd);
  2867. return val;
  2868. }
  2869. case IR::OpndKindSym:
  2870. {
  2871. // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
  2872. // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
  2873. // value if available, before returning from this function.
  2874. opnd->SetValueType(ValueType::Uninitialized);
  2875. sym = opnd->AsSymOpnd()->m_sym;
  2876. // Don't create a new value for ArgSlots and don't copy prop them away.
  2877. if (sym->IsStackSym() && sym->AsStackSym()->IsArgSlotSym())
  2878. {
  2879. return nullptr;
  2880. }
  2881. // Unless we have profile info, don't create a new value for ArgSlots and don't copy prop them away.
  2882. if (sym->IsStackSym() && sym->AsStackSym()->IsParamSlotSym())
  2883. {
  2884. if (!instr->m_func->IsLoopBody() && instr->m_func->HasProfileInfo())
  2885. {
  2886. // Skip "this" pointer.
  2887. int paramSlotNum = sym->AsStackSym()->GetParamSlotNum() - 2;
  2888. if (paramSlotNum >= 0)
  2889. {
  2890. const auto parameterType = instr->m_func->GetReadOnlyProfileInfo()->GetParameterInfo(static_cast<Js::ArgSlot>(paramSlotNum));
  2891. val = NewGenericValue(parameterType);
  2892. opnd->SetValueType(val->GetValueInfo()->Type());
  2893. return val;
  2894. }
  2895. }
  2896. return nullptr;
  2897. }
  2898. if (!sym->IsPropertySym())
  2899. {
  2900. break;
  2901. }
  2902. originalPropertySym = sym->AsPropertySym();
  2903. // Dont give a vale to 'arguments' property sym to prevent field copy prop of 'arguments'
  2904. if (originalPropertySym->AsPropertySym()->m_propertyId == Js::PropertyIds::arguments &&
  2905. originalPropertySym->AsPropertySym()->m_fieldKind == PropertyKindData)
  2906. {
  2907. return nullptr;
  2908. }
  2909. Value *const objectValue = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym);
  2910. opnd->AsSymOpnd()->SetPropertyOwnerValueType(
  2911. objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
  2912. sym = this->CopyPropPropertySymObj(opnd->AsSymOpnd(), instr);
  2913. if (!DoFieldCopyProp())
  2914. {
  2915. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  2916. {
  2917. this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
  2918. }
  2919. return nullptr;
  2920. }
  2921. switch (instr->m_opcode)
  2922. {
  2923. // These need the symbolic reference to the field, don't copy prop the value of the field
  2924. case Js::OpCode::DeleteFld:
  2925. case Js::OpCode::DeleteRootFld:
  2926. case Js::OpCode::DeleteFldStrict:
  2927. case Js::OpCode::DeleteRootFldStrict:
  2928. case Js::OpCode::ScopedDeleteFld:
  2929. case Js::OpCode::ScopedDeleteFldStrict:
  2930. case Js::OpCode::LdMethodFromFlags:
  2931. case Js::OpCode::BrOnNoProperty:
  2932. case Js::OpCode::BrOnHasProperty:
  2933. case Js::OpCode::LdMethodFldPolyInlineMiss:
  2934. case Js::OpCode::StSlotChkUndecl:
  2935. case Js::OpCode::ScopedLdInst:
  2936. return nullptr;
  2937. };
  2938. if (instr->CallsGetter())
  2939. {
  2940. return nullptr;
  2941. }
  2942. if (this->IsLoopPrePass() && this->DoFieldPRE(this->rootLoopPrePass))
  2943. {
  2944. if (!this->prePassLoop->allFieldsKilled && !this->prePassLoop->fieldKilled->Test(sym->m_id))
  2945. {
  2946. this->SetLoopFieldInitialValue(this->rootLoopPrePass, instr, sym->AsPropertySym(), originalPropertySym);
  2947. }
  2948. if (this->IsPREInstrCandidateLoad(instr->m_opcode))
  2949. {
  2950. // Foreach property sym, remember the first instruction that loads it.
  2951. // Can this be done in one call?
  2952. if (!this->prePassInstrMap->ContainsKey(sym->m_id))
  2953. {
  2954. this->prePassInstrMap->AddNew(sym->m_id, instr->CopyWithoutDst());
  2955. }
  2956. }
  2957. }
  2958. break;
  2959. }
  2960. case IR::OpndKindReg:
  2961. // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
  2962. // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
  2963. // value if available, before returning from this function.
  2964. opnd->SetValueType(ValueType::Uninitialized);
  2965. sym = opnd->AsRegOpnd()->m_sym;
  2966. CurrentBlockData()->MarkTempLastUse(instr, opnd->AsRegOpnd());
  2967. if (sym->AsStackSym()->IsTypeSpec())
  2968. {
  2969. sym = sym->AsStackSym()->GetVarEquivSym(this->func);
  2970. }
  2971. break;
  2972. case IR::OpndKindIndir:
  2973. this->OptimizeIndirUses(opnd->AsIndirOpnd(), &instr, indirIndexValRef);
  2974. return nullptr;
  2975. default:
  2976. return nullptr;
  2977. }
  2978. val = CurrentBlockData()->FindValue(sym);
  2979. if (val)
  2980. {
  2981. Assert(CurrentBlockData()->IsLive(sym) || (sym->IsPropertySym()));
  2982. if (instr)
  2983. {
  2984. opnd = this->CopyProp(opnd, instr, val, parentIndirOpnd);
  2985. }
  2986. // Check if we freed the operand.
  2987. if (opnd == nullptr)
  2988. {
  2989. return nullptr;
  2990. }
  2991. // In a loop prepass, determine stack syms that are used before they are defined in the root loop for which the prepass
  2992. // is being done. This information is used to do type specialization conversions in the landing pad where appropriate.
  2993. if(IsLoopPrePass() &&
  2994. sym->IsStackSym() &&
  2995. !rootLoopPrePass->symsUsedBeforeDefined->Test(sym->m_id) &&
  2996. rootLoopPrePass->landingPad->globOptData.IsLive(sym) && !isAsmJSFunc) // no typespec in asmjs and hence skipping this
  2997. {
  2998. Value *const landingPadValue = rootLoopPrePass->landingPad->globOptData.FindValue(sym);
  2999. if(landingPadValue && val->GetValueNumber() == landingPadValue->GetValueNumber())
  3000. {
  3001. rootLoopPrePass->symsUsedBeforeDefined->Set(sym->m_id);
  3002. ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
  3003. if(landingPadValueInfo->IsLikelyNumber())
  3004. {
  3005. rootLoopPrePass->likelyNumberSymsUsedBeforeDefined->Set(sym->m_id);
  3006. if(DoAggressiveIntTypeSpec() ? landingPadValueInfo->IsLikelyInt() : landingPadValueInfo->IsInt())
  3007. {
  3008. // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
  3009. // specialization is enabled.
  3010. rootLoopPrePass->likelyIntSymsUsedBeforeDefined->Set(sym->m_id);
  3011. }
  3012. }
  3013. }
  3014. }
  3015. }
  3016. else if ((instr->TransfersSrcValue() || OpCodeAttr::CanCSE(instr->m_opcode)) && (opnd == instr->GetSrc1() || opnd == instr->GetSrc2()))
  3017. {
  3018. if (sym->IsPropertySym())
  3019. {
  3020. val = this->CreateFieldSrcValue(sym->AsPropertySym(), originalPropertySym, &opnd, instr);
  3021. }
  3022. else
  3023. {
  3024. val = this->NewGenericValue(ValueType::Uninitialized, opnd);
  3025. }
  3026. }
  3027. if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  3028. {
  3029. TryOptimizeInstrWithFixedDataProperty(&instr);
  3030. this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
  3031. }
  3032. if (val)
  3033. {
  3034. ValueType valueType(val->GetValueInfo()->Type());
  3035. // This block uses per-instruction profile information on array types to optimize using the best available profile
  3036. // information and to prevent infinite bailouts by ensuring array type information is updated on bailouts.
  3037. if (valueType.IsLikelyArray() && !valueType.IsDefinite() && !valueType.IsObject() && instr->IsProfiledInstr())
  3038. {
  3039. // See if we have profile data for the array type
  3040. IR::ProfiledInstr *const profiledInstr = instr->AsProfiledInstr();
  3041. ValueType profiledArrayType;
  3042. bool useAggressiveSpecialization = true;
  3043. switch(instr->m_opcode)
  3044. {
  3045. case Js::OpCode::LdElemI_A:
  3046. if(instr->GetSrc1()->IsIndirOpnd() && opnd == instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd())
  3047. {
  3048. profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
  3049. useAggressiveSpecialization = !profiledInstr->u.ldElemInfo->IsAggressiveSpecializationDisabled();
  3050. }
  3051. break;
  3052. case Js::OpCode::StElemI_A:
  3053. case Js::OpCode::StElemI_A_Strict:
  3054. case Js::OpCode::StElemC:
  3055. if(instr->GetDst()->IsIndirOpnd() && opnd == instr->GetDst()->AsIndirOpnd()->GetBaseOpnd())
  3056. {
  3057. profiledArrayType = profiledInstr->u.stElemInfo->GetArrayType();
  3058. useAggressiveSpecialization = !profiledInstr->u.stElemInfo->IsAggressiveSpecializationDisabled();
  3059. }
  3060. break;
  3061. case Js::OpCode::LdLen_A:
  3062. if(instr->GetSrc1()->IsRegOpnd() && opnd == instr->GetSrc1())
  3063. {
  3064. profiledArrayType = profiledInstr->u.LdLenInfo().GetArrayType();
  3065. useAggressiveSpecialization = !profiledInstr->u.LdLenInfo().IsAggressiveSpecializationDisabled();
  3066. }
  3067. break;
  3068. case Js::OpCode::IsIn:
  3069. if (instr->GetSrc2()->IsRegOpnd() && opnd == instr->GetSrc2())
  3070. {
  3071. profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
  3072. useAggressiveSpecialization = !profiledInstr->u.ldElemInfo->IsAggressiveSpecializationDisabled();
  3073. }
  3074. break;
  3075. }
  3076. if (profiledArrayType.IsLikelyObject())
  3077. {
  3078. // Ideally we want to use the most specialized type seen by this path, but when that causes bailouts use the least specialized type instead.
  3079. if (useAggressiveSpecialization &&
  3080. profiledArrayType.GetObjectType() == valueType.GetObjectType() &&
  3081. !valueType.IsLikelyNativeIntArray() &&
  3082. (
  3083. profiledArrayType.HasIntElements() || (valueType.HasVarElements() && profiledArrayType.HasFloatElements())
  3084. ))
  3085. {
  3086. // use the more specialized type profiled by the instruction.
  3087. valueType = profiledArrayType.SetHasNoMissingValues(valueType.HasNoMissingValues());
  3088. ChangeValueType(this->currentBlock, CurrentBlockData()->FindValue(opnd->AsRegOpnd()->m_sym), valueType, false);
  3089. }
  3090. else if (!useAggressiveSpecialization &&
  3091. (profiledArrayType.GetObjectType() != valueType.GetObjectType() ||
  3092. (
  3093. valueType.IsLikelyNativeArray() &&
  3094. (
  3095. profiledArrayType.HasVarElements() || (valueType.HasIntElements() && profiledArrayType.HasFloatElements())
  3096. )
  3097. )
  3098. ))
  3099. {
  3100. // Merge array type we pulled from profile with type propagated by dataflow.
  3101. if (profiledArrayType.IsLikelyArray())
  3102. {
  3103. valueType = valueType.Merge(profiledArrayType).SetHasNoMissingValues(valueType.HasNoMissingValues());
  3104. }
  3105. else
  3106. {
  3107. valueType = valueType.Merge(profiledArrayType);
  3108. }
  3109. ChangeValueType(this->currentBlock, CurrentBlockData()->FindValue(opnd->AsRegOpnd()->m_sym), valueType, false, true);
  3110. }
  3111. }
  3112. }
  3113. opnd->SetValueType(valueType);
  3114. if(!IsLoopPrePass() && opnd->IsSymOpnd() && valueType.IsDefinite())
  3115. {
  3116. if (opnd->AsSymOpnd()->m_sym->IsPropertySym())
  3117. {
  3118. // A property sym can only be guaranteed to have a definite value type when implicit calls are disabled from the
  3119. // point where the sym was defined with the definite value type. Insert an instruction to indicate to the
  3120. // dead-store pass that implicit calls need to be kept disabled until after this instruction.
  3121. Assert(DoFieldCopyProp());
  3122. CaptureNoImplicitCallUses(opnd, false, instr);
  3123. }
  3124. }
  3125. }
  3126. else
  3127. {
  3128. opnd->SetValueType(ValueType::Uninitialized);
  3129. }
  3130. return val;
  3131. }
/*
* GlobOpt::TryOptimizeInstrWithFixedDataProperty
* Converts a Ld[Root]Fld instr into the sequence:
*   CheckFixedFld
*   Dst = Ld_A <int constant value>
* This API assumes that the source operand is of Sym/PropertySym kind.
*/
  3139. void
  3140. GlobOpt::TryOptimizeInstrWithFixedDataProperty(IR::Instr ** const pInstr)
  3141. {
  3142. Assert(pInstr);
  3143. IR::Instr * &instr = *pInstr;
  3144. IR::Opnd * src1 = instr->GetSrc1();
  3145. Assert(src1 && src1->IsSymOpnd() && src1->AsSymOpnd()->IsPropertySymOpnd());
  3146. if(PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func))
  3147. {
  3148. return;
  3149. }
  3150. if (!this->IsLoopPrePass() && !this->isRecursiveCallOnLandingPad &&
  3151. OpCodeAttr::CanLoadFixedFields(instr->m_opcode))
  3152. {
  3153. instr->TryOptimizeInstrWithFixedDataProperty(&instr, this);
  3154. }
  3155. }
// Constant prop if possible, otherwise if this value already resides in another
// symbol, reuse this previous symbol. This should help register allocation.
//
// Returns the (possibly replaced) operand, or nullptr when the operand was
// freed as part of the transformation (e.g. StSlotChkUndecl's src2).
IR::Opnd *
GlobOpt::CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd)
{
    Assert(
        parentIndirOpnd
            ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
            : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());

    if (this->IsLoopPrePass())
    {
        // Transformations are not legal in prepass...
        return opnd;
    }

    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // Don't copy prop in generator functions because non-bytecode temps that span a yield
        // cannot be saved and restored by the current bail-out mechanics utilized by generator
        // yield/resume.
        // TODO[generators][ianhall]: Enable copy-prop at least for in between yields.
        return opnd;
    }

    if (instr->m_opcode == Js::OpCode::CheckFixedFld || instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType)
    {
        // Don't copy prop into CheckFixedFld or CheckPropertyGuardAndLoadType
        return opnd;
    }

    // Don't copy-prop link operands of ExtendedArgs
    if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc2())
    {
        return opnd;
    }

    // Don't copy-prop operand of SIMD instr with ExtendedArg operands. Each instr should have its exclusive EA sequence.
    if (
        Js::IsSimd128Opcode(instr->m_opcode) &&
        instr->GetSrc1() != nullptr &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc2() == nullptr
       )
    {
        StackSym *sym = instr->GetSrc1()->GetStackSym();
        if (sym && sym->IsSingleDef() && sym->GetInstrDef()->m_opcode == Js::OpCode::ExtendArg_A)
        {
            return opnd;
        }
    }

    ValueInfo *valueInfo = val->GetValueInfo();

    if (this->func->HasFinally())
    {
        // s0 = undefined was added on functions with early exit in try-finally functions, that can get copy-proped and cause incorrect results
        if (instr->m_opcode == Js::OpCode::ArgOut_A_Inline && valueInfo->GetSymStore() &&
            valueInfo->GetSymStore()->m_id == 0)
        {
            // We don't want to copy-prop s0 (return symbol) into inlinee code
            return opnd;
        }
    }

    // Constant prop?
    int32 intConstantValue;
    int64 int64ConstantValue;
    if (valueInfo->TryGetIntConstantValue(&intConstantValue))
    {
        if (PHASE_OFF(Js::ConstPropPhase, this->func))
        {
            return opnd;
        }

        if ((
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC
            ) && instr->GetSrc1() == opnd)
        {
            // Disabling prop to src of native array store, because we were losing the chance to type specialize.
            // Is it possible to type specialize this src if we allow constants, etc., to be prop'd here?
            if (instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray())
            {
                return opnd;
            }
        }

        if(opnd != instr->GetSrc1() && opnd != instr->GetSrc2())
        {
            // 'opnd' is the base or index of an indir somewhere on this instruction.
            if(PHASE_OFF(Js::IndirCopyPropPhase, instr->m_func))
            {
                return opnd;
            }

            // Const-prop an indir opnd's constant index into its offset
            IR::Opnd *srcs[] = { instr->GetSrc1(), instr->GetSrc2(), instr->GetDst() };
            for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]); ++i)
            {
                const auto src = srcs[i];
                if(!src || !src->IsIndirOpnd())
                {
                    continue;
                }

                const auto indir = src->AsIndirOpnd();
                // Skip when folding the index would overflow the 32-bit offset.
                if ((int64)indir->GetOffset() + intConstantValue > INT32_MAX)
                {
                    continue;
                }
                if(opnd == indir->GetIndexOpnd())
                {
                    Assert(indir->GetScale() == 0);
                    GOPT_TRACE_OPND(opnd, _u("Constant prop indir index into offset (value: %d)\n"), intConstantValue);
                    this->CaptureByteCodeSymUses(instr);
                    indir->SetOffset(indir->GetOffset() + intConstantValue);
                    indir->SetIndexOpnd(nullptr);
                }
            }
            return opnd;
        }

        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            // Can't represent the constant as a tagged int var.
            return opnd;
        }

        IR::Opnd *constOpnd;
        if (opnd->IsVar())
        {
            IR::AddrOpnd *addrOpnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked((int)intConstantValue), IR::AddrOpndKindConstantVar, instr->m_func);
            GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), addrOpnd->m_address, intConstantValue);
            constOpnd = addrOpnd;
        }
        else
        {
            // Note: Jit loop body generates some i32 operands...
            Assert(opnd->IsInt32() || opnd->IsInt64() || opnd->IsUInt32());
            IRType opndType;
            IntConstType constVal;
            if (opnd->IsUInt32())
            {
                // avoid sign extension
                constVal = (uint32)intConstantValue;
                opndType = TyUint32;
            }
            else
            {
                constVal = intConstantValue;
                opndType = TyInt32;
            }
            IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(constVal, opndType, instr->m_func);
            GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), intOpnd->GetImmediateValue(instr->m_func), intConstantValue);
            constOpnd = intOpnd;
        }

#if ENABLE_DEBUG_CONFIG_OPTIONS
        //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
        if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
        {
            instr->DumpFieldCopyPropTestTrace(this->isRecursiveCallOnLandingPad);
        }
#endif

        this->CaptureByteCodeSymUses(instr);
        opnd = instr->ReplaceSrc(opnd, constOpnd);

        // Fix up the opcode now that the src is a constant.
        switch (instr->m_opcode)
        {
        case Js::OpCode::LdSlot:
        case Js::OpCode::LdSlotArr:
        case Js::OpCode::LdFld:
        case Js::OpCode::LdFldForTypeOf:
        case Js::OpCode::LdRootFldForTypeOf:
        case Js::OpCode::LdFldForCallApplyTarget:
        case Js::OpCode::LdRootFld:
        case Js::OpCode::LdMethodFld:
        case Js::OpCode::LdRootMethodFld:
        case Js::OpCode::LdMethodFromFlags:
        case Js::OpCode::ScopedLdMethodFld:
        case Js::OpCode::ScopedLdFld:
        case Js::OpCode::ScopedLdFldForTypeOf:
            instr->m_opcode = Js::OpCode::Ld_A;
            // Intentional fall-through: the (now) Ld_A dst also becomes an int const.
        case Js::OpCode::Ld_A:
        {
            IR::Opnd * dst = instr->GetDst();
            if (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsSingleDef())
            {
                dst->AsRegOpnd()->m_sym->SetIsIntConst((int)intConstantValue);
            }
            break;
        }

        case Js::OpCode::ArgOut_A:
        case Js::OpCode::ArgOut_A_Inline:
        case Js::OpCode::ArgOut_A_FixupForStackArgs:
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            if (instr->GetDst()->IsRegOpnd())
            {
                Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
                instr->GetDst()->AsRegOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
            }
            else
            {
                instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
            }
            break;

        case Js::OpCode::TypeofElem:
            instr->m_opcode = Js::OpCode::Typeof;
            break;

        case Js::OpCode::StSlotChkUndecl:
            if (instr->GetSrc2() == opnd)
            {
                // Src2 here should refer to the same location as the Dst operand, which we need to keep live
                // due to the implicit read for ChkUndecl.
                instr->m_opcode = Js::OpCode::StSlot;
                instr->FreeSrc2();
                opnd = nullptr;
            }
            break;
        }
        return opnd;
    }
    else if (valueInfo->TryGetIntConstantValue(&int64ConstantValue, false))
    {
        // 64-bit constant prop: only enabled for WebAssembly functions.
        if (PHASE_OFF(Js::ConstPropPhase, this->func) || !PHASE_ON(Js::Int64ConstPropPhase, this->func))
        {
            return opnd;
        }

        Assert(this->func->GetJITFunctionBody()->IsWasmFunction());
        if (this->func->GetJITFunctionBody()->IsWasmFunction() && opnd->IsInt64())
        {
            IR::Int64ConstOpnd *intOpnd = IR::Int64ConstOpnd::New(int64ConstantValue, opnd->GetType(), instr->m_func);
            GOPT_TRACE_OPND(opnd, _u("Constant prop %lld (value:%lld)\n"), intOpnd->GetImmediateValue(instr->m_func), int64ConstantValue);
            this->CaptureByteCodeSymUses(instr);
            opnd = instr->ReplaceSrc(opnd, intOpnd);
        }
        return opnd;
    }

    // Not a constant: try copy prop through the value's copy-prop sym.
    Sym *opndSym = nullptr;
    if (opnd->IsRegOpnd())
    {
        IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
        opndSym = regOpnd->m_sym;
    }
    else if (opnd->IsSymOpnd())
    {
        IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
        opndSym = symOpnd->m_sym;
    }
    if (!opndSym)
    {
        return opnd;
    }

    if (PHASE_OFF(Js::CopyPropPhase, this->func))
    {
        this->SetSymStoreDirect(valueInfo, opndSym);
        return opnd;
    }

    StackSym *copySym = CurrentBlockData()->GetCopyPropSym(opndSym, val);
    if (copySym != nullptr)
    {
        Assert(!opndSym->IsStackSym() || copySym->GetSymSize() == opndSym->AsStackSym()->GetSymSize());
        // Copy prop.
        return CopyPropReplaceOpnd(instr, opnd, copySym, parentIndirOpnd);
    }
    else
    {
        if (valueInfo->GetSymStore() && instr->m_opcode == Js::OpCode::Ld_A && instr->GetDst()->IsRegOpnd()
            && valueInfo->GetSymStore() == instr->GetDst()->AsRegOpnd()->m_sym)
        {
            // Avoid resetting symStore after fieldHoisting:
            //  t1 = LdFld field            <- set symStore to fieldHoistSym
            //   fieldHoistSym = Ld_A t1    <- we're looking at t1 now, but want to copy-prop fieldHoistSym forward
            return opnd;
        }
        this->SetSymStoreDirect(valueInfo, opndSym);
    }

    return opnd;
}
  3419. IR::Opnd *
  3420. GlobOpt::CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd)
  3421. {
  3422. Assert(
  3423. parentIndirOpnd
  3424. ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
  3425. : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
  3426. Assert(CurrentBlockData()->IsLive(copySym));
  3427. IR::RegOpnd *regOpnd;
  3428. StackSym *newSym = copySym;
  3429. GOPT_TRACE_OPND(opnd, _u("Copy prop s%d\n"), newSym->m_id);
  3430. #if ENABLE_DEBUG_CONFIG_OPTIONS
  3431. //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
  3432. if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
  3433. {
  3434. instr->DumpFieldCopyPropTestTrace(this->isRecursiveCallOnLandingPad);
  3435. }
  3436. #endif
  3437. this->CaptureByteCodeSymUses(instr);
  3438. if (opnd->IsRegOpnd())
  3439. {
  3440. regOpnd = opnd->AsRegOpnd();
  3441. regOpnd->m_sym = newSym;
  3442. regOpnd->SetIsJITOptimizedReg(true);
  3443. // The dead bit on the opnd is specific to the sym it is referencing. Since we replaced the sym, the bit is reset.
  3444. regOpnd->SetIsDead(false);
  3445. if(parentIndirOpnd)
  3446. {
  3447. return regOpnd;
  3448. }
  3449. }
  3450. else
  3451. {
  3452. // If this is an object type specialized field load inside a loop, and it produces a type value which wasn't live
  3453. // before, make sure the type check is left in the loop, because it may be the last type check in the loop protecting
  3454. // other fields which are not hoistable and are lexically upstream in the loop. If the check is not ultimately
  3455. // needed, the dead store pass will remove it.
  3456. if (this->currentBlock->loop != nullptr && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  3457. {
  3458. IR::PropertySymOpnd* propertySymOpnd = opnd->AsPropertySymOpnd();
  3459. if (CheckIfPropOpEmitsTypeCheck(instr, propertySymOpnd))
  3460. {
  3461. // We only set guarded properties in the dead store pass, so they shouldn't be set here yet. If they were
  3462. // we would need to move them from this operand to the operand which is being copy propagated.
  3463. Assert(propertySymOpnd->GetGuardedPropOps() == nullptr);
  3464. // We're creating a copy of this operand to be reused in the same spot in the flow, so we can copy all
  3465. // flow sensitive fields. However, we will do only a type check here (no property access) and only for
  3466. // the sake of downstream instructions, so the flags pertaining to this property access are irrelevant.
  3467. IR::PropertySymOpnd* checkObjTypeOpnd = CreateOpndForTypeCheckOnly(propertySymOpnd, instr->m_func);
  3468. IR::Instr* checkObjTypeInstr = IR::Instr::New(Js::OpCode::CheckObjType, instr->m_func);
  3469. checkObjTypeInstr->SetSrc1(checkObjTypeOpnd);
  3470. checkObjTypeInstr->SetByteCodeOffset(instr);
  3471. instr->InsertBefore(checkObjTypeInstr);
  3472. // Since we inserted this instruction before the one that is being processed in natural flow, we must process
  3473. // it for object type spec explicitly here.
  3474. FinishOptPropOp(checkObjTypeInstr, checkObjTypeOpnd);
  3475. Assert(!propertySymOpnd->IsTypeChecked());
  3476. checkObjTypeInstr = this->SetTypeCheckBailOut(checkObjTypeOpnd, checkObjTypeInstr, nullptr);
  3477. Assert(checkObjTypeInstr->HasBailOutInfo());
  3478. if (this->currentBlock->loop && !this->IsLoopPrePass())
  3479. {
  3480. // Try hoisting this checkObjType.
  3481. // But since this isn't the current instr being optimized, we need to play tricks with
  3482. // the byteCodeUse fields...
  3483. TrackByteCodeUsesForInstrAddedInOptInstr(checkObjTypeInstr, [&]()
  3484. {
  3485. TryHoistInvariant(checkObjTypeInstr, this->currentBlock, NULL, CurrentBlockData()->FindValue(copySym), NULL, true);
  3486. });
  3487. }
  3488. }
  3489. }
  3490. if (opnd->IsSymOpnd() && opnd->GetIsDead())
  3491. {
  3492. // Take the property sym out of the live fields set
  3493. this->EndFieldLifetime(opnd->AsSymOpnd());
  3494. }
  3495. regOpnd = IR::RegOpnd::New(newSym, opnd->GetType(), instr->m_func);
  3496. regOpnd->SetIsJITOptimizedReg(true);
  3497. instr->ReplaceSrc(opnd, regOpnd);
  3498. }
  3499. switch (instr->m_opcode)
  3500. {
  3501. case Js::OpCode::Ld_A:
  3502. if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
  3503. instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
  3504. {
  3505. this->InsertByteCodeUses(instr, true);
  3506. instr->m_opcode = Js::OpCode::Nop;
  3507. }
  3508. break;
  3509. case Js::OpCode::LdSlot:
  3510. case Js::OpCode::LdSlotArr:
  3511. if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
  3512. instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
  3513. {
  3514. this->InsertByteCodeUses(instr, true);
  3515. instr->m_opcode = Js::OpCode::Nop;
  3516. }
  3517. else
  3518. {
  3519. instr->m_opcode = Js::OpCode::Ld_A;
  3520. }
  3521. break;
  3522. case Js::OpCode::StSlotChkUndecl:
  3523. if (instr->GetSrc2()->IsRegOpnd())
  3524. {
  3525. // Src2 here should refer to the same location as the Dst operand, which we need to keep live
  3526. // due to the implicit read for ChkUndecl.
  3527. instr->m_opcode = Js::OpCode::StSlot;
  3528. instr->FreeSrc2();
  3529. return nullptr;
  3530. }
  3531. break;
  3532. case Js::OpCode::LdFld:
  3533. case Js::OpCode::LdFldForTypeOf:
  3534. case Js::OpCode::LdRootFldForTypeOf:
  3535. case Js::OpCode::LdFldForCallApplyTarget:
  3536. case Js::OpCode::LdRootFld:
  3537. case Js::OpCode::LdMethodFld:
  3538. case Js::OpCode::LdRootMethodFld:
  3539. case Js::OpCode::ScopedLdMethodFld:
  3540. case Js::OpCode::ScopedLdFld:
  3541. case Js::OpCode::ScopedLdFldForTypeOf:
  3542. instr->m_opcode = Js::OpCode::Ld_A;
  3543. break;
  3544. case Js::OpCode::LdMethodFromFlags:
  3545. // The bailout is checked on the loop top and we don't need to check bailout again in loop.
  3546. instr->m_opcode = Js::OpCode::Ld_A;
  3547. instr->ClearBailOutInfo();
  3548. break;
  3549. case Js::OpCode::TypeofElem:
  3550. instr->m_opcode = Js::OpCode::Typeof;
  3551. break;
  3552. }
  3553. CurrentBlockData()->MarkTempLastUse(instr, regOpnd);
  3554. return regOpnd;
  3555. }
  3556. ValueNumber
  3557. GlobOpt::NewValueNumber()
  3558. {
  3559. ValueNumber valueNumber = this->currentValue++;
  3560. if (valueNumber == 0)
  3561. {
  3562. Js::Throw::OutOfMemory();
  3563. }
  3564. return valueNumber;
  3565. }
  3566. Value *GlobOpt::NewValue(ValueInfo *const valueInfo)
  3567. {
  3568. return NewValue(NewValueNumber(), valueInfo);
  3569. }
// Creates a new Value wrapping 'valueInfo' under an explicit, caller-chosen value number.
Value *GlobOpt::NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo)
{
    Assert(valueInfo);
    return Value::New(alloc, valueNumber, valueInfo);
}
  3575. Value *GlobOpt::CopyValue(Value const *const value)
  3576. {
  3577. return CopyValue(value, NewValueNumber());
  3578. }
// Clones 'value' under the given value number (delegates to Value::Copy with this GlobOpt's allocator).
Value *GlobOpt::CopyValue(Value const *const value, const ValueNumber valueNumber)
{
    Assert(value);
    return value->Copy(alloc, valueNumber);
}
// Creates a new value of the given type with no associated operand.
Value *
GlobOpt::NewGenericValue(const ValueType valueType)
{
    // Delegate to the opnd-taking overload with a null operand.
    return NewGenericValue(valueType, static_cast<IR::Opnd *>(nullptr));
}
  3589. Value *
  3590. GlobOpt::NewGenericValue(const ValueType valueType, IR::Opnd *const opnd)
  3591. {
  3592. // Shouldn't assign a likely-int value to something that is definitely not an int
  3593. Assert(!(valueType.IsLikelyInt() && opnd && opnd->IsNotInt()));
  3594. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  3595. Value *val = NewValue(valueInfo);
  3596. TrackNewValueForKills(val);
  3597. CurrentBlockData()->InsertNewValue(val, opnd);
  3598. return val;
  3599. }
  3600. Value *
  3601. GlobOpt::NewGenericValue(const ValueType valueType, Sym *const sym)
  3602. {
  3603. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  3604. Value *val = NewValue(valueInfo);
  3605. TrackNewValueForKills(val);
  3606. CurrentBlockData()->SetValue(val, sym);
  3607. return val;
  3608. }
  3609. Value *
  3610. GlobOpt::GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd)
  3611. {
  3612. Value *value = nullptr;
  3613. Value *const cachedValue = this->intConstantToValueMap->Lookup(intConst, nullptr);
  3614. if(cachedValue)
  3615. {
  3616. // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
  3617. // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
  3618. // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
  3619. // way to determine if a value with the same value number exists for this block. So the best we can do with a global
  3620. // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
  3621. // Otherwise, we have to create a new value with a new value number.
  3622. Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
  3623. if (symStore && CurrentBlockData()->IsLive(symStore))
  3624. {
  3625. Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
  3626. int32 symStoreIntConstantValue;
  3627. if (symStoreValue &&
  3628. symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
  3629. symStoreValue->GetValueInfo()->TryGetIntConstantValue(&symStoreIntConstantValue) &&
  3630. symStoreIntConstantValue == intConst)
  3631. {
  3632. value = symStoreValue;
  3633. }
  3634. }
  3635. }
  3636. if (!value)
  3637. {
  3638. value = NewIntConstantValue(intConst, instr, !Js::TaggedInt::IsOverflow(intConst));
  3639. }
  3640. return CurrentBlockData()->InsertNewValue(value, opnd);
  3641. }
  3642. Value *
  3643. GlobOpt::GetIntConstantValue(const int64 intConst, IR::Instr * instr, IR::Opnd *const opnd)
  3644. {
  3645. Assert(instr->m_func->GetJITFunctionBody()->IsWasmFunction());
  3646. Value *value = nullptr;
  3647. Value *const cachedValue = this->int64ConstantToValueMap->Lookup(intConst, nullptr);
  3648. if (cachedValue)
  3649. {
  3650. // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
  3651. // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
  3652. // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
  3653. // way to determine if a value with the same value number exists for this block. So the best we can do with a global
  3654. // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
  3655. // Otherwise, we have to create a new value with a new value number.
  3656. Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
  3657. if (symStore && this->currentBlock->globOptData.IsLive(symStore))
  3658. {
  3659. Value *const symStoreValue = this->currentBlock->globOptData.FindValue(symStore);
  3660. int64 symStoreIntConstantValue;
  3661. if (symStoreValue &&
  3662. symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
  3663. symStoreValue->GetValueInfo()->TryGetInt64ConstantValue(&symStoreIntConstantValue, false) &&
  3664. symStoreIntConstantValue == intConst)
  3665. {
  3666. value = symStoreValue;
  3667. }
  3668. }
  3669. }
  3670. if (!value)
  3671. {
  3672. value = NewInt64ConstantValue(intConst, instr);
  3673. }
  3674. return this->currentBlock->globOptData.InsertNewValue(value, opnd);
  3675. }
  3676. Value *
  3677. GlobOpt::NewInt64ConstantValue(const int64 intConst, IR::Instr* instr)
  3678. {
  3679. Value * value = NewValue(Int64ConstantValueInfo::New(this->alloc, intConst));
  3680. this->int64ConstantToValueMap->Item(intConst, value);
  3681. if (!value->GetValueInfo()->GetSymStore() &&
  3682. (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
  3683. {
  3684. StackSym * sym = instr->GetDst()->GetStackSym();
  3685. Assert(sym && !sym->IsTypeSpec());
  3686. this->currentBlock->globOptData.SetValue(value, sym);
  3687. this->currentBlock->globOptData.liveVarSyms->Set(sym->m_id);
  3688. }
  3689. return value;
  3690. }
// Creates a value for a 32-bit int constant, caches it globally, and — when profitable —
// hoists the constant load to the function entry so the value gets a symstore (helps CSE).
// 'isTaggable' indicates the constant fits in a tagged int and may be hoisted.
Value *
GlobOpt::NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable)
{
    Value * value = NewValue(IntConstantValueInfo::New(this->alloc, intConst));
    this->intConstantToValueMap->Item(intConst, value);
    if (isTaggable &&
        !PHASE_OFF(Js::HoistConstIntPhase, this->func))
    {
        // When creating a new int constant value, make sure it gets a symstore. If the int const doesn't have a symstore,
        // any downstream instruction using the same int will have to create a new value (object) for the int.
        // This gets in the way of CSE.
        value = HoistConstantLoadAndPropagateValueBackward(Js::TaggedInt::ToVarUnchecked(intConst), instr, value);
        if (!value->GetValueInfo()->GetSymStore() &&
            (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
        {
            // Hoisting didn't provide a symstore; fall back to using the constant load's
            // own dst sym, marking it live in the appropriate (int32 or var) set.
            StackSym * sym = instr->GetDst()->GetStackSym();
            Assert(sym);
            if (sym->IsTypeSpec())
            {
                Assert(sym->IsInt32());
                // Value maps are keyed by var syms; use the type-spec sym's var equivalent.
                StackSym * varSym = sym->GetVarEquivSym(instr->m_func);
                CurrentBlockData()->SetValue(value, varSym);
                CurrentBlockData()->liveInt32Syms->Set(varSym->m_id);
            }
            else
            {
                CurrentBlockData()->SetValue(value, sym);
                CurrentBlockData()->liveVarSyms->Set(sym->m_id);
            }
        }
    }
    return value;
}
// Thin wrapper over ValueInfo's int-range factory, supplying this GlobOpt's allocator.
ValueInfo *
GlobOpt::NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout)
{
    return ValueInfo::NewIntRangeValueInfo(this->alloc, min, max, wasNegativeZeroPreventedByBailout);
}
  3729. ValueInfo *GlobOpt::NewIntRangeValueInfo(
  3730. const ValueInfo *const originalValueInfo,
  3731. const int32 min,
  3732. const int32 max) const
  3733. {
  3734. Assert(originalValueInfo);
  3735. ValueInfo *valueInfo;
  3736. if(min == max)
  3737. {
  3738. // Since int constant values are const-propped, negative zero tracking does not track them, and so it's okay to ignore
  3739. // 'wasNegativeZeroPreventedByBailout'
  3740. valueInfo = IntConstantValueInfo::New(alloc, min);
  3741. }
  3742. else
  3743. {
  3744. valueInfo =
  3745. IntRangeValueInfo::New(
  3746. alloc,
  3747. min,
  3748. max,
  3749. min <= 0 && max >= 0 && originalValueInfo->WasNegativeZeroPreventedByBailout());
  3750. }
  3751. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  3752. return valueInfo;
  3753. }
  3754. Value *
  3755. GlobOpt::NewIntRangeValue(
  3756. const int32 min,
  3757. const int32 max,
  3758. const bool wasNegativeZeroPreventedByBailout,
  3759. IR::Opnd *const opnd)
  3760. {
  3761. ValueInfo *valueInfo = this->NewIntRangeValueInfo(min, max, wasNegativeZeroPreventedByBailout);
  3762. Value *val = NewValue(valueInfo);
  3763. if (opnd)
  3764. {
  3765. GOPT_TRACE_OPND(opnd, _u("Range %d (0x%X) to %d (0x%X)\n"), min, min, max, max);
  3766. }
  3767. CurrentBlockData()->InsertNewValue(val, opnd);
  3768. return val;
  3769. }
  3770. IntBoundedValueInfo *GlobOpt::NewIntBoundedValueInfo(
  3771. const ValueInfo *const originalValueInfo,
  3772. const IntBounds *const bounds) const
  3773. {
  3774. Assert(originalValueInfo);
  3775. bounds->Verify();
  3776. IntBoundedValueInfo *const valueInfo =
  3777. IntBoundedValueInfo::New(
  3778. originalValueInfo->Type(),
  3779. bounds,
  3780. (
  3781. bounds->ConstantLowerBound() <= 0 &&
  3782. bounds->ConstantUpperBound() >= 0 &&
  3783. originalValueInfo->WasNegativeZeroPreventedByBailout()
  3784. ),
  3785. alloc);
  3786. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  3787. return valueInfo;
  3788. }
  3789. Value *GlobOpt::NewIntBoundedValue(
  3790. const ValueType valueType,
  3791. const IntBounds *const bounds,
  3792. const bool wasNegativeZeroPreventedByBailout,
  3793. IR::Opnd *const opnd)
  3794. {
  3795. Value *const value = NewValue(IntBoundedValueInfo::New(valueType, bounds, wasNegativeZeroPreventedByBailout, alloc));
  3796. CurrentBlockData()->InsertNewValue(value, opnd);
  3797. return value;
  3798. }
  3799. Value *
  3800. GlobOpt::NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd)
  3801. {
  3802. FloatConstantValueInfo *valueInfo = FloatConstantValueInfo::New(this->alloc, floatValue);
  3803. Value *val = NewValue(valueInfo);
  3804. CurrentBlockData()->InsertNewValue(val, opnd);
  3805. return val;
  3806. }
// Returns a value for the var constant held by 'addrOpnd', reusing a cached value when
// it is still valid in the current block, and creating (and caching) a new one otherwise.
// Two global caches are consulted: by address, and — for strings — by string content,
// so equal strings at different addresses can share a value.
Value *
GlobOpt::GetVarConstantValue(IR::AddrOpnd *addrOpnd)
{
    bool isVar = addrOpnd->IsVar();
    bool isString = isVar && addrOpnd->m_localAddress && JITJavascriptString::Is(addrOpnd->m_localAddress);
    Value *val = nullptr;
    Value *cachedValue = nullptr;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && CurrentBlockData()->IsLive(symStore))
        {
            Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                // Reuse only if the live value still holds this exact address.
                if(symStoreValueInfo->IsVarConstant() && symStoreValueInfo->AsVarConstant()->VarValue() == addrOpnd->m_address)
                {
                    val = symStoreValue;
                }
            }
        }
    }
    else if (isString)
    {
        // No hit by address; for strings, also try the content-keyed cache.
        JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
        Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
        if (this->stringConstantToValueMap->TryGetValue(internalString, &cachedValue))
        {
            // Same global-cache caveat as above: validate via the symstore's live value.
            Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
            if (symStore && CurrentBlockData()->IsLive(symStore))
            {
                Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
                if (symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
                {
                    ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                    if (symStoreValueInfo->IsVarConstant())
                    {
                        // Compare by content, since the cached string may live at a different address.
                        JITJavascriptString * cachedString = JITJavascriptString::FromVar(symStoreValue->GetValueInfo()->AsVarConstant()->VarValue(true));
                        Js::InternalString cachedInternalString(cachedString->GetString(), cachedString->GetLength());
                        if (Js::InternalStringComparer::Equals(internalString, cachedInternalString))
                        {
                            val = symStoreValue;
                        }
                    }
                }
            }
        }
    }
    if(!val)
    {
        val = NewVarConstantValue(addrOpnd, isString);
    }
    addrOpnd->SetValueType(val->GetValueInfo()->Type());
    return val;
}
  3869. Value *
  3870. GlobOpt::NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString)
  3871. {
  3872. VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, addrOpnd->m_address, addrOpnd->GetValueType(), false, addrOpnd->m_localAddress);
  3873. Value * value = NewValue(valueInfo);
  3874. this->addrConstantToValueMap->Item(addrOpnd->m_address, value);
  3875. if (isString)
  3876. {
  3877. JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
  3878. Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
  3879. this->stringConstantToValueMap->Item(internalString, value);
  3880. }
  3881. return value;
  3882. }
// Hoists a load of the tagged-int constant 'varConst' to the function entry block and
// propagates 'value' backward from the current block to the entry block, so the constant's
// value is available on all merge paths in between. Returns the (possibly re-inserted) value.
Value *
GlobOpt::HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value)
{
    // Bail out when hoisting is pointless or unsafe: during the loop prepass, or when we're
    // already in the entry block and the original instr transfers its src value anyway.
    if (this->IsLoopPrePass() ||
        ((this->currentBlock == this->func->m_fg->blockList) &&
        origInstr->TransfersSrcValue()))
    {
        return value;
    }
    // Only hoisting taggable int const loads for now. Could be extended to other constants (floats, strings, addr opnds) if we see some benefit.
    Assert(Js::TaggedInt::Is(varConst));
    // Insert a load of the constant at the top of the function
    StackSym * dstSym = StackSym::New(this->func);
    IR::RegOpnd * constRegOpnd = IR::RegOpnd::New(dstSym, TyVar, this->func);
    IR::Instr * loadInstr = IR::Instr::NewConstantLoad(constRegOpnd, (intptr_t)varConst, ValueType::GetInt(true), this->func);
    this->func->m_fg->blockList->GetFirstInstr()->InsertAfter(loadInstr);
    // Type-spec the load (Support for floats needs to be added when we start hoisting float constants).
    bool typeSpecedToInt = false;
    if (Js::TaggedInt::Is(varConst) && !IsTypeSpecPhaseOff(this->func))
    {
        typeSpecedToInt = true;
        loadInstr->m_opcode = Js::OpCode::Ld_I4;
        ToInt32Dst(loadInstr, loadInstr->GetDst()->AsRegOpnd(), this->currentBlock);
        loadInstr->GetDst()->GetStackSym()->SetIsConst();
    }
    else
    {
        CurrentBlockData()->liveVarSyms->Set(dstSym->m_id);
    }
    // Add the value (object) to the current block's symToValueMap and propagate the value backward to all relevant blocks so it is available on merges.
    value = CurrentBlockData()->InsertNewValue(value, constRegOpnd);
    // Pointer-to-member selecting which liveness bit-vector (int32 or var) to set
    // in each block we propagate through, matching how the load was type-spec'd.
    BVSparse<JitArenaAllocator>* GlobOptBlockData::*bv;
    bv = typeSpecedToInt ? &GlobOptBlockData::liveInt32Syms : &GlobOptBlockData::liveVarSyms; // Will need to be expanded when we start hoisting float constants.
    if (this->currentBlock != this->func->m_fg->blockList)
    {
        // Walk backward from the current block to the entry block, marking the new sym
        // live and installing a copy of the value wherever the block doesn't have one.
        for (InvariantBlockBackwardIterator it(this, this->currentBlock, this->func->m_fg->blockList, nullptr);
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock * block = it.Block();
            (block->globOptData.*bv)->Set(dstSym->m_id);
            if (!block->globOptData.FindValue(dstSym))
            {
                // Same value number in every block: these all represent the same constant.
                Value *const valueCopy = CopyValue(value, value->GetValueNumber());
                block->globOptData.SetValue(valueCopy, dstSym);
            }
        }
    }
    return value;
}
// Returns a value for a fixed (known) JavascriptFunction constant at addrOpnd's address,
// reusing the globally cached value when it is still valid in the current block and
// creating/caching a new function-flavored VarConstant value otherwise.
Value *
GlobOpt::NewFixedFunctionValue(Js::JavascriptFunction *function, IR::AddrOpnd *addrOpnd)
{
    Assert(function != nullptr);
    Value *val = nullptr;
    Value *cachedValue = nullptr;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && CurrentBlockData()->IsLive(symStore))
        {
            Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                if(symStoreValueInfo->IsVarConstant())
                {
                    // Reuse only if it's the same address AND flagged as a function constant.
                    VarConstantValueInfo *const symStoreVarConstantValueInfo = symStoreValueInfo->AsVarConstant();
                    if(symStoreVarConstantValueInfo->VarValue() == addrOpnd->m_address &&
                        symStoreVarConstantValueInfo->IsFunction())
                    {
                        val = symStoreValue;
                    }
                }
            }
        }
    }
    if(!val)
    {
        // 'true' marks this VarConstant as a function.
        VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, function, addrOpnd->GetValueType(), true, addrOpnd->m_localAddress);
        val = NewValue(valueInfo);
        // NOTE(review): uses AddNew here while NewVarConstantValue uses Item (replace);
        // presumably this path only runs when the address isn't cached yet — confirm.
        this->addrConstantToValueMap->AddNew(addrOpnd->m_address, val);
    }
    CurrentBlockData()->InsertNewValue(val, addrOpnd);
    return val;
}
// Returns the stack sym previously associated with this tagged-int constant, or nullptr if none.
StackSym *GlobOpt::GetTaggedIntConstantStackSym(const int32 intConstantValue) const
{
    // Callers must only ask about constants that actually fit in a tagged int.
    Assert(!Js::TaggedInt::IsOverflow(intConstantValue));
    return intConstantToStackSymMap->Lookup(intConstantValue, nullptr);
}
  3980. StackSym *GlobOpt::GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const
  3981. {
  3982. StackSym *stackSym = GetTaggedIntConstantStackSym(intConstantValue);
  3983. if(stackSym)
  3984. {
  3985. return stackSym;
  3986. }
  3987. stackSym = StackSym::New(TyVar,func);
  3988. intConstantToStackSymMap->Add(intConstantValue, stackSym);
  3989. return stackSym;
  3990. }
  3991. Sym *
  3992. GlobOpt::SetSymStore(ValueInfo *valueInfo, Sym *sym)
  3993. {
  3994. if (sym->IsStackSym())
  3995. {
  3996. StackSym *stackSym = sym->AsStackSym();
  3997. if (stackSym->IsTypeSpec())
  3998. {
  3999. stackSym = stackSym->GetVarEquivSym(this->func);
  4000. sym = stackSym;
  4001. }
  4002. }
  4003. if (valueInfo->GetSymStore() == nullptr || valueInfo->GetSymStore()->IsPropertySym())
  4004. {
  4005. SetSymStoreDirect(valueInfo, sym);
  4006. }
  4007. return sym;
  4008. }
// Unconditionally repoints valueInfo's sym store to 'sym', first recording the previously
// stored sym as changed in the current block's data (presumably for merge/invalidation
// bookkeeping — see GlobOptBlockData::SetChangedSym).
void
GlobOpt::SetSymStoreDirect(ValueInfo * valueInfo, Sym * sym)
{
    Sym * prevSymStore = valueInfo->GetSymStore();
    CurrentBlockData()->SetChangedSym(prevSymStore);
    valueInfo->SetSymStore(sym);
}
  4016. // Figure out the Value of this dst.
  4017. Value *
  4018. GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
  4019. {
  4020. IR::Instr *&instr = *pInstr;
  4021. IR::Opnd *dst = instr->GetDst();
  4022. Value *dstVal = nullptr;
  4023. Sym *sym;
  4024. if (instr->CallsSetter())
  4025. {
  4026. return nullptr;
  4027. }
  4028. if (dst == nullptr)
  4029. {
  4030. return nullptr;
  4031. }
  4032. switch (dst->GetKind())
  4033. {
  4034. case IR::OpndKindSym:
  4035. sym = dst->AsSymOpnd()->m_sym;
  4036. break;
  4037. case IR::OpndKindReg:
  4038. sym = dst->AsRegOpnd()->m_sym;
  4039. if (OpCodeAttr::TempNumberProducing(instr->m_opcode))
  4040. {
  4041. CurrentBlockData()->isTempSrc->Set(sym->m_id);
  4042. }
  4043. else if (OpCodeAttr::TempNumberTransfer(instr->m_opcode))
  4044. {
  4045. IR::Opnd *src1 = instr->GetSrc1();
  4046. if (src1->IsRegOpnd() && CurrentBlockData()->isTempSrc->Test(src1->AsRegOpnd()->m_sym->m_id))
  4047. {
  4048. StackSym *src1Sym = src1->AsRegOpnd()->m_sym;
  4049. // isTempSrc is used for marking isTempLastUse, which is used to generate AddLeftDead()
  4050. // calls instead of the normal Add helpers. It tells the runtime that concats can use string
  4051. // builders.
  4052. // We need to be careful in the case where src1 points to a string builder and is getting aliased.
  4053. // Clear the bit on src and dst of the transfer instr in this case, unless we can prove src1
  4054. // isn't pointing at a string builder, like if it is single def and the def instr is not an Add,
  4055. // but TempProducing.
  4056. if (src1Sym->IsSingleDef() && src1Sym->m_instrDef->m_opcode != Js::OpCode::Add_A
  4057. && OpCodeAttr::TempNumberProducing(src1Sym->m_instrDef->m_opcode))
  4058. {
  4059. CurrentBlockData()->isTempSrc->Set(sym->m_id);
  4060. }
  4061. else
  4062. {
  4063. CurrentBlockData()->isTempSrc->Clear(src1->AsRegOpnd()->m_sym->m_id);
  4064. CurrentBlockData()->isTempSrc->Clear(sym->m_id);
  4065. }
  4066. }
  4067. else
  4068. {
  4069. CurrentBlockData()->isTempSrc->Clear(sym->m_id);
  4070. }
  4071. }
  4072. else
  4073. {
  4074. CurrentBlockData()->isTempSrc->Clear(sym->m_id);
  4075. }
  4076. break;
  4077. case IR::OpndKindIndir:
  4078. return nullptr;
  4079. default:
  4080. return nullptr;
  4081. }
  4082. int32 min1, max1, min2, max2, newMin, newMax;
  4083. ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
  4084. ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
  4085. switch (instr->m_opcode)
  4086. {
  4087. case Js::OpCode::Conv_PrimStr:
  4088. AssertMsg(instr->GetDst()->GetValueType().IsString(),
  4089. "Creator of this instruction should have set the type");
  4090. if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsPrimitive())
  4091. {
  4092. break;
  4093. }
  4094. instr->m_opcode = Js::OpCode::Conv_Str;
  4095. // fall-through
  4096. case Js::OpCode::Conv_Str:
  4097. // This opcode is commented out since we don't track regex information in GlobOpt now.
  4098. //case Js::OpCode::Coerce_Regex:
  4099. case Js::OpCode::Coerce_Str:
  4100. AssertMsg(instr->GetDst()->GetValueType().IsString(),
  4101. "Creator of this instruction should have set the type");
  4102. // fall-through
  4103. case Js::OpCode::Coerce_StrOrRegex:
  4104. // We don't set the ValueType of src1 for Coerce_StrOrRegex, hence skip the ASSERT
  4105. if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsString())
  4106. {
  4107. break;
  4108. }
  4109. instr->m_opcode = Js::OpCode::Ld_A;
  4110. // fall-through
  4111. case Js::OpCode::BytecodeArgOutCapture:
  4112. case Js::OpCode::InitConst:
  4113. case Js::OpCode::LdAsmJsFunc:
  4114. case Js::OpCode::Ld_A:
  4115. case Js::OpCode::Ld_I4:
  4116. // Propagate sym attributes across the reg copy.
  4117. if (!this->IsLoopPrePass() && instr->GetSrc1()->IsRegOpnd())
  4118. {
  4119. if (dst->AsRegOpnd()->m_sym->IsSingleDef())
  4120. {
  4121. dst->AsRegOpnd()->m_sym->CopySymAttrs(instr->GetSrc1()->AsRegOpnd()->m_sym);
  4122. }
  4123. }
  4124. if (instr->IsProfiledInstr())
  4125. {
  4126. const ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
  4127. if(!(
  4128. profiledValueType.IsLikelyInt() &&
  4129. (
  4130. (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotNumber) ||
  4131. (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber)
  4132. )
  4133. ))
  4134. {
  4135. if(!src1ValueInfo)
  4136. {
  4137. dstVal = this->NewGenericValue(profiledValueType, dst);
  4138. }
  4139. else if(src1ValueInfo->IsUninitialized())
  4140. {
  4141. if(IsLoopPrePass())
  4142. {
  4143. dstVal = this->NewGenericValue(profiledValueType, dst);
  4144. }
  4145. else
  4146. {
  4147. // Assuming the profile data gives more precise value types based on the path it took at runtime, we
  4148. // can improve the original value type.
  4149. src1ValueInfo->Type() = profiledValueType;
  4150. instr->GetSrc1()->SetValueType(profiledValueType);
  4151. }
  4152. }
  4153. }
  4154. }
  4155. if (dstVal == nullptr)
  4156. {
  4157. // Ld_A is just transferring the value
  4158. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  4159. }
  4160. break;
  4161. case Js::OpCode::ExtendArg_A:
  4162. {
  4163. // SIMD_JS
  4164. // We avoid transforming EAs to Lds to keep the IR shape consistent and avoid CSEing of EAs.
  4165. // CSEOptimize only assigns a Value to the EA dst, and doesn't turn it to a Ld. If this happened, we shouldn't assign a new Value here.
  4166. if (DoCSE())
  4167. {
  4168. IR::Opnd * currDst = instr->GetDst();
  4169. Value * currDstVal = CurrentBlockData()->FindValue(currDst->GetStackSym());
  4170. if (currDstVal != nullptr)
  4171. {
  4172. return currDstVal;
  4173. }
  4174. }
  4175. break;
  4176. }
  4177. case Js::OpCode::CheckFixedFld:
  4178. AssertMsg(false, "CheckFixedFld doesn't have a dst, so we should never get here");
  4179. break;
  4180. case Js::OpCode::LdSlot:
  4181. case Js::OpCode::LdSlotArr:
  4182. case Js::OpCode::LdFld:
  4183. case Js::OpCode::LdFldForTypeOf:
  4184. case Js::OpCode::LdFldForCallApplyTarget:
  4185. // Do not transfer value type on LdRootFldForTypeOf to prevent copy-prop to LdRootFld in case the field doesn't exist since LdRootFldForTypeOf does not throw.
  4186. // Same goes for ScopedLdFldForTypeOf as we'll end up loading the property from the root object if the property is not in the scope chain.
  4187. //case Js::OpCode::LdRootFldForTypeOf:
  4188. //case Js::OpCode::ScopedLdFldForTypeOf:
  4189. case Js::OpCode::LdRootFld:
  4190. case Js::OpCode::LdMethodFld:
  4191. case Js::OpCode::LdRootMethodFld:
  4192. case Js::OpCode::ScopedLdMethodFld:
  4193. case Js::OpCode::LdMethodFromFlags:
  4194. case Js::OpCode::ScopedLdFld:
  4195. if (instr->IsProfiledInstr())
  4196. {
  4197. ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
  4198. if(!(profiledValueType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotNumber))
  4199. {
  4200. if(!src1ValueInfo)
  4201. {
  4202. dstVal = this->NewGenericValue(profiledValueType, dst);
  4203. }
  4204. else if(src1ValueInfo->IsUninitialized())
  4205. {
  4206. if(IsLoopPrePass() && (!dst->IsRegOpnd() || !dst->AsRegOpnd()->m_sym->IsSingleDef()))
  4207. {
  4208. dstVal = this->NewGenericValue(profiledValueType, dst);
  4209. }
  4210. else
  4211. {
  4212. // Assuming the profile data gives more precise value types based on the path it took at runtime, we
  4213. // can improve the original value type.
  4214. src1ValueInfo->Type() = profiledValueType;
  4215. instr->GetSrc1()->SetValueType(profiledValueType);
  4216. }
  4217. }
  4218. }
  4219. }
  4220. if (dstVal == nullptr)
  4221. {
  4222. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  4223. }
  4224. if(!this->IsLoopPrePass())
  4225. {
  4226. // We cannot transfer value if the field hasn't been copy prop'd because we don't generate
  4227. // an implicit call bailout between those values if we don't have "live fields" unless, we are hoisting the field.
  4228. ValueInfo *dstValueInfo = (dstVal ? dstVal->GetValueInfo() : nullptr);
  4229. // Update symStore if it isn't a stackSym
  4230. if (dstVal && (!dstValueInfo->GetSymStore() || !dstValueInfo->GetSymStore()->IsStackSym()))
  4231. {
  4232. Assert(dst->IsRegOpnd());
  4233. this->SetSymStoreDirect(dstValueInfo, dst->AsRegOpnd()->m_sym);
  4234. }
  4235. if (src1Val != dstVal)
  4236. {
  4237. CurrentBlockData()->SetValue(dstVal, instr->GetSrc1());
  4238. }
  4239. }
  4240. break;
  4241. case Js::OpCode::LdC_A_R8:
  4242. case Js::OpCode::LdC_A_I4:
  4243. case Js::OpCode::ArgIn_A:
  4244. dstVal = src1Val;
  4245. break;
  4246. case Js::OpCode::LdStr:
  4247. if (src1Val == nullptr)
  4248. {
  4249. src1Val = NewGenericValue(ValueType::String, dst);
  4250. }
  4251. dstVal = src1Val;
  4252. break;
  4253. // LdElemUndef only assign undef if the field doesn't exist.
  4254. // So we don't actually know what the value is, so we can't really copy prop it.
  4255. //case Js::OpCode::LdElemUndef:
  4256. case Js::OpCode::StSlot:
  4257. case Js::OpCode::StSlotChkUndecl:
  4258. case Js::OpCode::StFld:
  4259. case Js::OpCode::StRootFld:
  4260. case Js::OpCode::StFldStrict:
  4261. case Js::OpCode::StRootFldStrict:
  4262. case Js::OpCode::InitFld:
  4263. case Js::OpCode::InitComputedProperty:
  4264. if (DoFieldCopyProp())
  4265. {
  4266. if (src1Val == nullptr)
  4267. {
  4268. // src1 may have no value if it's not a valid var, e.g., NULL for let/const initialization.
  4269. // Consider creating generic values for such things.
  4270. return nullptr;
  4271. }
  4272. AssertMsg(!src2Val, "Bad src Values...");
  4273. Assert(sym->IsPropertySym());
  4274. SymID symId = sym->m_id;
  4275. Assert(instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl || !CurrentBlockData()->liveFields->Test(symId));
  4276. CurrentBlockData()->liveFields->Set(symId);
  4277. if (!this->IsLoopPrePass() && dst->GetIsDead())
  4278. {
  4279. // Take the property sym out of the live fields set (with special handling for loops).
  4280. this->EndFieldLifetime(dst->AsSymOpnd());
  4281. }
  4282. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  4283. }
  4284. else
  4285. {
  4286. return nullptr;
  4287. }
  4288. break;
  4289. case Js::OpCode::Conv_Num:
  4290. if(src1ValueInfo->IsNumber())
  4291. {
  4292. dstVal = ValueNumberTransferDst(instr, src1Val);
  4293. }
  4294. else
  4295. {
  4296. return NewGenericValue(src1ValueInfo->Type().ToDefiniteAnyNumber(), dst);
  4297. }
  4298. break;
  4299. case Js::OpCode::Not_A:
  4300. {
  4301. if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
  4302. {
  4303. min1 = INT32_MIN;
  4304. max1 = INT32_MAX;
  4305. }
  4306. this->PropagateIntRangeForNot(min1, max1, &newMin, &newMax);
  4307. return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  4308. }
  4309. case Js::OpCode::Xor_A:
  4310. case Js::OpCode::Or_A:
  4311. case Js::OpCode::And_A:
  4312. case Js::OpCode::Shl_A:
  4313. case Js::OpCode::Shr_A:
  4314. case Js::OpCode::ShrU_A:
  4315. {
  4316. if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
  4317. {
  4318. min1 = INT32_MIN;
  4319. max1 = INT32_MAX;
  4320. }
  4321. if (!src2Val || !src2ValueInfo->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec()))
  4322. {
  4323. min2 = INT32_MIN;
  4324. max2 = INT32_MAX;
  4325. }
  4326. if (instr->m_opcode == Js::OpCode::ShrU_A &&
  4327. min1 < 0 &&
  4328. IntConstantBounds(min2, max2).And_0x1f().Contains(0))
  4329. {
  4330. // Src1 may be too large to represent as a signed int32, and src2 may be zero.
  4331. // Since the result can therefore be too large to represent as a signed int32,
  4332. // include Number in the value type.
  4333. return CreateDstUntransferredValue(
  4334. ValueType::AnyNumber.SetCanBeTaggedValue(true), instr, src1Val, src2Val);
  4335. }
  4336. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  4337. return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  4338. }
  4339. case Js::OpCode::Incr_A:
  4340. case Js::OpCode::Decr_A:
  4341. {
  4342. ValueType valueType;
  4343. if(src1Val)
  4344. {
  4345. valueType = src1Val->GetValueInfo()->Type().ToDefiniteAnyNumber();
  4346. }
  4347. else
  4348. {
  4349. valueType = ValueType::Number;
  4350. }
  4351. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  4352. }
  4353. case Js::OpCode::Add_A:
  4354. {
  4355. ValueType valueType;
  4356. if (src1Val && src1ValueInfo->IsLikelyNumber() && src2Val && src2ValueInfo->IsLikelyNumber())
  4357. {
  4358. if(src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())
  4359. {
  4360. // When doing aggressiveIntType, just assume the result is likely going to be int
  4361. // if both input is int.
  4362. const bool isLikelyTagged = src1ValueInfo->IsLikelyTaggedInt() && src2ValueInfo->IsLikelyTaggedInt();
  4363. if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  4364. {
  4365. // If both of them are numbers then we can definitely say that the result is a number.
  4366. valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
  4367. }
  4368. else
  4369. {
  4370. // This is only likely going to be int but can be a string as well.
  4371. valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
  4372. }
  4373. }
  4374. else
  4375. {
  4376. // We can only be certain of any thing if both of them are numbers.
  4377. // Otherwise, the result could be string.
  4378. if (src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  4379. {
  4380. if (src1ValueInfo->IsFloat() || src2ValueInfo->IsFloat())
  4381. {
  4382. // If one of them is a float, the result probably is a float instead of just int
  4383. // but should always be a number.
  4384. valueType = ValueType::Float;
  4385. }
  4386. else
  4387. {
  4388. // Could be int, could be number
  4389. valueType = ValueType::Number;
  4390. }
  4391. }
  4392. else if (src1ValueInfo->IsLikelyFloat() || src2ValueInfo->IsLikelyFloat())
  4393. {
  4394. // Result is likely a float (but can be anything)
  4395. valueType = ValueType::Float.ToLikely();
  4396. }
  4397. else
  4398. {
  4399. // Otherwise it is a likely int or float (but can be anything)
  4400. valueType = ValueType::Number.ToLikely();
  4401. }
  4402. }
  4403. }
  4404. else if((src1Val && src1ValueInfo->IsString()) || (src2Val && src2ValueInfo->IsString()))
  4405. {
  4406. // String + anything should always result in a string
  4407. valueType = ValueType::String;
  4408. }
  4409. else if((src1Val && src1ValueInfo->IsNotString() && src1ValueInfo->IsPrimitive())
  4410. && (src2Val && src2ValueInfo->IsNotString() && src2ValueInfo->IsPrimitive()))
  4411. {
  4412. // If src1 and src2 are not strings and primitive, add should yield a number.
  4413. valueType = ValueType::Number;
  4414. }
  4415. else if((src1Val && src1ValueInfo->IsLikelyString()) || (src2Val && src2ValueInfo->IsLikelyString()))
  4416. {
  4417. // likelystring + anything should always result in a likelystring
  4418. valueType = ValueType::String.ToLikely();
  4419. }
  4420. else
  4421. {
  4422. // Number or string. Could make the value a merge of Number and String, but Uninitialized is more useful at the moment.
  4423. Assert(valueType.IsUninitialized());
  4424. }
  4425. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  4426. }
  4427. case Js::OpCode::Div_A:
  4428. {
  4429. ValueType divValueType = GetDivValueType(instr, src1Val, src2Val, false);
  4430. if (divValueType.IsLikelyInt() || divValueType.IsFloat())
  4431. {
  4432. return CreateDstUntransferredValue(divValueType, instr, src1Val, src2Val);
  4433. }
  4434. }
  4435. // fall-through
  4436. case Js::OpCode::Sub_A:
  4437. case Js::OpCode::Mul_A:
  4438. case Js::OpCode::Rem_A:
  4439. {
  4440. ValueType valueType;
  4441. if( src1Val &&
  4442. src1ValueInfo->IsLikelyInt() &&
  4443. src2Val &&
  4444. src2ValueInfo->IsLikelyInt() &&
  4445. instr->m_opcode != Js::OpCode::Div_A)
  4446. {
  4447. const bool isLikelyTagged =
  4448. src1ValueInfo->IsLikelyTaggedInt() && (src2ValueInfo->IsLikelyTaggedInt() || instr->m_opcode == Js::OpCode::Rem_A);
  4449. if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  4450. {
  4451. valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
  4452. }
  4453. else
  4454. {
  4455. valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
  4456. }
  4457. }
  4458. else if ((src1Val && src1ValueInfo->IsLikelyFloat()) || (src2Val && src2ValueInfo->IsLikelyFloat()))
  4459. {
  4460. // This should ideally be NewNumberAndLikelyFloatValue since we know the result is a number but not sure if it will
  4461. // be a float value. However, that Number/LikelyFloat value type doesn't exist currently and all the necessary
  4462. // checks are done for float values (tagged int checks, etc.) so it's sufficient to just create a float value here.
  4463. valueType = ValueType::Float;
  4464. }
  4465. else
  4466. {
  4467. valueType = ValueType::Number;
  4468. }
  4469. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  4470. }
  4471. case Js::OpCode::CallI:
  4472. Assert(dst->IsRegOpnd());
  4473. return NewGenericValue(dst->AsRegOpnd()->GetValueType(), dst);
  4474. case Js::OpCode::LdElemI_A:
  4475. {
  4476. dstVal = ValueNumberLdElemDst(pInstr, src1Val);
  4477. const ValueType baseValueType(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  4478. if( (
  4479. baseValueType.IsLikelyNativeArray() ||
  4480. #ifdef _M_IX86
  4481. (
  4482. !AutoSystemInfo::Data.SSE2Available() &&
  4483. baseValueType.IsLikelyObject() &&
  4484. (
  4485. baseValueType.GetObjectType() == ObjectType::Float32Array ||
  4486. baseValueType.GetObjectType() == ObjectType::Float64Array
  4487. )
  4488. )
  4489. #else
  4490. false
  4491. #endif
  4492. ) &&
  4493. instr->GetDst()->IsVar() &&
  4494. instr->HasBailOutInfo())
  4495. {
  4496. // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
  4497. // path. Note that the removed bailouts should not be necessary for correctness.
  4498. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  4499. if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  4500. {
  4501. bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
  4502. }
  4503. if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
  4504. {
  4505. bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
  4506. }
  4507. if(bailOutKind)
  4508. {
  4509. instr->SetBailOutKind(bailOutKind);
  4510. }
  4511. else
  4512. {
  4513. instr->ClearBailOutInfo();
  4514. }
  4515. }
  4516. return dstVal;
  4517. }
  4518. case Js::OpCode::LdMethodElem:
  4519. // Not worth profiling this, just assume it's likely object (should be likely function but ValueType does not track
  4520. // functions currently, so using ObjectType::Object instead)
  4521. dstVal = NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely(), dst);
  4522. if(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray() && instr->HasBailOutInfo())
  4523. {
  4524. // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
  4525. // path. Note that the removed bailouts should not be necessary for correctness.
  4526. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  4527. if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  4528. {
  4529. bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
  4530. }
  4531. if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
  4532. {
  4533. bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
  4534. }
  4535. if(bailOutKind)
  4536. {
  4537. instr->SetBailOutKind(bailOutKind);
  4538. }
  4539. else
  4540. {
  4541. instr->ClearBailOutInfo();
  4542. }
  4543. }
  4544. return dstVal;
  4545. case Js::OpCode::StElemI_A:
  4546. case Js::OpCode::StElemI_A_Strict:
  4547. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  4548. break;
  4549. case Js::OpCode::LdLen_A:
  4550. if (instr->IsProfiledInstr())
  4551. {
  4552. const ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
  4553. if(!(profiledValueType.IsLikelyInt() && dst->AsRegOpnd()->m_sym->m_isNotNumber))
  4554. {
  4555. return this->NewGenericValue(profiledValueType, dst);
  4556. }
  4557. }
  4558. break;
  4559. case Js::OpCode::BrOnEmpty:
  4560. case Js::OpCode::BrOnNotEmpty:
  4561. Assert(dst->IsRegOpnd());
  4562. Assert(dst->GetValueType().IsString());
  4563. return this->NewGenericValue(ValueType::String, dst);
  4564. case Js::OpCode::IsInst:
  4565. case Js::OpCode::LdTrue:
  4566. case Js::OpCode::LdFalse:
  4567. case Js::OpCode::CmEq_A:
  4568. case Js::OpCode::CmSrEq_A:
  4569. case Js::OpCode::CmNeq_A:
  4570. case Js::OpCode::CmSrNeq_A:
  4571. case Js::OpCode::CmLe_A:
  4572. case Js::OpCode::CmUnLe_A:
  4573. case Js::OpCode::CmLt_A:
  4574. case Js::OpCode::CmUnLt_A:
  4575. case Js::OpCode::CmGe_A:
  4576. case Js::OpCode::CmUnGe_A:
  4577. case Js::OpCode::CmGt_A:
  4578. case Js::OpCode::CmUnGt_A:
  4579. return this->NewGenericValue(ValueType::Boolean, dst);
  4580. case Js::OpCode::LdUndef:
  4581. return this->NewGenericValue(ValueType::Undefined, dst);
  4582. case Js::OpCode::LdC_A_Null:
  4583. return this->NewGenericValue(ValueType::Null, dst);
  4584. case Js::OpCode::LdThis:
  4585. if (!PHASE_OFF(Js::OptTagChecksPhase, this->func) &&
  4586. (src1ValueInfo == nullptr || src1ValueInfo->IsUninitialized()))
  4587. {
  4588. return this->NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely().SetCanBeTaggedValue(false), dst);
  4589. }
  4590. break;
  4591. case Js::OpCode::Typeof:
  4592. case Js::OpCode::TypeofElem:
  4593. return this->NewGenericValue(ValueType::String, dst);
  4594. case Js::OpCode::InitLocalClosure:
  4595. Assert(instr->GetDst());
  4596. Assert(instr->GetDst()->IsRegOpnd());
  4597. IR::RegOpnd *regOpnd = instr->GetDst()->AsRegOpnd();
  4598. StackSym *opndStackSym = regOpnd->m_sym;
  4599. Assert(opndStackSym != nullptr);
  4600. ObjectSymInfo *objectSymInfo = opndStackSym->m_objectInfo;
  4601. Assert(objectSymInfo != nullptr);
  4602. for (PropertySym *localVarSlotList = objectSymInfo->m_propertySymList; localVarSlotList; localVarSlotList = localVarSlotList->m_nextInStackSymList)
  4603. {
  4604. this->slotSyms->Set(localVarSlotList->m_id);
  4605. }
  4606. break;
  4607. }
  4608. if (dstVal == nullptr)
  4609. {
  4610. return this->NewGenericValue(dst->GetValueType(), dst);
  4611. }
  4612. return CurrentBlockData()->SetValue(dstVal, dst);
  4613. }
// Produces the destination value for an element load (LdElemI_A and friends).
// When the base is a typed array or a profiled native array and type
// specialization is enabled, this int- or float-specializes the dst and attaches
// the appropriate conventional-array-access bailout; otherwise it falls back to
// transferring the source value (or a profile-derived generic value).
// NOTE: *pInstr may be replaced when GenerateBailAtOperation inserts a bailout,
// which is why the instr is taken by pointer-to-pointer and held by reference.
Value *
GlobOpt::ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *dst = instr->GetDst();
    Value *dstVal = nullptr;
    int32 newMin, newMax;  // int range used for int-typed element loads
    ValueInfo *srcValueInfo = (srcVal ? srcVal->GetValueInfo() : nullptr);

    ValueType profiledElementType;
    if (instr->IsProfiledInstr())
    {
        profiledElementType = instr->AsProfiledInstr()->u.ldElemInfo->GetElementType();
        // Skip the profile hint when it claims "likely int" but the dst sym is
        // already known not to be a number; otherwise, if the source value is
        // still uninitialized, refine it from the profile.
        if(!(profiledElementType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotNumber) &&
            srcVal &&
            srcValueInfo->IsUninitialized())
        {
            if(IsLoopPrePass())
            {
                // In the prepass, don't mutate the existing value; just give the
                // dst a fresh value of the profiled type.
                dstVal = NewGenericValue(profiledElementType, dst);
            }
            else
            {
                // Assuming the profile data gives more precise value types based on the path it took at runtime, we
                // can improve the original value type.
                srcValueInfo->Type() = profiledElementType;
                instr->GetSrc1()->SetValueType(profiledElementType);
            }
        }
    }

    IR::IndirOpnd *src = instr->GetSrc1()->AsIndirOpnd();
    const ValueType baseValueType(src->GetBaseOpnd()->GetValueType());
    // Bail out of specialization when: the instr uses the stack-args optimization,
    // the base is neither an optimized typed array nor a profiled native array,
    // the relevant type-spec phase is disabled, or the index isn't expected to be
    // a conventional (non-negative int) array index.
    if (instr->DoStackArgsOpt() ||
        !(
            baseValueType.IsLikelyOptimizedTypedArray() ||
            (baseValueType.IsLikelyNativeArray() && instr->IsProfiledInstr()) // Specialized native array lowering for LdElem requires that it is profiled.
        ) ||
        (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
        // Don't do type spec on native array with a history of accessing gaps, as this is a bailout
        (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
        !ShouldExpectConventionalArrayIndexValue(src))
    {
        // Not specializing this access. Trace the reason, then transfer the
        // source value (or fall back to a profile-typed generic value).
        if(DoTypedArrayTypeSpec() && !IsLoopPrePass())
        {
            GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access.\n"));
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because %s.\n"),
                    this->func->GetJITFunctionBody()->GetDisplayName(),
                    this->func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                    baseValueTypeStr,
                    instr->DoStackArgsOpt() ? _u("instruction uses the arguments object") :
                    baseValueType.IsLikelyOptimizedTypedArray() ? _u("index is negative or likely not int") : _u("of array type"));
                Output::Flush();
            }
        }

        if(!dstVal)
        {
            if(srcVal)
            {
                dstVal = this->ValueNumberTransferDst(instr, srcVal);
            }
            else
            {
                dstVal = NewGenericValue(profiledElementType, dst);
            }
        }
        return dstVal;
    }

    Assert(instr->GetSrc1()->IsIndirOpnd());

    IRType toType = TyVar;
    IR::BailOutKind bailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;

    // Dispatch on the array's object type to pick the element range (for int
    // arrays) or float specialization. The Int32Array:/Float64Array: labels are
    // also goto targets for the native-array default case below.
    switch(baseValueType.GetObjectType())
    {
    case ObjectType::Int8Array:
    case ObjectType::Int8VirtualArray:
    case ObjectType::Int8MixedArray:
        newMin = Int8ConstMin;
        newMax = Int8ConstMax;
        goto IntArrayCommon;

    case ObjectType::Uint8Array:
    case ObjectType::Uint8VirtualArray:
    case ObjectType::Uint8MixedArray:
    case ObjectType::Uint8ClampedArray:
    case ObjectType::Uint8ClampedVirtualArray:
    case ObjectType::Uint8ClampedMixedArray:
        newMin = Uint8ConstMin;
        newMax = Uint8ConstMax;
        goto IntArrayCommon;

    case ObjectType::Int16Array:
    case ObjectType::Int16VirtualArray:
    case ObjectType::Int16MixedArray:
        newMin = Int16ConstMin;
        newMax = Int16ConstMax;
        goto IntArrayCommon;

    case ObjectType::Uint16Array:
    case ObjectType::Uint16VirtualArray:
    case ObjectType::Uint16MixedArray:
        newMin = Uint16ConstMin;
        newMax = Uint16ConstMax;
        goto IntArrayCommon;

    case ObjectType::Int32Array:
    case ObjectType::Int32VirtualArray:
    case ObjectType::Int32MixedArray:
    case ObjectType::Uint32Array: // int-specialized loads from uint32 arrays will bail out on values that don't fit in an int32
    case ObjectType::Uint32VirtualArray:
    case ObjectType::Uint32MixedArray:
    Int32Array:
        newMin = Int32ConstMin;
        newMax = Int32ConstMax;
        goto IntArrayCommon;

    IntArrayCommon:
        Assert(dst->IsRegOpnd());

        // If int type spec is disabled, it is ok to load int values as they can help float type spec, and merging int32 with float64 => float64.
        // But if float type spec is also disabled, we'll have problems because float64 merged with var => float64...
        if (!this->DoAggressiveIntTypeSpec() && !this->DoFloatTypeSpec())
        {
            if (!dstVal)
            {
                if (srcVal)
                {
                    dstVal = this->ValueNumberTransferDst(instr, srcVal);
                }
                else
                {
                    dstVal = NewGenericValue(profiledElementType, dst);
                }
            }
            return dstVal;
        }

        if (!this->IsLoopPrePass())
        {
            if (instr->HasBailOutInfo())
            {
                const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
                // The pre-existing bailout must be (at most) BailOutOnImplicitCallsPreOp
                // plus the array-access-helper-call / mark-temp-object bits.
                Assert(
                    (
                        !(oldBailOutKind & ~IR::BailOutKindBits) ||
                        (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                    ) &&
                    !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
                if (bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
                {
                    // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                    // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                    // bails out for the right reason.
                    instr->SetBailOutKind(
                        bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
                }
                else
                {
                    // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                    // calls to occur, so it must be merged in to eliminate generating the helper call
                    Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                    instr->SetBailOutKind(oldBailOutKind | bailOutKind);
                }
            }
            else
            {
                // No bailout yet on the instr: attach one (this may replace instr).
                GenerateBailAtOperation(&instr, bailOutKind);
            }
        }

        TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, bailOutKind, newMin, newMax, &dstVal);
        toType = TyInt32;
        break;

    case ObjectType::Float32Array:
    case ObjectType::Float32VirtualArray:
    case ObjectType::Float32MixedArray:
    case ObjectType::Float64Array:
    case ObjectType::Float64VirtualArray:
    case ObjectType::Float64MixedArray:
    Float64Array:
        Assert(dst->IsRegOpnd());

        // If float type spec is disabled, don't load float64 values
        if (!this->DoFloatTypeSpec())
        {
            if (!dstVal)
            {
                if (srcVal)
                {
                    dstVal = this->ValueNumberTransferDst(instr, srcVal);
                }
                else
                {
                    dstVal = NewGenericValue(profiledElementType, dst);
                }
            }
            return dstVal;
        }

        if (!this->IsLoopPrePass())
        {
            if (instr->HasBailOutInfo())
            {
                const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
                // Same constraints on the pre-existing bailout as in the int case above.
                Assert(
                    (
                        !(oldBailOutKind & ~IR::BailOutKindBits) ||
                        (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                    ) &&
                    !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
                if (bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
                {
                    // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                    // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                    // bails out for the right reason.
                    instr->SetBailOutKind(
                        bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
                }
                else
                {
                    // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                    // calls to occur, so it must be merged in to eliminate generating the helper call
                    Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                    instr->SetBailOutKind(oldBailOutKind | bailOutKind);
                }
            }
            else
            {
                GenerateBailAtOperation(&instr, bailOutKind);
            }
        }

        TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, &dstVal);
        toType = TyFloat64;
        break;

    default:
        // Native (non-typed) array: switch to the native-array bailout kind and
        // reuse the int/float paths above based on the element kind.
        Assert(baseValueType.IsLikelyNativeArray());
        bailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
        if(baseValueType.HasIntElements())
        {
            goto Int32Array;
        }
        Assert(baseValueType.HasFloatElements());
        goto Float64Array;
    }

    if(!dstVal)
    {
        dstVal = NewGenericValue(profiledElementType, dst);
    }
    Assert(toType != TyVar);

    GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
    if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        char dstValTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        dstVal->GetValueInfo()->Type().ToString(dstValTypeStr);
        Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s producing %S"),
            this->func->GetJITFunctionBody()->GetDisplayName(),
            this->func->GetDebugNumberSet(debugStringBuffer),
            Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
            baseValueTypeStr,
            toType == TyInt32 ? _u("int32") : _u("float64"),
            dstValTypeStr);
#if DBG_DUMP
        Output::Print(_u(" ("));
        dstVal->Dump();
        Output::Print(_u(").\n"));
#else
        Output::Print(_u(".\n"));
#endif
        Output::Flush();
    }

    return dstVal;
}
  4882. ValueType
  4883. GlobOpt::GetPrepassValueTypeForDst(
  4884. const ValueType desiredValueType,
  4885. IR::Instr *const instr,
  4886. Value *const src1Value,
  4887. Value *const src2Value,
  4888. bool const isValueInfoPrecise) const
  4889. {
  4890. // Values with definite types can be created in the loop prepass only when it is guaranteed that the value type will be the
  4891. // same on any iteration of the loop. The heuristics currently used are:
  4892. // - If the source sym is not live on the back-edge, then it acquires a new value for each iteration of the loop, so
  4893. // that value type can be definite
  4894. // - Consider: A better solution for this is to track values that originate in this loop, which can have definite value
  4895. // types. That catches more cases, should look into that in the future.
  4896. // - If the source sym has a constant value that doesn't change for the duration of the function
  4897. // - The operation always results in a definite value type. For instance, signed bitwise operations always result in an
  4898. // int32, conv_num and ++ always result in a number, etc.
  4899. // - For operations that always result in an int32, the resulting int range is precise only if the source syms pass
  4900. // the above heuristics. Otherwise, the range must be expanded to the full int32 range.
  4901. Assert(IsLoopPrePass());
  4902. Assert(instr);
  4903. if(!desiredValueType.IsDefinite())
  4904. {
  4905. return desiredValueType;
  4906. }
  4907. if(!isValueInfoPrecise)
  4908. {
  4909. // If the desired value type is not precise, the value type of the destination is derived from the value types of the
  4910. // sources. Since the value type of a source sym is not definite, the destination value type also cannot be definite.
  4911. if(desiredValueType.IsInt() && OpCodeAttr::IsInt32(instr->m_opcode))
  4912. {
  4913. // The op always produces an int32, but not always a tagged int
  4914. return ValueType::GetInt(desiredValueType.IsLikelyTaggedInt());
  4915. }
  4916. if(desiredValueType.IsNumber() && OpCodeAttr::ProducesNumber(instr->m_opcode))
  4917. {
  4918. // The op always produces a number, but not always an int
  4919. return desiredValueType.ToDefiniteAnyNumber();
  4920. }
  4921. return desiredValueType.ToLikely();
  4922. }
  4923. return desiredValueType;
  4924. }
  4925. bool
  4926. GlobOpt::IsPrepassSrcValueInfoPrecise(IR::Instr *const instr, Value *const src1Value, Value *const src2Value, bool * isSafeToTransferInPrepass) const
  4927. {
  4928. return
  4929. (!instr->GetSrc1() || IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Value, isSafeToTransferInPrepass)) &&
  4930. (!instr->GetSrc2() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Value, isSafeToTransferInPrepass));
  4931. }
  4932. bool
  4933. GlobOpt::IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue, bool * isSafeToTransferInPrepass) const
  4934. {
  4935. Assert(IsLoopPrePass());
  4936. Assert(src);
  4937. if (isSafeToTransferInPrepass)
  4938. {
  4939. *isSafeToTransferInPrepass = false;
  4940. }
  4941. if (src->IsAddrOpnd() &&
  4942. srcValue->GetValueInfo()->GetSymStore() &&
  4943. srcValue->GetValueInfo()->GetSymStore()->IsStackSym() &&
  4944. srcValue->GetValueInfo()->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable())
  4945. {
  4946. if (isSafeToTransferInPrepass)
  4947. {
  4948. *isSafeToTransferInPrepass = false;
  4949. }
  4950. return true;
  4951. }
  4952. if (!src->IsRegOpnd() || !srcValue)
  4953. {
  4954. return false;
  4955. }
  4956. ValueInfo *const srcValueInfo = srcValue->GetValueInfo();
  4957. bool isValueInfoDefinite = srcValueInfo->IsDefinite();
  4958. StackSym * srcSym = src->AsRegOpnd()->m_sym;
  4959. bool isSafeToTransfer = IsSafeToTransferInPrepass(srcSym, srcValueInfo);
  4960. if (isSafeToTransferInPrepass)
  4961. {
  4962. *isSafeToTransferInPrepass = isSafeToTransfer;
  4963. }
  4964. return isValueInfoDefinite && isSafeToTransfer;
  4965. }
  4966. bool
  4967. GlobOpt::IsSafeToTransferInPrepass(StackSym * const srcSym, ValueInfo *const srcValueInfo) const
  4968. {
  4969. int32 intConstantValue;
  4970. return
  4971. srcSym->IsFromByteCodeConstantTable() ||
  4972. (
  4973. srcValueInfo->TryGetIntConstantValue(&intConstantValue) &&
  4974. !Js::TaggedInt::IsOverflow(intConstantValue) &&
  4975. GetTaggedIntConstantStackSym(intConstantValue) == srcSym
  4976. ) ||
  4977. !currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(srcSym->m_id) ||
  4978. !currentBlock->loop->IsSymAssignedToInSelfOrParents(srcSym);
  4979. }
  4980. bool
  4981. GlobOpt::SafeToCopyPropInPrepass(StackSym * const originalSym, StackSym * const copySym, Value *const value) const
  4982. {
  4983. Assert(this->currentBlock->globOptData.GetCopyPropSym(originalSym, value) == copySym);
  4984. // In the following example, to copy-prop s2 into s1, it is not enough to check if s1 and s2 are safe to transfer.
  4985. // In fact, both s1 and s2 are safe to transfer, but it is not legal to copy prop s2 into s1.
  4986. //
  4987. // s1 = s2
  4988. // $Loop:
  4989. // s3 = s1
  4990. // s2 = s4
  4991. // Br $Loop
  4992. //
  4993. // In general, requirements for copy-propping in prepass are more restricted than those for transferring values.
  4994. // For copy prop in prepass, if the original sym is live on back-edge, then the copy-prop sym should not be written to
  4995. // in the loop (or its parents)
  4996. ValueInfo* const valueInfo = value->GetValueInfo();
  4997. return IsSafeToTransferInPrepass(originalSym, valueInfo) &&
  4998. IsSafeToTransferInPrepass(copySym, valueInfo) &&
  4999. (!currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(originalSym->m_id) || !currentBlock->loop->IsSymAssignedToInSelfOrParents(copySym));
  5000. }
  5001. Value *GlobOpt::CreateDstUntransferredIntValue(
  5002. const int32 min,
  5003. const int32 max,
  5004. IR::Instr *const instr,
  5005. Value *const src1Value,
  5006. Value *const src2Value)
  5007. {
  5008. Assert(instr);
  5009. Assert(instr->GetDst());
  5010. Assert(OpCodeAttr::ProducesNumber(instr->m_opcode)
  5011. || (instr->m_opcode == Js::OpCode::Add_A && src1Value->GetValueInfo()->IsNumber()
  5012. && src2Value->GetValueInfo()->IsNumber()));
  5013. ValueType valueType(ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  5014. Assert(valueType.IsInt());
  5015. bool isValueInfoPrecise;
  5016. if(IsLoopPrePass())
  5017. {
  5018. isValueInfoPrecise = IsPrepassSrcValueInfoPrecise(instr, src1Value, src2Value);
  5019. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, isValueInfoPrecise);
  5020. }
  5021. else
  5022. {
  5023. isValueInfoPrecise = true;
  5024. }
  5025. IR::Opnd *const dst = instr->GetDst();
  5026. if(isValueInfoPrecise)
  5027. {
  5028. Assert(valueType == ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  5029. Assert(!(dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsTypeSpec()));
  5030. return NewIntRangeValue(min, max, false, dst);
  5031. }
  5032. return NewGenericValue(valueType, dst);
  5033. }
  5034. Value *
  5035. GlobOpt::CreateDstUntransferredValue(
  5036. const ValueType desiredValueType,
  5037. IR::Instr *const instr,
  5038. Value *const src1Value,
  5039. Value *const src2Value)
  5040. {
  5041. Assert(instr);
  5042. Assert(instr->GetDst());
  5043. Assert(!desiredValueType.IsInt()); // use CreateDstUntransferredIntValue instead
  5044. ValueType valueType(desiredValueType);
  5045. if(IsLoopPrePass())
  5046. {
  5047. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, IsPrepassSrcValueInfoPrecise(instr, src1Value, src2Value));
  5048. }
  5049. return NewGenericValue(valueType, instr->GetDst());
  5050. }
// Produces the dst's value for a value-transferring instruction (e.g. a load/assign).
// Outside a loop prepass the src1 value is transferred as-is; inside a prepass the
// transfer is restricted (see ValueNumberTransferDstInPrepass) because src1 may change
// on a later iteration of the loop.
Value *
GlobOpt::ValueNumberTransferDst(IR::Instr *const instr, Value * src1Val)
{
    Value *dstVal = this->IsLoopPrePass() ? this->ValueNumberTransferDstInPrepass(instr, src1Val) : src1Val;

    // Don't copy-prop a temp over a user symbol. This is likely to extend the temp's lifetime, as the user symbol
    // is more likely to already have later references.
    // REVIEW: Enabling this does cause perf issues...
#if 0
    if (dstVal != src1Val)
    {
        return dstVal;
    }

    // NOTE(review): 'dst' is not declared in this disabled region; it would need to be
    // instr->GetDst() if this code were ever re-enabled.
    Sym *dstSym = dst->GetStackSym();

    if (dstVal && dstSym && dstSym->IsStackSym() && !dstSym->AsStackSym()->m_isBytecodeTmp)
    {
        Sym *dstValSym = dstVal->GetValueInfo()->GetSymStore();
        if (dstValSym && dstValSym->AsStackSym()->m_isBytecodeTmp /* src->GetIsDead()*/)
        {
            dstVal->GetValueInfo()->SetSymStore(dstSym);
        }
    }
#endif

    return dstVal;
}
  5075. bool
  5076. GlobOpt::IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue)
  5077. {
  5078. if (src->IsRegOpnd())
  5079. {
  5080. StackSym *srcSym = src->AsRegOpnd()->m_sym;
  5081. if (srcSym->IsFromByteCodeConstantTable())
  5082. {
  5083. return true;
  5084. }
  5085. ValueInfo *srcValueInfo = srcValue->GetValueInfo();
  5086. int32 srcIntConstantValue;
  5087. if (srcValueInfo->TryGetIntConstantValue(&srcIntConstantValue) && !Js::TaggedInt::IsOverflow(srcIntConstantValue)
  5088. && GetTaggedIntConstantStackSym(srcIntConstantValue) == srcSym)
  5089. {
  5090. return true;
  5091. }
  5092. }
  5093. return false;
  5094. }
// Prepass version of ValueNumberTransferDst. During a loop prepass, src1's value could
// change on a later iteration, so the value generally cannot be transferred verbatim:
// depending on precision/safety, the value is either transferred, copied under a new
// value number (so aggressive int type spec is not fooled), or replaced by a fresh
// generic value of the widened type.
Value *
GlobOpt::ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val)
{
    Value *dstVal = nullptr;

    if (!src1Val)
    {
        return nullptr;
    }

    bool isValueInfoPrecise;
    ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();

    // TODO: This conflicts with new values created by the type specialization code
    // We should re-enable if we change that code to avoid the new values.
#if 0
    if (this->IsSafeToTransferInPrePass(instr->GetSrc1(), src1Val))
    {
        return src1Val;
    }

    // PRE-hoisted field loads may also be transferred directly.
    if (this->IsPREInstrCandidateLoad(instr->m_opcode) && instr->GetDst())
    {
        StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;
        for (Loop *curLoop = this->currentBlock->loop; curLoop; curLoop = curLoop->parent)
        {
            if (curLoop->fieldPRESymStore->Test(dstSym->m_id))
            {
                return src1Val;
            }
        }
    }

    if (instr->GetDst()->IsRegOpnd())
    {
        StackSym *stackSym = instr->GetDst()->AsRegOpnd()->m_sym;
        if (stackSym->IsSingleDef() || this->IsLive(stackSym, this->prePassLoop->landingPad))
        {
            IntConstantBounds src1IntConstantBounds;
            if (src1ValueInfo->TryGetIntConstantBounds(&src1IntConstantBounds) &&
                !(
                    src1IntConstantBounds.LowerBound() == INT32_MIN &&
                    src1IntConstantBounds.UpperBound() == INT32_MAX
                ))
            {
                const ValueType valueType(
                    GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
                if (isValueInfoPrecise)
                {
                    return src1Val;
                }
            }
            else
            {
                return src1Val;
            }
        }
    }
#endif

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // first pass when transferring a value that is live on the back-edge.

    // In prepass we are going to copy the value but with a different value number
    // for aggressive int type spec.
    bool isSafeToTransferInPrepass = false;
    isValueInfoPrecise = IsPrepassSrcValueInfoPrecise(instr, src1Val, nullptr, &isSafeToTransferInPrepass);

    const ValueType valueType(GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, isValueInfoPrecise));
    if(isValueInfoPrecise || isSafeToTransferInPrepass)
    {
        // Precise or provably safe: transfer directly (unless the AVT-in-prepass phase
        // is disabled, in which case copy under a new value number and track for kills).
        Assert(valueType == src1ValueInfo->Type());
        if (!PHASE_OFF1(Js::AVTInPrePassPhase))
        {
            dstVal = src1Val;
        }
        else
        {
            dstVal = CopyValue(src1Val);
            TrackCopiedValueForKills(dstVal);
        }
    }
    else if (valueType == src1ValueInfo->Type() && src1ValueInfo->IsGeneric()) // this else branch is probably not needed
    {
        Assert(valueType == src1ValueInfo->Type());
        dstVal = CopyValue(src1Val);
        TrackCopiedValueForKills(dstVal);
    }
    else
    {
        // Type had to be widened: create a fresh generic value, but keep the sym store
        // so copy-prop information is preserved.
        dstVal = NewGenericValue(valueType);
        dstVal->GetValueInfo()->SetSymStore(src1ValueInfo->GetSymStore());
    }

    return dstVal;
}
  5183. void
  5184. GlobOpt::PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32* pNewMax)
  5185. {
  5186. int32 tmp;
  5187. Int32Math::Not(minimum, pNewMin);
  5188. *pNewMax = *pNewMin;
  5189. Int32Math::Not(maximum, &tmp);
  5190. *pNewMin = min(*pNewMin, tmp);
  5191. *pNewMax = max(*pNewMax, tmp);
  5192. }
// Computes a conservative int32 result range [*pNewMin, *pNewMax] for a binary bitwise
// or shift operation, given the operand ranges [min1, max1] and [min2, max2].
// Opcodes not handled by the switch fall through to the full int32 range.
void
GlobOpt::PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
    int32 min2, int32 max2, int32 *pNewMin, int32* pNewMax)
{
    int32 min, max, tmp, tmp2;

    // Default: no information — full int32 range.
    min = INT32_MIN;
    max = INT32_MAX;

    switch (instr->m_opcode)
    {
    case Js::OpCode::Xor_A:
    case Js::OpCode::Or_A:
        // Find range with highest high order bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp > (uint32)tmp2)
        {
            max = tmp;
        }
        else
        {
            max = tmp2;
        }

        if (max < 0)
        {
            min = INT32_MIN;  // REVIEW: conservative...
            max = INT32_MAX;
        }
        else
        {
            // OR/XOR can set any bit at or below the highest operand bit.
            // Turn values like 0x1010 into 0x1111
            max = 1 << Math::Log2(max);
            max = (uint32)(max << 1) - 1;
            min = 0;
        }
        break;

    case Js::OpCode::And_A:
        if (min1 == INT32_MIN && min2 == INT32_MIN)
        {
            // Shortcut
            break;
        }

        // Find range with lowest higher bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp < (uint32)tmp2)
        {
            min = min1;
            max = max1;
        }
        else
        {
            min = min2;
            max = max2;
        }

        // To compute max, look if min has higher high bit
        if ((uint32)min > (uint32)max)
        {
            max = min;
        }

        // If max is negative, max let's assume it could be -1, so result in MAX_INT
        if (max < 0)
        {
            max = INT32_MAX;
        }

        // If min is positive, the resulting min is zero
        if (min >= 0)
        {
            min = 0;
        }
        else
        {
            min = INT32_MIN;
        }
        break;

    case Js::OpCode::Shl_A:
    {
        // Shift count: normalize to [0, 31]; an unknown or out-of-range count can be
        // anything after masking.
        if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
        {
            min2 = 0;
            max2 = 0x1F;
        }
        else
        {
            min2 &= 0x1F;
            max2 &= 0x1F;
        }

        // Number of zero bits above the highest set bit of each bound; if the shift can
        // reach the sign bit, give up and report the full range.
        int32 min1FreeTopBitCount = min1 ? (sizeof(int32) * 8) - (Math::Log2(min1) + 1) : (sizeof(int32) * 8);
        int32 max1FreeTopBitCount = max1 ? (sizeof(int32) * 8) - (Math::Log2(max1) + 1) : (sizeof(int32) * 8);
        if (min1FreeTopBitCount <= max2 || max1FreeTopBitCount <= max2)
        {
            // If the shift is going to touch the sign bit return the max range
            min = INT32_MIN;
            max = INT32_MAX;
        }
        else
        {
            // Compute max
            // Turn values like 0x1010 into 0x1111
            if (min1)
            {
                min1 = 1 << Math::Log2(min1);
                min1 = (min1 << 1) - 1;
            }
            if (max1)
            {
                max1 = 1 << Math::Log2(max1);
                max1 = (uint32)(max1 << 1) - 1;
            }

            if (max1 > 0)
            {
                int32 nrTopBits = (sizeof(int32) * 8) - Math::Log2(max1);
                if (nrTopBits < ::min(max2, 30))
                    max = INT32_MAX;
                else
                    max = ::max((max1 << ::min(max2, 30)) & ~0x80000000, (min1 << min2) & ~0x80000000);
            }
            else
            {
                max = (max1 << min2) & ~0x80000000;
            }
            // Compute min
            if (min1 < 0)
            {
                min = ::min(min1 << max2, max1 << max2);
            }
            else
            {
                min = ::min(min1 << min2, max1 << max2);
            }
            // Turn values like 0x1110 into 0x1000
            if (min)
            {
                min = 1 << Math::Log2(min);
            }
        }
    }
        break;

    case Js::OpCode::Shr_A:
        // Shift count: normalize to [0, 31] as for Shl_A.
        if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
        {
            min2 = 0;
            max2 = 0x1F;
        }
        else
        {
            min2 &= 0x1F;
            max2 &= 0x1F;
        }

        // Compute max: arithmetic shift of a negative value moves it toward zero, so
        // the largest shift gives the largest (least negative) result.
        if (max1 < 0)
        {
            max = max1 >> max2;
        }
        else
        {
            max = max1 >> min2;
        }

        // Compute min: symmetric reasoning for the lower bound.
        if (min1 < 0)
        {
            min = min1 >> min2;
        }
        else
        {
            min = min1 >> max2;
        }
        break;

    case Js::OpCode::ShrU_A:

        // shift count is constant zero
        if ((min2 == max2) && (max2 & 0x1f) == 0)
        {
            // We can't encode uint32 result, so it has to be used as int32 only or the original value is positive.
            Assert(instr->ignoreIntOverflow || min1 >= 0);
            // We can transfer the signed int32 range.
            min = min1;
            max = max1;
            break;
        }
        const IntConstantBounds src2NewBounds = IntConstantBounds(min2, max2).And_0x1f();
        // Zero is only allowed if result is always a signed int32 or always used as a signed int32
        Assert(min1 >= 0 || instr->ignoreIntOverflow || !src2NewBounds.Contains(0));
        min2 = src2NewBounds.LowerBound();
        max2 = src2NewBounds.UpperBound();

        Assert(min2 <= max2);
        // zero shift count is only allowed if result is used as int32 and/or value is positive
        Assert(min2 > 0 || instr->ignoreIntOverflow || min1 >= 0);

        // Work on the unsigned interpretation of the operand range.
        uint32 umin1 = (uint32)min1;
        uint32 umax1 = (uint32)max1;

        if (umin1 > umax1)
        {
            uint32 temp = umax1;
            umax1 = umin1;
            umin1 = temp;
        }

        Assert(min2 >= 0 && max2 < 32);

        // Compute max
        if (min1 < 0)
        {
            // The range straddles the sign bit: as unsigned, the value may be anything
            // up to UINT32_MAX.
            umax1 = UINT32_MAX;
        }
        max = umax1 >> min2;

        // Compute min
        if (min1 <= 0 && max1 >=0)
        {
            min = 0;
        }
        else
        {
            min = umin1 >> max2;
        }

        // We should be able to fit uint32 range as int32
        Assert(instr->ignoreIntOverflow || (min >= 0 && max >= 0) );
        if (min > max)
        {
            // can only happen if shift count can be zero
            Assert(min2 == 0 && (instr->ignoreIntOverflow || min1 >= 0));
            min = Int32ConstMin;
            max = Int32ConstMax;
        }
        break;
    }

    *pNewMin = min;
    *pNewMax = max;
}
// Main per-instruction type-specialization driver. Attempts constant folding, unary and
// binary type specialization, and branch folding in turn; if nothing specializes, the
// srcs/dst are forced back to var (or kept in their existing type-spec form for the dst).
// Returns the (possibly replaced) instruction. *redoTypeSpecRef is set when the caller
// should re-run specialization on this instruction.
IR::Instr *
GlobOpt::TypeSpecialization(
    IR::Instr *instr,
    Value **pSrc1Val,
    Value **pSrc2Val,
    Value **pDstVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    // Aliases so updates below are visible to the caller through pSrc1Val/pSrc2Val.
    Value *&src1Val = *pSrc1Val;
    Value *&src2Val = *pSrc2Val;
    *redoTypeSpecRef = false;
    Assert(!*forceInvariantHoistingRef);

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    // - Int32 values that can't be tagged are created as float constant values instead because a JavascriptNumber var is needed
    //   for that value at runtime. For the purposes of type specialization, recover the int32 values so that they will be
    //   treated as ints.
    // - If int overflow does not matter for the instruction, we can additionally treat uint32 values as int32 values because
    //   the value resulting from the operation will eventually be converted to int32 anyway
    Value *const src1OriginalVal = src1Val;
    Value *const src2OriginalVal = src2Val;

    if(!instr->ShouldCheckForIntOverflow())
    {
        if(src1Val && src1Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src1Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src1Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    // The value was only representable as uint32; remember that overflow
                    // was ignored for this instruction.
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }

        if(src2Val && src2Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src2Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src2Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
    }

    // Restore the original (possibly float-constant) values when this function returns.
    const AutoRestoreVal autoRestoreSrc1Val(src1OriginalVal, &src1Val);
    const AutoRestoreVal autoRestoreSrc2Val(src2OriginalVal, &src2Val);

    if (src1Val && instr->GetSrc2() == nullptr)
    {
        // Unary
        // Note make sure that native array StElemI gets to TypeSpecializeStElem. Do this for typed arrays, too?
        int32 intConstantValue;
        if (!this->IsLoopPrePass() &&
            !instr->IsBranchInstr() &&
            src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) &&
            !(
                // Nothing to fold for element stores. Go into type specialization to see if they can at least be specialized.
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC ||
                instr->m_opcode == Js::OpCode::MultiBr ||
                instr->m_opcode == Js::OpCode::InlineArrayPop
            ))
        {
            if (OptConstFoldUnary(&instr, intConstantValue, src1Val == src1OriginalVal, pDstVal))
            {
                return instr;
            }
        }
        else if (this->TypeSpecializeUnary(
                    &instr,
                    &src1Val,
                    pDstVal,
                    src1OriginalVal,
                    redoTypeSpecRef,
                    forceInvariantHoistingRef))
        {
            return instr;
        }
        else if(*redoTypeSpecRef)
        {
            return instr;
        }
    }
    else if (instr->GetSrc2() && !instr->IsBranchInstr())
    {
        // Binary
        if (!this->IsLoopPrePass())
        {
            if (GetIsAsmJSFunc())
            {
                if (CONFIG_FLAG(WasmFold))
                {
                    // Wasm/asm.js folding operates directly on int64 or int operands.
                    bool success = instr->GetSrc1()->IsInt64() ?
                        this->OptConstFoldBinaryWasm<int64>(&instr, src1Val, src2Val, pDstVal) :
                        this->OptConstFoldBinaryWasm<int>(&instr, src1Val, src2Val, pDstVal);

                    if (success)
                    {
                        return instr;
                    }
                }
            }
            else
            {
                // OptConstFoldBinary doesn't do type spec, so only deal with things we are sure are int (IntConstant and IntRange)
                // and not just likely ints  TypeSpecializeBinary will deal with type specializing them and fold them again
                IntConstantBounds src1IntConstantBounds, src2IntConstantBounds;
                if (src1Val && src1Val->GetValueInfo()->TryGetIntConstantBounds(&src1IntConstantBounds))
                {
                    if (src2Val && src2Val->GetValueInfo()->TryGetIntConstantBounds(&src2IntConstantBounds))
                    {
                        if (this->OptConstFoldBinary(&instr, src1IntConstantBounds, src2IntConstantBounds, pDstVal))
                        {
                            return instr;
                        }
                    }
                }
            }
        }
    }
    if (instr->GetSrc2() && this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2OriginalVal, redoTypeSpecRef))
    {
        // Binary specialization succeeded; try to peep away an identity constant operand.
        if (!this->IsLoopPrePass() &&
            instr->m_opcode != Js::OpCode::Nop &&
            instr->m_opcode != Js::OpCode::Br &&    // We may have const fold a branch

            // Cannot const-peep if the result of the operation is required for a bailout check
            !(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnResultConditions))
        {
            if (src1Val && src1Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc1(), pDstVal, src1Val->GetValueInfo()))
                {
                    return instr;
                }
            }
            else if (src2Val && src2Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc2(), pDstVal, src2Val->GetValueInfo()))
                {
                    return instr;
                }
            }
        }
        return instr;
    }
    else if(*redoTypeSpecRef)
    {
        return instr;
    }

    if (instr->IsBranchInstr() && !this->IsLoopPrePass())
    {
        if (this->OptConstFoldBranch(instr, src1Val, src2Val, pDstVal))
        {
            return instr;
        }
    }
    // We didn't type specialize, make sure the srcs are unspecialized
    IR::Opnd *src1 = instr->GetSrc1();
    if (src1)
    {
        instr = this->ToVarUses(instr, src1, false, src1Val);

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            instr = this->ToVarUses(instr, src2, false, src2Val);
        }
    }

    IR::Opnd *dst = instr->GetDst();
    if (dst)
    {
        instr = this->ToVarUses(instr, dst, true, nullptr);

        // Handling for instructions other than built-ins that may require only dst type specialization
        // should be added here.
        if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode) && !GetIsAsmJSFunc()) // don't need to do typespec for asmjs
        {
            this->TypeSpecializeInlineBuiltInDst(&instr, pDstVal);
            return instr;
        }

        // Clear the int specialized bit on the dst.
        if (dst->IsRegOpnd())
        {
            IR::RegOpnd *dstRegOpnd = dst->AsRegOpnd();
            if (!dstRegOpnd->m_sym->IsTypeSpec())
            {
                this->ToVarRegOpnd(dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsInt32())
            {
                this->ToInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsUInt32() && GetIsAsmJSFunc())
            {
                this->ToUInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsFloat64())
            {
                this->ToFloat64Dst(instr, dstRegOpnd, this->currentBlock);
            }
        }
        else if (dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym())
        {
            this->ToVarStackSym(dst->AsSymOpnd()->m_sym->AsStackSym(), this->currentBlock);
        }
    }

    return instr;
}
// Peephole-removes a binary operation when one operand is a known constant that makes
// the operation an identity (x+0, x*1, x&-1, x|0, x>>0, ...), rewriting the instruction
// into a Ld_A of the surviving operand. Returns true if the instruction was rewritten.
// constSrc is the operand whose value is constant; valuInfo is its value info.
bool
GlobOpt::OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *valuInfo)
{
    int32 value;
    IR::Opnd *src;
    IR::Opnd *nonConstSrc = (constSrc == instr->GetSrc1() ? instr->GetSrc2() : instr->GetSrc1());

    // Try to find the value from value info first
    if (valuInfo->TryGetIntConstantValue(&value))
    {
    }
    else if (constSrc->IsAddrOpnd())
    {
        IR::AddrOpnd *addrOpnd = constSrc->AsAddrOpnd();
#ifdef _M_X64
        Assert(addrOpnd->IsVar() || Math::FitsInDWord((size_t)addrOpnd->m_address));
#else
        Assert(sizeof(value) == sizeof(addrOpnd->m_address));
#endif

        if (addrOpnd->IsVar())
        {
            value = Js::TaggedInt::ToInt32(addrOpnd->m_address);
        }
        else
        {
            // We asserted that the address will fit in a DWORD above
            value = ::Math::PointerCastToIntegral<int32>(constSrc->AsAddrOpnd()->m_address);
        }
    }
    else if (constSrc->IsIntConstOpnd())
    {
        value = constSrc->AsIntConstOpnd()->AsInt32();
    }
    else
    {
        return false;
    }

    switch(instr->m_opcode)
    {
        // Can't do all Add_A because of string concats.
        // Sub_A cannot be transformed to a NEG_A because 0 - 0 != -0
    case Js::OpCode::Add_A:
        src = nonConstSrc;

        if (!src->GetValueType().IsInt())
        {
            // 0 + -0  != -0
            // "Foo" + 0 != "Foo
            return false;
        }
        // fall-through — once proven int, Add_A follows the Add_I4 identity rule.

    case Js::OpCode::Add_I4:
        if (value != 0)
        {
            return false;
        }
        if (constSrc == instr->GetSrc1())
        {
            src = instr->GetSrc2();
        }
        else
        {
            src = instr->GetSrc1();
        }
        break;

    case Js::OpCode::Mul_A:
    case Js::OpCode::Mul_I4:
        if (value == 0)
        {
            // -0 * 0 != 0
            return false;
        }
        else if (value == 1)
        {
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Div_A:
        if (value == 1 && constSrc == instr->GetSrc2())
        {
            src = instr->GetSrc1();
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Or_I4:
        if (value == -1)
        {
            // x | -1 == -1: the constant itself is the result.
            src = constSrc;
        }
        else if (value == 0)
        {
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::And_I4:
        if (value == -1)
        {
            src = nonConstSrc;
        }
        else if (value == 0)
        {
            // x & 0 == 0: the constant itself is the result.
            src = constSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        // Only a shift by a constant zero count is an identity.
        if (value != 0 || constSrc != instr->GetSrc2())
        {
            return false;
        }
        src = instr->GetSrc1();
        break;

    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    // Keep the surviving operand in src1 and drop the other; the op becomes a plain load.
    if (src == instr->GetSrc1())
    {
        instr->FreeSrc2();
    }
    else
    {
        Assert(src == instr->GetSrc2());
        instr->ReplaceSrc1(instr->UnlinkSrc2());
    }

    instr->m_opcode = Js::OpCode::Ld_A;

    InvalidateInductionVariables(instr);

    return true;
}
// Tries to recover the runtime Js::Var constant represented by an operand/value pair
// (var constants, tagged ints, float constants under FLOATVAR, and single-def booleans /
// undefined / null loaded through a reg). Returns nullptr if no primitive constant var
// can be determined.
Js::Var // TODO: michhol OOP JIT, shouldn't play with Vars
GlobOpt::GetConstantVar(IR::Opnd *opnd, Value *val)
{
    ValueInfo *valueInfo = val->GetValueInfo();

    if (valueInfo->IsVarConstant() && valueInfo->IsPrimitive())
    {
        return valueInfo->AsVarConstant()->VarValue();
    }
    if (opnd->IsAddrOpnd())
    {
        IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
        if (addrOpnd->IsVar())
        {
            return addrOpnd->m_address;
        }
    }
    else if (opnd->IsIntConstOpnd())
    {
        // Only ints that fit in a tagged int can be returned as a var.
        if (!Js::TaggedInt::IsOverflow(opnd->AsIntConstOpnd()->AsInt32()))
        {
            return Js::TaggedInt::ToVarUnchecked(opnd->AsIntConstOpnd()->AsInt32());
        }
    }
#if FLOATVAR
    else if (opnd->IsFloatConstOpnd())
    {
        return Js::JavascriptNumber::ToVar(opnd->AsFloatConstOpnd()->m_value);
    }
#endif
    else if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->IsSingleDef())
    {
        // Single-def reg: look through the defining instruction for a constant load.
        if (valueInfo->IsBoolean())
        {
            IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
            if (defInstr->m_opcode != Js::OpCode::Ld_A || !defInstr->GetSrc1()->IsAddrOpnd())
            {
                return nullptr;
            }
            Assert(defInstr->GetSrc1()->AsAddrOpnd()->IsVar());
            return defInstr->GetSrc1()->AsAddrOpnd()->m_address;
        }
        else if (valueInfo->IsUndefined())
        {
            return (Js::Var)this->func->GetScriptContextInfo()->GetUndefinedAddr();
        }
        else if (valueInfo->IsNull())
        {
            return (Js::Var)this->func->GetScriptContextInfo()->GetNullAddr();
        }
#if FLOATVAR
        else if (valueInfo->IsFloat())
        {
            IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
            if (defInstr->m_opcode == Js::OpCode::LdC_F8_R8 && defInstr->GetSrc1()->IsFloatConstOpnd())
            {
                return Js::JavascriptNumber::ToVar(defInstr->GetSrc1()->AsFloatConstOpnd()->m_value);
            }
        }
#endif
    }

    return nullptr;
}
// File-local helpers used by branch folding to statically evaluate var comparisons.
namespace
{
    // If 'left' is a tagged int and 'right' is a (non-tagged-int) JavascriptNumber,
    // compares them numerically into *result and returns true; otherwise returns false.
    bool TryCompIntAndFloat(bool * result, Js::Var left, Js::Var right)
    {
        if (Js::TaggedInt::Is(left))
        {
            // If both are tagged ints we should not get here.
            Assert(!Js::TaggedInt::Is(right));
            if (Js::JavascriptNumber::Is_NoTaggedIntCheck(right))
            {
                double value = Js::JavascriptNumber::GetValue(right);
                *result = (Js::TaggedInt::ToInt32(left) == value);
                return true;
            }
        }
        return false;
    }

    // Tries to statically evaluate (strict or loose) equality of two constant vars.
    // Returns true if *result was determined; false means the runtime must decide.
    bool Op_JitEq(bool * result, Value * src1Val, Value * src2Val, Js::Var src1Var, Js::Var src2Var, Func * func, bool isStrict)
    {
        Assert(src1Val != nullptr && src2Val != nullptr);
        Assert(src1Var != nullptr && src2Var != nullptr);
        if (src1Var == src2Var)
        {
            if (Js::TaggedInt::Is(src1Var))
            {
                *result = true;
                return true;
            }

            if (!isStrict && src1Val->GetValueInfo()->IsNotFloat())
            {
                // If the vars are equal and they are not NaN, non-strict equal returns true. Not float guarantees not NaN.
                *result = true;
                return true;
            }

#if FLOATVAR
            if (Js::JavascriptNumber::Is_NoTaggedIntCheck(src1Var))
            {
                // Identical float vars compare equal unless the value is NaN.
                *result = !Js::JavascriptNumber::IsNan(Js::JavascriptNumber::GetValue(src1Var));
                return true;
            }
#endif

            // Identical well-known singletons (true/false/null/undefined) are equal.
            if (src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetTrueAddr()) ||
                src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetFalseAddr()) ||
                src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetNullAddr()) ||
                src1Var == reinterpret_cast<Js::Var>(func->GetScriptContextInfo()->GetUndefinedAddr()))
            {
                *result = true;
                return true;
            }

            // Other var comparisons require the runtime to prove.
            return false;
        }

#if FLOATVAR
        if (TryCompIntAndFloat(result, src1Var, src2Var) || TryCompIntAndFloat(result, src2Var, src1Var))
        {
            return true;
        }
#endif

        return false;
    }

    // Negated form of Op_JitEq; same determinability semantics.
    bool Op_JitNeq(bool * result, Value * src1Val, Value * src2Val, Js::Var src1Var, Js::Var src2Var, Func * func, bool isStrict)
    {
        if (Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, func, isStrict))
        {
            *result = !*result;
            return true;
        }

        return false;
    }

    // Returns true when one side is a known number constant other than 0 or 1 and the
    // other side is a boolean — such a loose comparison can never be equal.
    bool BoolAndIntStaticAndTypeMismatch(Value* src1Val, Value* src2Val, Js::Var src1Var, Js::Var src2Var)
    {
        ValueInfo *src1ValInfo = src1Val->GetValueInfo();
        ValueInfo *src2ValInfo = src2Val->GetValueInfo();
        return (src1ValInfo->IsNumber() && src1Var && src2ValInfo->IsBoolean() && src1Var != Js::TaggedInt::ToVarUnchecked(0) && src1Var != Js::TaggedInt::ToVarUnchecked(1)) ||
            (src2ValInfo->IsNumber() && src2Var && src1ValInfo->IsBoolean() && src2Var != Js::TaggedInt::ToVarUnchecked(0) && src2Var != Js::TaggedInt::ToVarUnchecked(1));
    }
}
  5919. bool
  5920. GlobOpt::CanProveConditionalBranch(IR::Instr *instr, Value *src1Val, Value *src2Val, Js::Var src1Var, Js::Var src2Var, bool *result)
  5921. {
  5922. auto AreSourcesEqual = [&](Value * val1, Value * val2, bool undefinedCmp) -> bool
  5923. {
  5924. // NaN !== NaN, and objects can have valueOf/toString
  5925. if (val1->IsEqualTo(val2))
  5926. {
  5927. if (val1->GetValueInfo()->IsUndefined())
  5928. {
  5929. return undefinedCmp;
  5930. }
  5931. return val1->GetValueInfo()->IsPrimitive() && val1->GetValueInfo()->IsNotFloat();
  5932. }
  5933. return false;
  5934. };
  5935. // Make sure GetConstantVar only returns primitives.
  5936. // TODO: OOP JIT, enabled these asserts
  5937. //Assert(!src1Var || !Js::JavascriptOperators::IsObject(src1Var));
  5938. //Assert(!src2Var || !Js::JavascriptOperators::IsObject(src2Var));
  5939. int64 left64, right64;
  5940. int32 left, right;
  5941. int32 constVal;
  5942. switch (instr->m_opcode)
  5943. {
  5944. #define BRANCHSIGNED(OPCODE,CMP,TYPE,UNSIGNEDNESS,UNDEFINEDCMP) \
  5945. case Js::OpCode::##OPCODE: \
  5946. if (src1Val && src2Val) \
  5947. { \
  5948. if (src1Val->GetValueInfo()->TryGetIntConstantValue(&left, UNSIGNEDNESS) && \
  5949. src2Val->GetValueInfo()->TryGetIntConstantValue(&right, UNSIGNEDNESS)) \
  5950. { \
  5951. *result = (TYPE)left CMP(TYPE)right; \
  5952. } \
  5953. if (src1Val->GetValueInfo()->TryGetInt64ConstantValue(&left64, UNSIGNEDNESS) && \
  5954. src2Val->GetValueInfo()->TryGetInt64ConstantValue(&right64, UNSIGNEDNESS)) \
  5955. { \
  5956. *result = (TYPE)left64 CMP(TYPE)right64; \
  5957. } \
  5958. else if (AreSourcesEqual(src1Val, src2Val, UNDEFINEDCMP)) \
  5959. { \
  5960. *result = 0 CMP 0; \
  5961. } \
  5962. else \
  5963. { \
  5964. return false; \
  5965. } \
  5966. } \
  5967. else \
  5968. { \
  5969. return false; \
  5970. } \
  5971. break;
  5972. BRANCHSIGNED(BrEq_I4, == , int64, false, true)
  5973. BRANCHSIGNED(BrGe_I4, >= , int64, false, false)
  5974. BRANCHSIGNED(BrGt_I4, > , int64, false, false)
  5975. BRANCHSIGNED(BrLt_I4, < , int64, false, false)
  5976. BRANCHSIGNED(BrLe_I4, <= , int64, false, false)
  5977. BRANCHSIGNED(BrNeq_I4, != , int64, false, false)
  5978. BRANCHSIGNED(BrUnGe_I4, >= , uint64, true, false)
  5979. BRANCHSIGNED(BrUnGt_I4, > , uint64, true, false)
  5980. BRANCHSIGNED(BrUnLt_I4, < , uint64, true, false)
  5981. BRANCHSIGNED(BrUnLe_I4, <= , uint64, true, false)
  5982. #undef BRANCHSIGNED
  5983. #define BRANCH(OPCODE,CMP,VARCMPFUNC,UNDEFINEDCMP) \
  5984. case Js::OpCode::##OPCODE: \
  5985. if (src1Val && src2Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&left) && \
  5986. src2Val->GetValueInfo()->TryGetIntConstantValue(&right)) \
  5987. { \
  5988. *result = left CMP right; \
  5989. } \
  5990. else if (src1Val && src2Val && AreSourcesEqual(src1Val, src2Val, UNDEFINEDCMP)) \
  5991. { \
  5992. *result = 0 CMP 0; \
  5993. } \
  5994. else if (src1Var && src2Var) \
  5995. { \
  5996. if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts)) \
  5997. { \
  5998. return false; \
  5999. } \
  6000. *result = VARCMPFUNC(src1Var, src2Var, this->func->GetScriptContext()); \
  6001. } \
  6002. else \
  6003. { \
  6004. return false; \
  6005. } \
  6006. break;
  6007. BRANCH(BrGe_A, >= , Js::JavascriptOperators::GreaterEqual, /*undefinedEquality*/ false)
  6008. BRANCH(BrNotGe_A, <, !Js::JavascriptOperators::GreaterEqual, false)
  6009. BRANCH(BrLt_A, <, Js::JavascriptOperators::Less, false)
  6010. BRANCH(BrNotLt_A, >= , !Js::JavascriptOperators::Less, false)
  6011. BRANCH(BrGt_A, >, Js::JavascriptOperators::Greater, false)
  6012. BRANCH(BrNotGt_A, <= , !Js::JavascriptOperators::Greater, false)
  6013. BRANCH(BrLe_A, <= , Js::JavascriptOperators::LessEqual, false)
  6014. BRANCH(BrNotLe_A, >, !Js::JavascriptOperators::LessEqual, false)
  6015. #undef BRANCH
  6016. case Js::OpCode::BrEq_A:
  6017. case Js::OpCode::BrNotNeq_A:
  6018. if (src1Val && src2Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&left) &&
  6019. src2Val->GetValueInfo()->TryGetIntConstantValue(&right))
  6020. {
  6021. *result = left == right;
  6022. }
  6023. else if (src1Val && src2Val && AreSourcesEqual(src1Val, src2Val, true))
  6024. {
  6025. *result = true;
  6026. }
  6027. else if (!src1Var || !src2Var)
  6028. {
  6029. if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
  6030. {
  6031. *result = false;
  6032. }
  6033. else
  6034. {
  6035. return false;
  6036. }
  6037. }
  6038. else
  6039. {
  6040. if (!Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, this->func, false /* isStrict */))
  6041. {
  6042. return false;
  6043. }
  6044. }
  6045. break;
  6046. case Js::OpCode::BrNeq_A:
  6047. case Js::OpCode::BrNotEq_A:
  6048. if (src1Val && src2Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&left) &&
  6049. src2Val->GetValueInfo()->TryGetIntConstantValue(&right))
  6050. {
  6051. *result = left != right;
  6052. }
  6053. else if (src1Val && src2Val && AreSourcesEqual(src1Val, src2Val, true))
  6054. {
  6055. *result = false;
  6056. }
  6057. else if (!src1Var || !src2Var)
  6058. {
  6059. if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
  6060. {
  6061. *result = true;
  6062. }
  6063. else
  6064. {
  6065. return false;
  6066. }
  6067. }
  6068. else
  6069. {
  6070. if (!Op_JitNeq(result, src1Val, src2Val, src1Var, src2Var, this->func, false /* isStrict */))
  6071. {
  6072. return false;
  6073. }
  6074. }
  6075. break;
  6076. case Js::OpCode::BrSrEq_A:
  6077. case Js::OpCode::BrSrNotNeq_A:
  6078. if (!src1Var || !src2Var)
  6079. {
  6080. ValueInfo *src1ValInfo = src1Val->GetValueInfo();
  6081. ValueInfo *src2ValInfo = src2Val->GetValueInfo();
  6082. if (
  6083. (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
  6084. (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
  6085. (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
  6086. (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
  6087. (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||
  6088. (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
  6089. (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
  6090. (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
  6091. (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
  6092. (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
  6093. )
  6094. {
  6095. *result = false;
  6096. }
  6097. else if (AreSourcesEqual(src1Val, src2Val, true))
  6098. {
  6099. *result = true;
  6100. }
  6101. else
  6102. {
  6103. return false;
  6104. }
  6105. }
  6106. else
  6107. {
  6108. if (!Op_JitEq(result, src1Val, src2Val, src1Var, src2Var, this->func, true /* isStrict */))
  6109. {
  6110. return false;
  6111. }
  6112. }
  6113. break;
  6114. case Js::OpCode::BrSrNeq_A:
  6115. case Js::OpCode::BrSrNotEq_A:
  6116. if (!src1Var || !src2Var)
  6117. {
  6118. ValueInfo *src1ValInfo = src1Val->GetValueInfo();
  6119. ValueInfo *src2ValInfo = src2Val->GetValueInfo();
  6120. if (
  6121. (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
  6122. (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
  6123. (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
  6124. (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
  6125. (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||
  6126. (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
  6127. (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
  6128. (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
  6129. (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
  6130. (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
  6131. )
  6132. {
  6133. *result = true;
  6134. }
  6135. else if (AreSourcesEqual(src1Val, src2Val, true))
  6136. {
  6137. *result = false;
  6138. }
  6139. else
  6140. {
  6141. return false;
  6142. }
  6143. }
  6144. else
  6145. {
  6146. if (!Op_JitNeq(result, src1Val, src2Val, src1Var, src2Var, this->func, true /* isStrict */))
  6147. {
  6148. return false;
  6149. }
  6150. }
  6151. break;
  6152. case Js::OpCode::BrFalse_A:
  6153. case Js::OpCode::BrTrue_A:
  6154. {
  6155. ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
  6156. if (src1ValueInfo->IsNull() || src1ValueInfo->IsUndefined())
  6157. {
  6158. *result = instr->m_opcode == Js::OpCode::BrFalse_A;
  6159. break;
  6160. }
  6161. if (src1ValueInfo->IsObject() && src1ValueInfo->GetObjectType() > ObjectType::Object)
  6162. {
  6163. // Specific object types that are tracked are equivalent to 'true'
  6164. *result = instr->m_opcode == Js::OpCode::BrTrue_A;
  6165. break;
  6166. }
  6167. if (!src1Var)
  6168. {
  6169. return false;
  6170. }
  6171. // Set *result = (evaluates true) and negate it later for BrFalse
  6172. if (src1Var == reinterpret_cast<Js::Var>(this->func->GetScriptContextInfo()->GetTrueAddr()))
  6173. {
  6174. *result = true;
  6175. }
  6176. else if (src1Var == reinterpret_cast<Js::Var>(this->func->GetScriptContextInfo()->GetFalseAddr()))
  6177. {
  6178. *result = false;
  6179. }
  6180. else if (Js::TaggedInt::Is(src1Var))
  6181. {
  6182. *result = (src1Var != reinterpret_cast<Js::Var>(Js::AtomTag_IntPtr));
  6183. }
  6184. #if FLOATVAR
  6185. else if (Js::JavascriptNumber::Is_NoTaggedIntCheck(src1Var))
  6186. {
  6187. double value = Js::JavascriptNumber::GetValue(src1Var);
  6188. *result = (!Js::JavascriptNumber::IsNan(value)) && (!Js::JavascriptNumber::IsZero(value));
  6189. }
  6190. #endif
  6191. else
  6192. {
  6193. return false;
  6194. }
  6195. if (instr->m_opcode == Js::OpCode::BrFalse_A)
  6196. {
  6197. *result = !(*result);
  6198. }
  6199. break;
  6200. }
  6201. case Js::OpCode::BrFalse_I4:
  6202. {
  6203. constVal = 0;
  6204. if (!src1Val->GetValueInfo()->TryGetIntConstantValue(&constVal))
  6205. {
  6206. return false;
  6207. }
  6208. *result = constVal == 0;
  6209. break;
  6210. }
  6211. case Js::OpCode::BrOnObject_A:
  6212. {
  6213. ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
  6214. if (!src1ValueInfo->IsDefinite())
  6215. {
  6216. return false;
  6217. }
  6218. *result = !src1ValueInfo->IsPrimitive();
  6219. break;
  6220. }
  6221. default:
  6222. return false;
  6223. }
  6224. return true;
  6225. }
  6226. bool
  6227. GlobOpt::OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal)
  6228. {
  6229. if (!src1Val)
  6230. {
  6231. return false;
  6232. }
  6233. Js::Var src1Var = this->GetConstantVar(instr->GetSrc1(), src1Val);
  6234. Js::Var src2Var = nullptr;
  6235. if (instr->GetSrc2())
  6236. {
  6237. if (!src2Val)
  6238. {
  6239. return false;
  6240. }
  6241. src2Var = this->GetConstantVar(instr->GetSrc2(), src2Val);
  6242. }
  6243. bool result;
  6244. if (!CanProveConditionalBranch(instr, src1Val, src2Val, src1Var, src2Var, &result))
  6245. {
  6246. return false;
  6247. }
  6248. this->OptConstFoldBr(!!result, instr);
  6249. return true;
  6250. }
bool
GlobOpt::OptConstFoldUnary(
    IR::Instr * *pInstr,
    const int32 intConstantValue,
    const bool isUsingOriginalSrc1Value,
    Value **pDstVal)
{
    // Constant-folds a unary instruction whose source is known to be the int
    // constant 'intConstantValue'. On success the instruction is rewritten into
    // a constant load (Ld_I4 / LdC_A_I4 for ints, LdC_F8_R8 / LdC_A_R8 for
    // floats), *pDstVal is set (unless OptDst is expected to copy src1Val), and
    // true is returned. Returns false — leaving the instruction untouched —
    // when folding is not possible (const folding disabled, overflow, non-reg
    // dst, -0.0 result, unsupported opcode, ...).
    IR::Instr * &instr = *pInstr;
    int32 value = 0;
    IR::Opnd *constOpnd;
    bool isInt = true;              // does the folded result fit in an int32?
    bool doSetDstVal = true;        // when false, OptDst sets the dst value by copying src1Val
    FloatConstType fValue = 0.0;

    if (!DoConstFold())
    {
        return false;
    }

    if (instr->GetDst() && !instr->GetDst()->IsRegOpnd())
    {
        return false;
    }

    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_A:
        if (intConstantValue == 0)
        {
            // Could fold to -0.0
            return false;
        }

        // Int32Math helpers return true on overflow (here: -INT32_MIN).
        if (Int32Math::Neg(intConstantValue, &value))
        {
            return false;
        }
        break;

    case Js::OpCode::Not_A:
        Int32Math::Not(intConstantValue, &value);
        break;

    case Js::OpCode::Ld_A:
        if (instr->HasBailOutInfo())
        {
            //The profile data for switch expr can be string and in GlobOpt we realize it is an int.
            if(instr->GetBailOutKind() == IR::BailOutExpectingString)
            {
                throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
            }
            Assert(instr->GetBailOutKind() == IR::BailOutExpectingInteger);
            instr->ClearBailOutInfo();
        }
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Conv_Num:
    case Js::OpCode::LdC_A_I4:
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Incr_A:
        if (Int32Math::Inc(intConstantValue, &value))
        {
            return false;
        }
        break;

    case Js::OpCode::Decr_A:
        if (Int32Math::Dec(intConstantValue, &value))
        {
            return false;
        }
        break;

    // The inline Math.* cases below produce a float result; the common code
    // after the switch folds it back to an int when exactly representable.
    case Js::OpCode::InlineMathAcos:
        fValue = Js::Math::Acos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAsin:
        fValue = Js::Math::Asin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAtan:
        fValue = Js::Math::Atan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathCos:
        fValue = Js::Math::Cos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathExp:
        fValue = Js::Math::Exp((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathLog:
        fValue = Js::Math::Log((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSin:
        fValue = Js::Math::Sin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSqrt:
        fValue = ::sqrt((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathTan:
        fValue = ::tan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathFround:
        // Fround: round-trip through float to apply single-precision rounding.
        fValue = (double) (float) intConstantValue;
        isInt = false;
        break;

    case Js::OpCode::InlineMathAbs:
        if (intConstantValue == INT32_MIN)
        {
            if (instr->GetDst()->IsInt32())
            {
                // if dst is an int (e.g. in asm.js), we should coerce it, not convert to float
                value = static_cast<int32>(2147483648U);
            }
            else
            {
                // Rejit with AggressiveIntTypeSpecDisabled for Math.abs(INT32_MIN) because it causes dst
                // to be float type which could be different with previous type spec result in LoopPrePass
                throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
            }
        }
        else
        {
            value = ::abs(intConstantValue);
        }
        break;

    case Js::OpCode::InlineMathClz:
        DWORD clz;
        if (_BitScanReverse(&clz, intConstantValue))
        {
            // _BitScanReverse yields the index of the highest set bit.
            value = 31 - clz;
        }
        else
        {
            // No bit set: clz32(0) == 32.
            value = 32;
        }
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::Ctz:
        Assert(func->GetJITFunctionBody()->IsWasmFunction());
        Assert(!instr->HasBailOutInfo());
        DWORD ctz;
        if (_BitScanForward(&ctz, intConstantValue))
        {
            value = ctz;
        }
        else
        {
            // No bit set: ctz(0) == 32.
            value = 32;
        }
        break;

    // floor/ceil/round of an int constant are the constant itself.
    case Js::OpCode::InlineMathFloor:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathCeil:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathRound:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::ToVar:
        // Only fold when the constant fits in a tagged int.
        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            return false;
        }
        else
        {
            value = intConstantValue;
            instr->ClearBailOutInfo();
            break;
        }

    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    Assert(!instr->HasBailOutInfo()); // If we are, in fact, successful in constant folding the instruction, there is no point in having the bailoutinfo around anymore.
                                      // Make sure that it is cleared if it was initially present.

    // If a float result is exactly representable as an int32, prefer the int form.
    if (!isInt)
    {
        value = (int32)fValue;
        if (fValue == (double)value)
        {
            isInt = true;
        }
    }

    if (isInt)
    {
        constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %d\n"), value);
    }
    else
    {
        constOpnd = IR::FloatConstOpnd::New(fValue, TyFloat64, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %f\n"), fValue);
    }

    instr->ReplaceSrc1(constOpnd);

    // Re-run src optimization over the freshly created constant operand.
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (isInt)
    {
        if (dstSym->IsSingleDef())
        {
            dstSym->SetIsIntConst(value);
        }

        if (doSetDstVal)
        {
            *pDstVal = GetIntConstantValue(value, instr, dst);
        }

        if (IsTypeSpecPhaseOff(this->func))
        {
            // No type spec: load the tagged-var constant.
            instr->m_opcode = Js::OpCode::LdC_A_I4;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            // Type spec on: load as a raw int32 and int-specialize the dst.
            instr->m_opcode = Js::OpCode::Ld_I4;
            this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);

            // ToInt32Dst may have swapped in the type-specialized sym; mark it too.
            StackSym * currDstSym = instr->GetDst()->AsRegOpnd()->m_sym;
            if (currDstSym->IsSingleDef())
            {
                currDstSym->SetIsIntConst(value);
            }
        }
    }
    else
    {
        *pDstVal = NewFloatConstantValue(fValue, dst);

        if (IsTypeSpecPhaseOff(this->func))
        {
            instr->m_opcode = Js::OpCode::LdC_A_R8;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            instr->m_opcode = Js::OpCode::LdC_F8_R8;
            this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
        }
    }

    // The folded def invalidates any induction-variable info derived from it.
    InvalidateInductionVariables(instr);

    return true;
}
  6505. //------------------------------------------------------------------------------------------------------
  6506. // Type specialization
  6507. //------------------------------------------------------------------------------------------------------
  6508. bool
  6509. GlobOpt::IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val)
  6510. {
  6511. Assert(src);
  6512. Assert(val);
  6513. ValueInfo *valueInfo = val->GetValueInfo();
  6514. Assert(valueInfo->IsLikelyInt());
  6515. // If it is not known that the operand is definitely an int, the operand is not already type-specialized, and it's not live
  6516. // in the loop landing pad (if we're in a loop), it's probably not worth type-specializing this instruction. The common case
  6517. // where type-specializing this would be bad is where the operations are entirely on properties or array elements, where the
  6518. // ratio of FromVars and ToVars to the number of actual operations is high, and the conversions would dominate the time
  6519. // spent. On the other hand, if we're using a function formal parameter more than once, it would probably be worth
  6520. // type-specializing it, hence the IsDead check on the operands.
  6521. return
  6522. valueInfo->IsInt() ||
  6523. valueInfo->HasIntConstantValue(true) ||
  6524. !src->GetIsDead() ||
  6525. !src->IsRegOpnd() ||
  6526. CurrentBlockData()->IsInt32TypeSpecialized(src->AsRegOpnd()->m_sym) ||
  6527. (this->currentBlock->loop && this->currentBlock->loop->landingPad->globOptData.IsLive(src->AsRegOpnd()->m_sym));
  6528. }
  6529. bool
  6530. GlobOpt::IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst)
  6531. {
  6532. Assert(dst);
  6533. const auto sym = dst->AsRegOpnd()->m_sym;
  6534. return
  6535. CurrentBlockData()->IsInt32TypeSpecialized(sym) ||
  6536. (this->currentBlock->loop && this->currentBlock->loop->landingPad->globOptData.IsLive(sym));
  6537. }
  6538. bool
  6539. GlobOpt::IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val)
  6540. {
  6541. Assert(instr);
  6542. const auto src1 = instr->GetSrc1();
  6543. const auto src2 = instr->GetSrc2();
  6544. // In addition to checking each operand and the destination, if for any reason we only have to do a maximum of two
  6545. // conversions instead of the worst-case 3 conversions, it's probably worth specializing.
  6546. if (IsWorthSpecializingToInt32DueToSrc(src1, src1Val) ||
  6547. (src2Val && IsWorthSpecializingToInt32DueToSrc(src2, src2Val)))
  6548. {
  6549. return true;
  6550. }
  6551. IR::Opnd *dst = instr->GetDst();
  6552. if (!dst || IsWorthSpecializingToInt32DueToDst(dst))
  6553. {
  6554. return true;
  6555. }
  6556. if (dst->IsEqual(src1) || (src2Val && (dst->IsEqual(src2) || src1->IsEqual(src2))))
  6557. {
  6558. return true;
  6559. }
  6560. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  6561. // Skip useless Ld_A's
  6562. do
  6563. {
  6564. switch (instrNext->m_opcode)
  6565. {
  6566. case Js::OpCode::Ld_A:
  6567. if (!dst->IsEqual(instrNext->GetSrc1()))
  6568. {
  6569. goto done;
  6570. }
  6571. dst = instrNext->GetDst();
  6572. break;
  6573. case Js::OpCode::LdFld:
  6574. case Js::OpCode::LdRootFld:
  6575. case Js::OpCode::LdRootFldForTypeOf:
  6576. case Js::OpCode::LdFldForTypeOf:
  6577. case Js::OpCode::LdElemI_A:
  6578. case Js::OpCode::ByteCodeUses:
  6579. break;
  6580. default:
  6581. goto done;
  6582. }
  6583. instrNext = instrNext->GetNextRealInstrOrLabel();
  6584. } while (true);
  6585. done:
  6586. // If the next instr could also be type specialized, then it is probably worth it.
  6587. if ((instrNext->GetSrc1() && dst->IsEqual(instrNext->GetSrc1())) || (instrNext->GetSrc2() && dst->IsEqual(instrNext->GetSrc2())))
  6588. {
  6589. switch (instrNext->m_opcode)
  6590. {
  6591. case Js::OpCode::Add_A:
  6592. case Js::OpCode::Sub_A:
  6593. case Js::OpCode::Mul_A:
  6594. case Js::OpCode::Div_A:
  6595. case Js::OpCode::Rem_A:
  6596. case Js::OpCode::Xor_A:
  6597. case Js::OpCode::And_A:
  6598. case Js::OpCode::Or_A:
  6599. case Js::OpCode::Shl_A:
  6600. case Js::OpCode::Shr_A:
  6601. case Js::OpCode::Incr_A:
  6602. case Js::OpCode::Decr_A:
  6603. case Js::OpCode::Neg_A:
  6604. case Js::OpCode::Not_A:
  6605. case Js::OpCode::Conv_Num:
  6606. case Js::OpCode::BrEq_I4:
  6607. case Js::OpCode::BrTrue_I4:
  6608. case Js::OpCode::BrFalse_I4:
  6609. case Js::OpCode::BrGe_I4:
  6610. case Js::OpCode::BrGt_I4:
  6611. case Js::OpCode::BrLt_I4:
  6612. case Js::OpCode::BrLe_I4:
  6613. case Js::OpCode::BrNeq_I4:
  6614. return true;
  6615. }
  6616. }
  6617. return false;
  6618. }
  6619. bool
  6620. GlobOpt::TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal)
  6621. {
  6622. Assert(src1Val->GetValueInfo()->IsNumber());
  6623. if (this->IsLoopPrePass())
  6624. {
  6625. return false;
  6626. }
  6627. switch (instr->m_opcode)
  6628. {
  6629. case Js::OpCode::Conv_Num:
  6630. // Optimize Conv_Num away since we know this is a number
  6631. instr->m_opcode = Js::OpCode::Ld_A;
  6632. return false;
  6633. }
  6634. return false;
  6635. }
  6636. bool
  6637. GlobOpt::TypeSpecializeUnary(
  6638. IR::Instr **pInstr,
  6639. Value **pSrc1Val,
  6640. Value **pDstVal,
  6641. Value *const src1OriginalVal,
  6642. bool *redoTypeSpecRef,
  6643. bool *const forceInvariantHoistingRef)
  6644. {
  6645. Assert(pSrc1Val);
  6646. Value *&src1Val = *pSrc1Val;
  6647. Assert(src1Val);
  6648. // We don't need to do typespec for asmjs
  6649. if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
  6650. {
  6651. return false;
  6652. }
  6653. IR::Instr *&instr = *pInstr;
  6654. int32 min, max;
  6655. // Inline built-ins explicitly specify how srcs/dst must be specialized.
  6656. if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  6657. {
  6658. TypeSpecializeInlineBuiltInUnary(pInstr, &src1Val, pDstVal, src1OriginalVal, redoTypeSpecRef);
  6659. return true;
  6660. }
  6661. // Consider: If type spec wasn't completely done, make sure that we don't type-spec the dst 2nd time.
  6662. if(instr->m_opcode == Js::OpCode::LdLen_A && TypeSpecializeLdLen(&instr, &src1Val, pDstVal, forceInvariantHoistingRef))
  6663. {
  6664. return true;
  6665. }
  6666. if (!src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, this->DoAggressiveIntTypeSpec()))
  6667. {
  6668. src1Val = src1OriginalVal;
  6669. if (src1Val->GetValueInfo()->IsLikelyFloat())
  6670. {
  6671. // Try to type specialize to float
  6672. return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
  6673. }
  6674. else if (src1Val->GetValueInfo()->IsNumber())
  6675. {
  6676. return TypeSpecializeNumberUnary(instr, src1Val, pDstVal);
  6677. }
  6678. return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
  6679. }
  6680. return this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, min, max, src1OriginalVal, redoTypeSpecRef);
  6681. }
// Type-specializes the srcs/dst of an inline built-in unary op according to the
// built-in's flags (all-float args, int/float abs, floor/ceil/round, array pop, clz).
// Note this helper returns void: the caller (TypeSpecializeUnary) treats any
// invocation as "specialization fully handled" and returns true on its behalf.
void
GlobOpt::TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode);   // From actual instr, not profile based.
    Assert(builtInId != Js::BuiltinFunction::None);

    // Consider using different bailout for float/int FromVars, so that when the arg cannot be converted to number we don't disable
    // type spec for other parts of the big function but rather just don't inline that built-in instr.
    // E.g. could do that if the value is not likelyInt/likelyFloat.

    Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);

    bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_Args) == Js::BuiltInFlags::BIF_TypeSpecUnaryToFloat;
    if (areAllArgsAlwaysFloat)
    {
        // InlineMathAcos, InlineMathAsin, InlineMathAtan, InlineMathCos, InlineMathExp, InlineMathLog, InlineMathSin, InlineMathSqrt, InlineMathTan.
        Assert(this->DoFloatTypeSpec());

        // Type-spec the src.
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline built-ins the args have to be type-specialized to float, but something failed during the process.");

        // Type-spec the dst.
        this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathAbs)
    {
        // Consider the case when the value is unknown - because of bailout in abs we may disable type spec for the whole function which is too much.
        // First, try int.
        int minVal, maxVal;
        bool shouldTypeSpecToInt = src1Val->GetValueInfo()->GetIntValMinMax(&minVal, &maxVal, /* doAggressiveIntTypeSpec = */ true);
        if (shouldTypeSpecToInt)
        {
            Assert(this->DoAggressiveIntTypeSpec());
            bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, minVal, maxVal, src1OriginalVal, redoTypeSpecRef, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (int), but something failed during the process.");

            if (!this->IsLoopPrePass())
            {
                // Create bailout for INT_MIN which does not have corresponding int value on the positive side.
                // Check int range: if we know the range is out of overflow, we do not need the bail out at all.
                if (minVal == INT32_MIN)
                {
                    GenerateBailAtOperation(&instr, IR::BailOnIntMin);
                }
            }

            // Account for ::abs(INT_MIN) == INT_MIN (which is less than 0).
            // Dst range: [0, max(|min|, |max|)], computed with INT_MIN clamped out.
            maxVal = ::max(
                ::abs(Int32Math::NearestInRangeTo(minVal, INT_MIN + 1, INT_MAX)),
                ::abs(Int32Math::NearestInRangeTo(maxVal, INT_MIN + 1, INT_MAX)));
            minVal = minVal >= 0 ? minVal : 0;
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, nullptr, IR::BailOutInvalid, minVal, maxVal, pDstVal);
        }
        else
        {
            // If we couldn't do int, do float.
            Assert(this->DoFloatTypeSpec());
            src1Val = src1OriginalVal;
            bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (float), but something failed during the process.");

            this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::InlineMathRound)
    {
        // Type specialize src to float
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline Math.floor and Math.ceil the src has to be type-specialized to float, but something failed during the process.");

        // Type specialize dst to int
        this->TypeSpecializeIntDst(
            instr,
            instr->m_opcode,
            nullptr,
            src1Val,
            nullptr,
            IR::BailOutInvalid,
            INT32_MIN,
            INT32_MAX,
            pDstVal);
    }
    else if(instr->m_opcode == Js::OpCode::InlineArrayPop)
    {
        IR::Opnd *const thisOpnd = instr->GetSrc1();

        Assert(thisOpnd);

        // Ensure src1 (Array) is a var
        this->ToVarUses(instr, thisOpnd, false, src1Val);

        if(!this->IsLoopPrePass() && thisOpnd->GetValueType().IsLikelyNativeArray())
        {
            // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during the run time.
            GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
        }

        // pop() used purely for its side effect has no dst to specialize.
        if(!instr->GetDst())
        {
            return;
        }

        // Try Type Specializing the element (return item from Pop) based on the array's profile data.
        if(thisOpnd->GetValueType().IsLikelyNativeIntArray())
        {
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
        }
        else if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
        {
            this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
        }
        else
        {
            // We reached here so the Element is not yet type specialized. Ensure element is a var
            if(instr->GetDst()->IsRegOpnd())
            {
                this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
            }
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathClz)
    {
        Assert(this->DoAggressiveIntTypeSpec());
        Assert(this->DoLossyIntTypeSpec());
        //Type specialize to int
        bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, INT32_MIN, INT32_MAX, src1OriginalVal, redoTypeSpecRef);
        AssertMsg(retVal, "For clz32, the arg has to be type-specialized to int.");
    }
    else
    {
        AssertMsg(FALSE, "Unsupported built-in!");
    }
}
  6810. void
  6811. GlobOpt::TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal)
  6812. {
  6813. IR::Instr *&instr = *pInstr;
  6814. Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
  6815. switch(instr->m_opcode)
  6816. {
  6817. case Js::OpCode::InlineMathAtan2:
  6818. {
  6819. Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode); // From actual instr, not profile based.
  6820. Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
  6821. bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecAllToFloat) != 0;
  6822. Assert(areAllArgsAlwaysFloat);
  6823. Assert(this->DoFloatTypeSpec());
  6824. // Type-spec the src1, src2 and dst.
  6825. src1Val = src1OriginalVal;
  6826. src2Val = src2OriginalVal;
  6827. bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  6828. AssertMsg(retVal, "For pow and atnan2 the args have to be type-specialized to float, but something failed during the process.");
  6829. break;
  6830. }
  6831. case Js::OpCode::InlineMathPow:
  6832. {
  6833. #ifndef _M_ARM32_OR_ARM64
  6834. if (src2Val->GetValueInfo()->IsLikelyInt())
  6835. {
  6836. bool lossy = false;
  6837. this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, lossy);
  6838. IR::Opnd* src1 = instr->GetSrc1();
  6839. int32 valueMin, valueMax;
  6840. if (src1Val->GetValueInfo()->IsLikelyInt() &&
  6841. this->DoPowIntIntTypeSpec() &&
  6842. src2Val->GetValueInfo()->GetIntValMinMax(&valueMin, &valueMax, this->DoAggressiveIntTypeSpec()) &&
  6843. valueMin >= 0)
  6844. {
  6845. this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
  6846. this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
  6847. if(!this->IsLoopPrePass())
  6848. {
  6849. GenerateBailAtOperation(&instr, IR::BailOutOnPowIntIntOverflow);
  6850. }
  6851. }
  6852. else
  6853. {
  6854. this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);
  6855. TypeSpecializeFloatDst(instr, nullptr, src1Val, src2Val, pDstVal);
  6856. }
  6857. }
  6858. else
  6859. {
  6860. #endif
  6861. this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  6862. #ifndef _M_ARM32_OR_ARM64
  6863. }
  6864. #endif
  6865. break;
  6866. }
  6867. case Js::OpCode::InlineMathImul:
  6868. {
  6869. Assert(this->DoAggressiveIntTypeSpec());
  6870. Assert(this->DoLossyIntTypeSpec());
  6871. //Type specialize to int
  6872. bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, false /* skipDst */);
  6873. AssertMsg(retVal, "For imul, the args have to be type-specialized to int but something failed during the process.");
  6874. break;
  6875. }
  6876. case Js::OpCode::InlineMathMin:
  6877. case Js::OpCode::InlineMathMax:
  6878. {
  6879. if(src1Val->GetValueInfo()->IsLikelyInt() && src2Val->GetValueInfo()->IsLikelyInt())
  6880. {
  6881. // Compute resulting range info
  6882. int32 min1 = INT32_MIN;
  6883. int32 max1 = INT32_MAX;
  6884. int32 min2 = INT32_MIN;
  6885. int32 max2 = INT32_MAX;
  6886. int32 newMin, newMax;
  6887. Assert(this->DoAggressiveIntTypeSpec());
  6888. src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
  6889. src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
  6890. if (instr->m_opcode == Js::OpCode::InlineMathMin)
  6891. {
  6892. newMin = min(min1, min2);
  6893. newMax = min(max1, max2);
  6894. }
  6895. else
  6896. {
  6897. Assert(instr->m_opcode == Js::OpCode::InlineMathMax);
  6898. newMin = max(min1, min2);
  6899. newMax = max(max1, max2);
  6900. }
  6901. // Type specialize to int
  6902. bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, newMin, newMax, false /* skipDst */);
  6903. AssertMsg(retVal, "For min and max, the args have to be type-specialized to int if any one of the sources is an int, but something failed during the process.");
  6904. }
  6905. // Couldn't type specialize to int, type specialize to float
  6906. else
  6907. {
  6908. Assert(this->DoFloatTypeSpec());
  6909. src1Val = src1OriginalVal;
  6910. src2Val = src2OriginalVal;
  6911. bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  6912. AssertMsg(retVal, "For min and max, the args have to be type-specialized to float if any one of the sources is a float, but something failed during the process.");
  6913. }
  6914. break;
  6915. }
  6916. case Js::OpCode::InlineArrayPush:
  6917. {
  6918. IR::Opnd *const thisOpnd = instr->GetSrc1();
  6919. Assert(thisOpnd);
  6920. if(instr->GetDst() && instr->GetDst()->IsRegOpnd())
  6921. {
  6922. // Set the dst as live here, as the built-ins return early from the TypeSpecialization functions - before the dst is marked as live.
  6923. // Also, we are not specializing the dst separately and we are skipping the dst to be handled when we specialize the instruction above.
  6924. this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
  6925. }
  6926. // Ensure src1 (Array) is a var
  6927. this->ToVarUses(instr, thisOpnd, false, src1Val);
  6928. if(!this->IsLoopPrePass())
  6929. {
  6930. if(thisOpnd->GetValueType().IsLikelyNativeArray())
  6931. {
  6932. // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during run time.
  6933. GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
  6934. }
  6935. else
  6936. {
  6937. GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
  6938. }
  6939. }
  6940. // Try Type Specializing the element based on the array's profile data.
  6941. if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
  6942. {
  6943. src1Val = src1OriginalVal;
  6944. src2Val = src2OriginalVal;
  6945. }
  6946. if((thisOpnd->GetValueType().IsLikelyNativeIntArray() && this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, true))
  6947. || (thisOpnd->GetValueType().IsLikelyNativeFloatArray() && this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal)))
  6948. {
  6949. break;
  6950. }
  6951. // The Element is not yet type specialized. Ensure element is a var
  6952. this->ToVarUses(instr, instr->GetSrc2(), false, src2Val);
  6953. break;
  6954. }
  6955. }
  6956. }
  6957. void
  6958. GlobOpt::TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal)
  6959. {
  6960. IR::Instr *&instr = *pInstr;
  6961. Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
  6962. if (instr->m_opcode == Js::OpCode::InlineMathRandom)
  6963. {
  6964. Assert(this->DoFloatTypeSpec());
  6965. // Type specialize dst to float
  6966. this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
  6967. }
  6968. }
  6969. bool
  6970. GlobOpt::TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal)
  6971. {
  6972. // It has been determined that this instruction cannot be int-specialized. We need to determine whether to attempt to
  6973. // float-specialize the instruction, or leave it unspecialized.
  6974. #if !INT32VAR
  6975. Value*& src1Val = *pSrc1Val;
  6976. if(src1Val->GetValueInfo()->IsLikelyUntaggedInt())
  6977. {
  6978. // An input range is completely outside the range of an int31. Even if the operation may overflow, it is
  6979. // unlikely to overflow on these operations, so we leave it unspecialized on 64-bit platforms. However, on
  6980. // 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is significantly slower to
  6981. // use in an unspecialized operation compared to a tagged int. So, try to float-specialize the instruction.
  6982. src1Val = src1OriginalVal;
  6983. return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
  6984. }
  6985. #endif
  6986. return false;
  6987. }
  6988. bool
  6989. GlobOpt::TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst /* = false */)
  6990. {
  6991. // Consider moving the code for int type spec-ing binary functions here.
  6992. IR::Instr *&instr = *pInstr;
  6993. bool lossy = false;
  6994. if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  6995. {
  6996. if(instr->m_opcode == Js::OpCode::InlineArrayPush)
  6997. {
  6998. int32 intConstantValue;
  6999. bool isIntConstMissingItem = src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue);
  7000. if(isIntConstMissingItem)
  7001. {
  7002. isIntConstMissingItem = Js::SparseArraySegment<int>::IsMissingItem(&intConstantValue);
  7003. }
  7004. // Don't specialize if the element is not likelyInt or an IntConst which is a missing item value.
  7005. if(!(src2Val->GetValueInfo()->IsLikelyInt()) || isIntConstMissingItem)
  7006. {
  7007. return false;
  7008. }
  7009. // We don't want to specialize both the source operands, though it is a binary instr.
  7010. IR::Opnd * elementOpnd = instr->GetSrc2();
  7011. this->ToInt32(instr, elementOpnd, this->currentBlock, src2Val, nullptr, lossy);
  7012. }
  7013. else
  7014. {
  7015. IR::Opnd *src1 = instr->GetSrc1();
  7016. this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
  7017. IR::Opnd *src2 = instr->GetSrc2();
  7018. this->ToInt32(instr, src2, this->currentBlock, src2Val, nullptr, lossy);
  7019. }
  7020. if(!skipDst)
  7021. {
  7022. IR::Opnd *dst = instr->GetDst();
  7023. if (dst)
  7024. {
  7025. TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, min, max, pDstVal);
  7026. }
  7027. }
  7028. return true;
  7029. }
  7030. else
  7031. {
  7032. AssertMsg(false, "Yet to move code for other binary functions here");
  7033. return false;
  7034. }
  7035. }
// Attempts to int32 type-specialize a unary (single-source) instruction given
// the integer range [min, max] known for src1.
//
// pInstr          - in/out: the instruction; may be replaced (e.g. when a bailout is attached).
// pSrc1Val        - in/out: src1's value; may be reset to src1OriginalVal on lossy/float paths.
// pDstVal         - out: receives the value created for the dst, when one is created.
// min, max        - the int range of src1's current value.
// src1OriginalVal - src1's value before any overflow-ignoring adjustments; used for lossy
//                   conversions and for float re-specialization attempts.
// redoTypeSpecRef - out: set to true when CSE rewrote the instruction and the caller should
//                   redo type specialization.
// skipDst         - when true, only the src is specialized; dst handling is the caller's.
//
// Returns true when the instruction was specialized (or const-folded, or
// float-specialized via TryTypeSpecializeUnaryToFloatHelper); false to leave
// it unspecialized.
bool
GlobOpt::TypeSpecializeIntUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    int32 min,
    int32 max,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    bool isTransfer = false;            // dst is a pure copy of src1 (Ld_A / Conv_Num)
    Js::OpCode opcode;                  // the int-specialized opcode to rewrite to
    int32 newMin, newMax;               // computed range for the dst
    bool lossy = false;                 // src conversion to int32 may be lossy (Not_A path)
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
    bool ignoredNegativeZero = false;
    bool checkTypeSpecWorth = false;    // run the IsWorthSpecializingToInt32 heuristic below

    // A src sym known not to be a number cannot be int-specialized; see whether
    // float specialization is worthwhile instead.
    if(instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber)
    {
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    // Records (src sym, constant offset) for Incr_A/Decr_A so relative int
    // bounds can be tracked on the dst value.
    AddSubConstantInfo addSubConstantInfo;

    switch(instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (CurrentBlockData()->IsInt32TypeSpecialized(sym) == false)
            {
                // Type specializing an Ld_A isn't worth it, unless the src
                // is already type specialized.
                return false;
            }
        }
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::Conv_Num:
        // On an int-valued src, Conv_Num is just a copy.
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::LdC_A_I4:
        // Int constant load: the dst range is exactly the constant.
        newMin = newMax = instr->GetSrc1()->AsIntConstOpnd()->AsInt32();
        opcode = Js::OpCode::Ld_I4;
        break;

    case Js::OpCode::Neg_A:
        // Negation can produce -0 when the src range includes 0.
        if (min <= 0 && max >= 0)
        {
            if(instr->ShouldCheckForNegativeZero())
            {
                // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
                if(!DoAggressiveIntTypeSpec())
                {
                    // May result in -0
                    // Consider adding a dynamic check for src1 == 0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == 0 && max == 0)
                {
                    // Always results in -0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnNegativeZero;
            }
            else
            {
                ignoredNegativeZero = true;
            }
        }
        // Negating min yields the new max (and vice versa); Int32Math::Neg
        // reports overflow (only INT32_MIN overflows on negation).
        if (Int32Math::Neg(min, &newMax))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == max)
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMax = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if (Int32Math::Neg(max, &newMin))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMin = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if(!instr->ShouldCheckForIntOverflow() && newMin > newMax)
        {
            // When ignoring overflow, the range needs to account for overflow. Since MIN_INT is the only int32 value that
            // overflows on Neg, and the value resulting from overflow is also MIN_INT, if calculating only the new min or new
            // max overflowed but not both, then the new min will be greater than the new max. In that case we need to consider
            // the full range of int32s as possible resulting values.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        opcode = Js::OpCode::Neg_I4;
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Not_A:
        if(!DoLossyIntTypeSpec())
        {
            return false;
        }
        // Bitwise-not uses ToInt32 semantics, so a lossy conversion is acceptable.
        this->PropagateIntRangeForNot(min, max, &newMin, &newMax);
        opcode = Js::OpCode::Not_I4;
        lossy = true;
        break;

    case Js::OpCode::Incr_A:
        do // while(false)
        {
            // Relative bounds may prove (src + 1) cannot overflow even when the
            // constant range alone says it might.
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Inc(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
                    // we use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Inc(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMax = INT32_MAX;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Track (src1 + 1) so the dst can inherit src1's bounds offset by 1.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, 1);
        }

        opcode = Js::OpCode::Add_I4;
        if (!this->IsLoopPrePass())
        {
            // Rewrite the unary increment as (src1 + 1).
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Decr_A:
        do // while(false)
        {
            // Mirror of the Incr_A logic, for (src - 1).
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Dec(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints, we
                    // use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Dec(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMin = INT32_MIN;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Track (src1 - 1) so the dst can inherit src1's bounds offset by -1.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, -1);
        }

        opcode = Js::OpCode::Sub_I4;
        if (!this->IsLoopPrePass())
        {
            // Rewrite the unary decrement as (src1 - 1).
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        // A branch on an int value may be const-foldable outright.
        if(DoConstFold() && !IsLoopPrePass() && TryOptConstFoldBrFalse(instr, src1Val, min, max))
        {
            return true;
        }

        bool specialize = true;
        if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (CurrentBlockData()->IsInt32TypeSpecialized(sym) == false)
            {
                // Type specializing a BrTrue_A/BrFalse_A isn't worth it, unless the src
                // is already type specialized
                specialize = false;
            }
        }

        // Update int bounds along the branch paths (compare against 0) even if
        // the branch itself ends up not being specialized.
        if(instr->m_opcode == Js::OpCode::BrTrue_A)
        {
            UpdateIntBoundsForNotEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrTrue_I4;
        }
        else
        {
            UpdateIntBoundsForEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrFalse_I4;
        }

        if(!specialize)
        {
            return false;
        }

        newMin = 2; newMax = 1; // We'll assert if we make a range where min > max
        break;
    }

    case Js::OpCode::MultiBr:
        newMin = min;
        newMax = max;
        opcode = instr->m_opcode;
        break;

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::StElemC:
        // Stores into likely-native-float arrays must use the original
        // (non-overflow-adjusted) src value.
        if(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyAnyArrayWithNativeFloatValues())
        {
            src1Val = src1OriginalVal;
        }
        return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
#if !FLOATVAR
    case Js::OpCode::StSlotBoxTemp:
#endif
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StFldStrict:
    case Js::OpCode::StRootFldStrict:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    // For this one we need to implement type specialization
    //case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::Ret:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
        // These opcodes are not int-specialized here.
        return false;

    default:
        if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
        {
            newMin = min;
            newMax = max;
            opcode = instr->m_opcode;
            break; // Note: we must keep checkTypeSpecWorth = false to make sure we never return false from this function.
        }
        return false;
    }

    // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it (won't
    // leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value needs to be
    // guaranteed to be an int
    if(checkTypeSpecWorth &&
        !ignoredIntOverflow &&
        !ignoredNegativeZero &&
        instr->ShouldCheckForIntOverflow() &&
        !IsWorthSpecializingToInt32(instr, src1Val))
    {
        // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
        // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
        // the following cases.
        // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to be
        //   an int, but since we're not going to specialize this instruction, there won't be a bailout check.
        // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
        //   that case, especially if the dst sym is live on the back-edge.
        if(bailOutKind == IR::BailOutInvalid &&
            instr->GetDst() &&
            (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
        {
            *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, nullptr);
        }

        if(instr->GetSrc2())
        {
            // Incr_A/Decr_A added a constant src2 above; remove it since we're
            // not specializing after all.
            instr->FreeSrc2();
        }
        return false;
    }

    this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
    this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;

    {
        // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
        Value *src1IndirIndexVal = nullptr;
        Value *src2Val = nullptr;
        if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
        {
            *redoTypeSpecRef = true;
            return false;
        }
    }

    const Js::OpCode originalOpCode = instr->m_opcode;
    if (!this->IsLoopPrePass())
    {
        // No re-write on prepass
        instr->m_opcode = opcode;
    }

    Value *src1ValueToSpecialize = src1Val;
    if(lossy)
    {
        // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
        // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
        // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
        // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
        // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
        // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
        src1ValueToSpecialize = src1OriginalVal;
    }

    // Make sure the srcs are specialized
    IR::Opnd *src1 = instr->GetSrc1();
    this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, lossy);

    if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
    {
        // Attach the overflow / negative-zero bailout computed above.
        GenerateBailAtOperation(&instr, bailOutKind);
    }

    if (!skipDst)
    {
        IR::Opnd *dst = instr->GetDst();
        if (dst)
        {
            AssertMsg(!(isTransfer && !this->IsLoopPrePass()) || min == newMin && max == newMax, "If this is just a copy, old/new min/max should be the same");
            TypeSpecializeIntDst(
                instr,
                originalOpCode,
                isTransfer ? src1Val : nullptr,
                src1Val,
                nullptr,
                bailOutKind,
                newMin,
                newMax,
                pDstVal,
                addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
        }
    }

    if(bailOutKind == IR::BailOutInvalid)
    {
        GOPT_TRACE(_u("Type specialized to INT\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
        {
            Output::Print(_u("Type specialized to INT: "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
    }
    else
    {
        GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
        if(bailOutKind & IR::BailOutOnOverflow)
        {
            GOPT_TRACE(_u(" Overflow\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
        if(bailOutKind & IR::BailOutOnNegativeZero)
        {
            GOPT_TRACE(_u(" Zero\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
    }

    return true;
}
  7520. void
  7521. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  7522. {
  7523. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, ValueType::GetInt(IntConstantBounds(newMin, newMax).IsLikelyTaggable()), newMin, newMax, pDstVal, addSubConstantInfo);
  7524. }
  7525. void
  7526. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  7527. {
  7528. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, valueType, 0, 0, pDstVal, addSubConstantInfo);
  7529. }
// Creates and records the value for the dst of an int-specialized instruction,
// then marks the dst reg as int32-type-specialized (ToInt32Dst).
//
// originalOpCode    - the pre-specialization opcode; used to decide whether -0
//                     was already prevented by the srcs or a bailout check.
// valToTransfer     - non-null when the instruction is a pure copy of src1; the
//                     dst then shares src1's value number instead of a new value.
// bailOutKind       - bailout attached to the instruction (overflow / -0), if any.
// valueType         - the dst value type; widened during the loop prepass when
//                     src value info is imprecise.
// newMin, newMax    - the computed int range for the dst.
// addSubConstantInfo- when present, the dst is (src sym +/- constant) and can
//                     inherit the src's relative int bounds.
void
GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
{
    Assert(valueType.IsInt() || (valueType.IsNumber() && valueType.IsLikelyInt() && newMin == 0 && newMax == 0));
    Assert(!valToTransfer || valToTransfer == src1Value);
    Assert(!addSubConstantInfo || addSubConstantInfo->HasInfo());

    IR::Opnd *dst = instr->GetDst();
    Assert(dst);

    // During the loop prepass, src value info may be imprecise (values can
    // change across iterations), so the dst value type may need widening.
    bool isValueInfoPrecise;
    if(IsLoopPrePass())
    {
        isValueInfoPrecise = IsPrepassSrcValueInfoPrecise(instr, src1Value, src2Value);
        valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, isValueInfoPrecise);
    }
    else
    {
        isValueInfoPrecise = true;
    }

    // If dst has a circular reference in a loop, it probably won't get specialized. Don't mark the dst as type-specialized on
    // the pre-pass. With aggressive int spec though, it will take care of bailing out if necessary so there's no need to assume
    // that the dst will be a var even if it's live on the back-edge. Also if the op always produces an int32, then there's no
    // ambiguity in the dst's value type even in the prepass.
    if (!DoAggressiveIntTypeSpec() && this->IsLoopPrePass() && !valueType.IsInt())
    {
        if (dst->IsRegOpnd())
        {
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        return;
    }

    const IntBounds *dstBounds = nullptr;
    if(addSubConstantInfo && !addSubConstantInfo->SrcValueIsLikelyConstant() && DoTrackRelativeIntBounds())
    {
        Assert(!ignoredIntOverflowForCurrentInstr);

        // Track bounds for add or sub with a constant. For instance, consider (b = a + 2). The value of 'b' should track that
        // it is equal to (the value of 'a') + 2. Additionally, the value of 'b' should inherit the bounds of 'a', offset by
        // the constant value.
        if(!valueType.IsInt() || !isValueInfoPrecise)
        {
            // The constant range is not trustworthy; keep only the relative
            // bounds and widen the constant bounds to the full int32 range.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        dstBounds =
            IntBounds::Add(
                addSubConstantInfo->SrcValue(),
                addSubConstantInfo->Offset(),
                isValueInfoPrecise,
                IntConstantBounds(newMin, newMax),
                alloc);
    }

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // pre-pass.
    if (valToTransfer)
    {
        // If this is just a copy, no need for creating a new value.
        Assert(!addSubConstantInfo);
        *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
        CurrentBlockData()->InsertNewValue(*pDstVal, dst);
    }
    else if (valueType.IsInt() && isValueInfoPrecise)
    {
        // Record whether -0 has already been prevented for the dst value; this
        // only matters when the dst range includes 0.
        bool wasNegativeZeroPreventedByBailout = false;
        if(newMin <= 0 && newMax >= 0)
        {
            switch(originalOpCode)
            {
                case Js::OpCode::Add_A:
                    // -0 + -0 == -0
                    Assert(src1Value);
                    Assert(src2Value);
                    wasNegativeZeroPreventedByBailout =
                        src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout() &&
                        src2Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Sub_A:
                    // -0 - 0 == -0
                    Assert(src1Value);
                    wasNegativeZeroPreventedByBailout = src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Neg_A:
                case Js::OpCode::Mul_A:
                case Js::OpCode::Div_A:
                case Js::OpCode::Rem_A:
                    // For these ops -0 is prevented only by an explicit bailout check.
                    wasNegativeZeroPreventedByBailout = !!(bailOutKind & IR::BailOutOnNegativeZero);
                    break;
            }
        }

        *pDstVal =
            dstBounds
                ? NewIntBoundedValue(valueType, dstBounds, wasNegativeZeroPreventedByBailout, nullptr)
                : NewIntRangeValue(newMin, newMax, wasNegativeZeroPreventedByBailout, nullptr);
    }
    else
    {
        *pDstVal = dstBounds ? NewIntBoundedValue(valueType, dstBounds, false, nullptr) : NewGenericValue(valueType);
    }

    if(addSubConstantInfo || updateInductionVariableValueNumber)
    {
        TrackIntSpecializedAddSubConstant(instr, addSubConstantInfo, *pDstVal, !!dstBounds);
    }

    CurrentBlockData()->SetValue(*pDstVal, dst);

    AssertMsg(dst->IsRegOpnd(), "What else?");
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
}
  7635. bool
  7636. GlobOpt::TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef)
  7637. {
  7638. IR::Instr *&instr = *pInstr;
  7639. int32 min1 = INT32_MIN, max1 = INT32_MAX, min2 = INT32_MIN, max2 = INT32_MAX, newMin, newMax, tmp;
  7640. Js::OpCode opcode;
  7641. Value *&src1Val = *pSrc1Val;
  7642. Value *&src2Val = *pSrc2Val;
  7643. // We don't need to do typespec for asmjs
  7644. if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
  7645. {
  7646. return false;
  7647. }
  7648. if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  7649. {
  7650. this->TypeSpecializeInlineBuiltInBinary(pInstr, src1Val, src2Val, pDstVal, src1OriginalVal, src2OriginalVal);
  7651. return true;
  7652. }
  7653. if (src1Val)
  7654. {
  7655. src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
  7656. }
  7657. if (src2Val)
  7658. {
  7659. src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
  7660. }
  7661. // Type specialize binary operators to int32
  7662. bool src1Lossy = true;
  7663. bool src2Lossy = true;
  7664. IR::BailOutKind bailOutKind = IR::BailOutInvalid;
  7665. bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
  7666. bool ignoredNegativeZero = false;
  7667. bool skipSrc2 = false;
  7668. bool skipDst = false;
  7669. bool needsBoolConv = false;
  7670. AddSubConstantInfo addSubConstantInfo;
  7671. switch (instr->m_opcode)
  7672. {
  7673. case Js::OpCode::Or_A:
  7674. if (!DoLossyIntTypeSpec())
  7675. {
  7676. return false;
  7677. }
  7678. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7679. opcode = Js::OpCode::Or_I4;
  7680. break;
  7681. case Js::OpCode::And_A:
  7682. if (!DoLossyIntTypeSpec())
  7683. {
  7684. return false;
  7685. }
  7686. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7687. opcode = Js::OpCode::And_I4;
  7688. break;
  7689. case Js::OpCode::Xor_A:
  7690. if (!DoLossyIntTypeSpec())
  7691. {
  7692. return false;
  7693. }
  7694. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7695. opcode = Js::OpCode::Xor_I4;
  7696. break;
  7697. case Js::OpCode::Shl_A:
  7698. if (!DoLossyIntTypeSpec())
  7699. {
  7700. return false;
  7701. }
  7702. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7703. opcode = Js::OpCode::Shl_I4;
  7704. break;
  7705. case Js::OpCode::Shr_A:
  7706. if (!DoLossyIntTypeSpec())
  7707. {
  7708. return false;
  7709. }
  7710. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7711. opcode = Js::OpCode::Shr_I4;
  7712. break;
  7713. case Js::OpCode::ShrU_A:
  7714. if (!DoLossyIntTypeSpec())
  7715. {
  7716. return false;
  7717. }
  7718. if (min1 < 0 && IntConstantBounds(min2, max2).And_0x1f().Contains(0))
  7719. {
  7720. // Src1 may be too large to represent as a signed int32, and src2 may be zero. Unless the resulting value is only
  7721. // used as a signed int32 (hence allowing us to ignore the result's sign), don't specialize the instruction.
  7722. if (!instr->ignoreIntOverflow)
  7723. return false;
  7724. ignoredIntOverflow = true;
  7725. }
  7726. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7727. opcode = Js::OpCode::ShrU_I4;
  7728. break;
  7729. case Js::OpCode::BrUnLe_A:
  7730. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7731. // int already, so require that both sources are likely int for folding.
  7732. if (DoConstFold() &&
  7733. !IsLoopPrePass() &&
  7734. TryOptConstFoldBrUnsignedGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  7735. {
  7736. return true;
  7737. }
  7738. if (min1 >= 0 && min2 >= 0)
  7739. {
  7740. // Only handle positive values since this is unsigned...
  7741. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7742. // (INT32_MIN, INT32_MAX), so we're good.
  7743. Assert(src1Val);
  7744. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7745. Assert(src2Val);
  7746. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7747. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  7748. }
  7749. if (!DoLossyIntTypeSpec())
  7750. {
  7751. return false;
  7752. }
  7753. newMin = newMax = 0;
  7754. opcode = Js::OpCode::BrUnLe_I4;
  7755. break;
  7756. case Js::OpCode::BrUnLt_A:
  7757. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7758. // int already, so require that both sources are likely int for folding.
  7759. if (DoConstFold() &&
  7760. !IsLoopPrePass() &&
  7761. TryOptConstFoldBrUnsignedLessThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  7762. {
  7763. return true;
  7764. }
  7765. if (min1 >= 0 && min2 >= 0)
  7766. {
  7767. // Only handle positive values since this is unsigned...
  7768. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7769. // (INT32_MIN, INT32_MAX), so we're good.
  7770. Assert(src1Val);
  7771. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7772. Assert(src2Val);
  7773. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7774. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  7775. }
  7776. if (!DoLossyIntTypeSpec())
  7777. {
  7778. return false;
  7779. }
  7780. newMin = newMax = 0;
  7781. opcode = Js::OpCode::BrUnLt_I4;
  7782. break;
  7783. case Js::OpCode::BrUnGe_A:
  7784. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7785. // int already, so require that both sources are likely int for folding.
  7786. if (DoConstFold() &&
  7787. !IsLoopPrePass() &&
  7788. TryOptConstFoldBrUnsignedLessThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  7789. {
  7790. return true;
  7791. }
  7792. if (min1 >= 0 && min2 >= 0)
  7793. {
  7794. // Only handle positive values since this is unsigned...
  7795. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7796. // (INT32_MIN, INT32_MAX), so we're good.
  7797. Assert(src1Val);
  7798. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7799. Assert(src2Val);
  7800. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7801. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  7802. }
  7803. if (!DoLossyIntTypeSpec())
  7804. {
  7805. return false;
  7806. }
  7807. newMin = newMax = 0;
  7808. opcode = Js::OpCode::BrUnGe_I4;
  7809. break;
  7810. case Js::OpCode::BrUnGt_A:
  7811. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7812. // int already, so require that both sources are likely int for folding.
  7813. if (DoConstFold() &&
  7814. !IsLoopPrePass() &&
  7815. TryOptConstFoldBrUnsignedGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  7816. {
  7817. return true;
  7818. }
  7819. if (min1 >= 0 && min2 >= 0)
  7820. {
  7821. // Only handle positive values since this is unsigned...
  7822. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7823. // (INT32_MIN, INT32_MAX), so we're good.
  7824. Assert(src1Val);
  7825. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7826. Assert(src2Val);
  7827. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7828. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  7829. }
  7830. if (!DoLossyIntTypeSpec())
  7831. {
  7832. return false;
  7833. }
  7834. newMin = newMax = 0;
  7835. opcode = Js::OpCode::BrUnGt_I4;
  7836. break;
  7837. case Js::OpCode::CmUnLe_A:
  7838. if (!DoLossyIntTypeSpec())
  7839. {
  7840. return false;
  7841. }
  7842. newMin = 0;
  7843. newMax = 1;
  7844. opcode = Js::OpCode::CmUnLe_I4;
  7845. needsBoolConv = true;
  7846. break;
  7847. case Js::OpCode::CmUnLt_A:
  7848. if (!DoLossyIntTypeSpec())
  7849. {
  7850. return false;
  7851. }
  7852. newMin = 0;
  7853. newMax = 1;
  7854. opcode = Js::OpCode::CmUnLt_I4;
  7855. needsBoolConv = true;
  7856. break;
  7857. case Js::OpCode::CmUnGe_A:
  7858. if (!DoLossyIntTypeSpec())
  7859. {
  7860. return false;
  7861. }
  7862. newMin = 0;
  7863. newMax = 1;
  7864. opcode = Js::OpCode::CmUnGe_I4;
  7865. needsBoolConv = true;
  7866. break;
  7867. case Js::OpCode::CmUnGt_A:
  7868. if (!DoLossyIntTypeSpec())
  7869. {
  7870. return false;
  7871. }
  7872. newMin = 0;
  7873. newMax = 1;
  7874. opcode = Js::OpCode::CmUnGt_I4;
  7875. needsBoolConv = true;
  7876. break;
  7877. case Js::OpCode::Expo_A:
  7878. {
  7879. src1Val = src1OriginalVal;
  7880. src2Val = src2OriginalVal;
  7881. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  7882. }
  7883. case Js::OpCode::Div_A:
  7884. {
  7885. ValueType specializedValueType = GetDivValueType(instr, src1Val, src2Val, true);
  7886. if (specializedValueType.IsFloat())
  7887. {
  7888. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  7889. // Note: We should really constant fold cst1%cst2...
  7890. src1Val = src1OriginalVal;
  7891. src2Val = src2OriginalVal;
  7892. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  7893. }
  7894. #ifdef _M_ARM
  7895. if (!AutoSystemInfo::Data.ArmDivAvailable())
  7896. {
  7897. return false;
  7898. }
  7899. #endif
  7900. if (specializedValueType.IsInt())
  7901. {
  7902. if (max2 == 0x80000000 || (min2 == 0 && max2 == 00))
  7903. {
  7904. return false;
  7905. }
  7906. if (min1 == 0x80000000 && min2 <= -1 && max2 >= -1)
  7907. {
  7908. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  7909. // Or we know we are dividing by zero (which is weird to have because the profile data
  7910. // say we got an int)
  7911. bailOutKind = IR::BailOutOnDivOfMinInt;
  7912. }
  7913. src1Lossy = false; // Detect -0 on the sources
  7914. src2Lossy = false;
  7915. opcode = Js::OpCode::Div_I4;
  7916. Assert(!instr->GetSrc1()->IsUnsigned());
  7917. bailOutKind |= IR::BailOnDivResultNotInt;
  7918. if (max2 >= 0 && min2 <= 0)
  7919. {
  7920. // Need to check for divide by zero if the denominator range includes 0
  7921. bailOutKind |= IR::BailOutOnDivByZero;
  7922. }
  7923. if (max1 >= 0 && min1 <= 0)
  7924. {
  7925. // Numerator contains 0 so the result contains 0
  7926. newMin = 0;
  7927. newMax = 0;
  7928. if (min2 < 0)
  7929. {
  7930. // Denominator may be negative, so the result could be negative 0
  7931. if (instr->ShouldCheckForNegativeZero())
  7932. {
  7933. bailOutKind |= IR::BailOutOnNegativeZero;
  7934. }
  7935. else
  7936. {
  7937. ignoredNegativeZero = true;
  7938. }
  7939. }
  7940. }
  7941. else
  7942. {
  7943. // Initialize to invalid value, one of the condition below will update it correctly
  7944. newMin = INT_MAX;
  7945. newMax = INT_MIN;
  7946. }
  7947. // Deal with the positive and negative range separately for both the numerator and the denominator,
  7948. // and integrate to the overall min and max.
  7949. // If the result is positive (positive/positive or negative/negative):
  7950. // The min should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  7951. // divided by ---------------------------------------------------------------
  7952. // largest magnitude denominator (positive_Max2 | negative_Min2)
  7953. //
  7954. // The max should be the largest magnitude numerator (positive_Max1 | negative_Max1)
  7955. // divided by ---------------------------------------------------------------
  7956. // smallest magnitude denominator (positive_Min2 | negative_Max2)
  7957. // If the result is negative (positive/negative or negative/positive):
  7958. // The min should be the largest magnitude numerator (positive_Max1 | negative_Min1)
  7959. // divided by ---------------------------------------------------------------
  7960. // smallest magnitude denominator (negative_Max2 | positive_Min2)
  7961. //
  7962. // The max should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  7963. // divided by ---------------------------------------------------------------
  7964. // largest magnitude denominator (negative_Min2 | positive_Max2)
  7965. // Consider: The range can be slightly more precise if we take care of the rounding
  7966. if (max1 > 0)
  7967. {
  7968. // Take only the positive numerator range
  7969. int32 positive_Min1 = max(1, min1);
  7970. int32 positive_Max1 = max1;
  7971. if (max2 > 0)
  7972. {
  7973. // Take only the positive denominator range
  7974. int32 positive_Min2 = max(1, min2);
  7975. int32 positive_Max2 = max2;
  7976. // Positive / Positive
  7977. int32 quadrant1_Min = positive_Min1 <= positive_Max2? 1 : positive_Min1 / positive_Max2;
  7978. int32 quadrant1_Max = positive_Max1 <= positive_Min2? 1 : positive_Max1 / positive_Min2;
  7979. Assert(1 <= quadrant1_Min && quadrant1_Min <= quadrant1_Max);
  7980. // The result should be positive
  7981. newMin = min(newMin, quadrant1_Min);
  7982. newMax = max(newMax, quadrant1_Max);
  7983. }
  7984. if (min2 < 0)
  7985. {
  7986. // Take only the negative denominator range
  7987. int32 negative_Min2 = min2;
  7988. int32 negative_Max2 = min(-1, max2);
  7989. // Positive / Negative
  7990. int32 quadrant2_Min = -positive_Max1 >= negative_Max2? -1 : positive_Max1 / negative_Max2;
  7991. int32 quadrant2_Max = -positive_Min1 >= negative_Min2? -1 : positive_Min1 / negative_Min2;
  7992. // The result should be negative
  7993. Assert(quadrant2_Min <= quadrant2_Max && quadrant2_Max <= -1);
  7994. newMin = min(newMin, quadrant2_Min);
  7995. newMax = max(newMax, quadrant2_Max);
  7996. }
  7997. }
  7998. if (min1 < 0)
  7999. {
  8000. // Take only the negative numerator range
  8001. int32 negative_Min1 = min1;
  8002. int32 negative_Max1 = min(-1, max1);
  8003. if (max2 > 0)
  8004. {
  8005. // Take only the positive denominator range
  8006. int32 positive_Min2 = max(1, min2);
  8007. int32 positive_Max2 = max2;
  8008. // Negative / Positive
  8009. int32 quadrant4_Min = negative_Min1 >= -positive_Min2? -1 : negative_Min1 / positive_Min2;
  8010. int32 quadrant4_Max = negative_Max1 >= -positive_Max2? -1 : negative_Max1 / positive_Max2;
  8011. // The result should be negative
  8012. Assert(quadrant4_Min <= quadrant4_Max && quadrant4_Max <= -1);
  8013. newMin = min(newMin, quadrant4_Min);
  8014. newMax = max(newMax, quadrant4_Max);
  8015. }
  8016. if (min2 < 0)
  8017. {
  8018. // Take only the negative denominator range
  8019. int32 negative_Min2 = min2;
  8020. int32 negative_Max2 = min(-1, max2);
  8021. int32 quadrant3_Min;
  8022. int32 quadrant3_Max;
  8023. // Negative / Negative
  8024. if (negative_Max1 == 0x80000000 && negative_Min2 == -1)
  8025. {
  8026. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : (negative_Max1+1) / negative_Min2;
  8027. }
  8028. else
  8029. {
  8030. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : negative_Max1 / negative_Min2;
  8031. }
  8032. if (negative_Min1 == 0x80000000 && negative_Max2 == -1)
  8033. {
  8034. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : (negative_Min1+1) / negative_Max2;
  8035. }
  8036. else
  8037. {
  8038. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : negative_Min1 / negative_Max2;
  8039. }
  8040. // The result should be positive
  8041. Assert(1 <= quadrant3_Min && quadrant3_Min <= quadrant3_Max);
  8042. newMin = min(newMin, quadrant3_Min);
  8043. newMax = max(newMax, quadrant3_Max);
  8044. }
  8045. }
  8046. Assert(newMin <= newMax);
  8047. // Continue to int type spec
  8048. break;
  8049. }
  8050. }
  8051. // fall-through
  8052. default:
  8053. {
  8054. const bool involesLargeInt32 =
  8055. (src1Val && src1Val->GetValueInfo()->IsLikelyUntaggedInt()) ||
  8056. (src2Val && src2Val->GetValueInfo()->IsLikelyUntaggedInt());
  8057. const auto trySpecializeToFloat =
  8058. [&](const bool mayOverflow) -> bool
  8059. {
  8060. // It has been determined that this instruction cannot be int-specialized. Need to determine whether to attempt
  8061. // to float-specialize the instruction, or leave it unspecialized.
  8062. if((involesLargeInt32
  8063. #if INT32VAR
  8064. && mayOverflow
  8065. #endif
  8066. ) || (instr->m_opcode == Js::OpCode::Mul_A && !this->DoAggressiveMulIntTypeSpec())
  8067. )
  8068. {
  8069. // An input range is completely outside the range of an int31 and the operation is likely to overflow.
  8070. // Additionally, on 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is
  8071. // significantly slower to use in an unspecialized operation compared to a tagged int. So, try to
  8072. // float-specialize the instruction.
  8073. src1Val = src1OriginalVal;
  8074. src2Val = src2OriginalVal;
  8075. return TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  8076. }
  8077. return false;
  8078. };
  8079. if (instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
  8080. {
  8081. if ((src1Val && src1Val->GetValueInfo()->IsLikelyFloat()) || (src2Val && src2Val->GetValueInfo()->IsLikelyFloat()))
  8082. {
  8083. // Try to type specialize to float
  8084. src1Val = src1OriginalVal;
  8085. src2Val = src2OriginalVal;
  8086. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  8087. }
  8088. if (src1Val == nullptr ||
  8089. src2Val == nullptr ||
  8090. !src1Val->GetValueInfo()->IsLikelyInt() ||
  8091. !src2Val->GetValueInfo()->IsLikelyInt() ||
  8092. (
  8093. !DoAggressiveIntTypeSpec() &&
  8094. (
  8095. !(src1Val->GetValueInfo()->IsInt() || CurrentBlockData()->IsSwitchInt32TypeSpecialized(instr)) ||
  8096. !src2Val->GetValueInfo()->IsInt()
  8097. )
  8098. ) ||
  8099. (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotNumber) ||
  8100. (instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->m_isNotNumber))
  8101. {
  8102. return trySpecializeToFloat(true);
  8103. }
  8104. }
  8105. // Try to type specialize to int32
  8106. // If one of the values is a float constant with a value that fits in a uint32 but not an int32,
  8107. // and the instruction can ignore int overflow, the source value for the purposes of int specialization
  8108. // would have been changed to an int constant value by ignoring overflow. But, the conversion is still lossy.
  8109. if (!(src1OriginalVal && src1OriginalVal->GetValueInfo()->IsFloatConstant() && src1Val && src1Val->GetValueInfo()->HasIntConstantValue()))
  8110. {
  8111. src1Lossy = false;
  8112. }
  8113. if (!(src2OriginalVal && src2OriginalVal->GetValueInfo()->IsFloatConstant() && src2Val && src2Val->GetValueInfo()->HasIntConstantValue()))
  8114. {
  8115. src2Lossy = false;
  8116. }
  8117. switch(instr->m_opcode)
  8118. {
  8119. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  8120. // If the src is already type-specialized, if we don't type-specialize ArgOut_A_InlineBuiltIn instr, we'll get additional ToVar.
  8121. // So, to avoid that, type-specialize the ArgOut_A_InlineBuiltIn instr.
  8122. // Else we don't need to type-specialize the instr, we are fine with src being Var.
  8123. if (instr->GetSrc1()->IsRegOpnd())
  8124. {
  8125. StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
  8126. if (CurrentBlockData()->IsInt32TypeSpecialized(sym))
  8127. {
  8128. opcode = instr->m_opcode;
  8129. skipDst = true; // We should keep dst as is, otherwise the link opnd for next ArgOut/InlineBuiltInStart would be broken.
  8130. skipSrc2 = true; // src2 is linkOpnd. We don't need to type-specialize it.
  8131. newMin = min1; newMax = max1; // Values don't matter, these are unused.
  8132. goto LOutsideSwitch; // Continue to int-type-specialize.
  8133. }
  8134. else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
  8135. {
  8136. src1Val = src1OriginalVal;
  8137. src2Val = src2OriginalVal;
  8138. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  8139. }
  8140. }
  8141. return false;
  8142. case Js::OpCode::Add_A:
  8143. do // while(false)
  8144. {
  8145. const auto CannotOverflowBasedOnRelativeBounds = [&](int32 *const constantValueRef)
  8146. {
  8147. Assert(constantValueRef);
  8148. if(min2 == max2 &&
  8149. src1Val->GetValueInfo()->IsIntBounded() &&
  8150. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min2))
  8151. {
  8152. *constantValueRef = min2;
  8153. return true;
  8154. }
  8155. else if(
  8156. min1 == max1 &&
  8157. src2Val->GetValueInfo()->IsIntBounded() &&
  8158. src2Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min1))
  8159. {
  8160. *constantValueRef = min1;
  8161. return true;
  8162. }
  8163. return false;
  8164. };
  8165. if (Int32Math::Add(min1, min2, &newMin))
  8166. {
  8167. int32 constantSrcValue;
  8168. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  8169. {
  8170. newMin = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  8171. }
  8172. else if(instr->ShouldCheckForIntOverflow())
  8173. {
  8174. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8175. {
  8176. // May overflow
  8177. return trySpecializeToFloat(true);
  8178. }
  8179. bailOutKind |= IR::BailOutOnOverflow;
  8180. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  8181. }
  8182. else
  8183. {
  8184. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since
  8185. // overflow causes the value to wrap around, and we don't have a way to specify a lower and upper
  8186. // range of ints, we use the full range of int32s.
  8187. ignoredIntOverflow = true;
  8188. newMin = INT32_MIN;
  8189. newMax = INT32_MAX;
  8190. break;
  8191. }
  8192. }
  8193. if (Int32Math::Add(max1, max2, &newMax))
  8194. {
  8195. int32 constantSrcValue;
  8196. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  8197. {
  8198. newMax = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  8199. }
  8200. else if(instr->ShouldCheckForIntOverflow())
  8201. {
  8202. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8203. {
  8204. // May overflow
  8205. return trySpecializeToFloat(true);
  8206. }
  8207. bailOutKind |= IR::BailOutOnOverflow;
  8208. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  8209. }
  8210. else
  8211. {
  8212. // See comment about ignoring overflow above
  8213. ignoredIntOverflow = true;
  8214. newMin = INT32_MIN;
  8215. newMax = INT32_MAX;
  8216. break;
  8217. }
  8218. }
  8219. if(bailOutKind & IR::BailOutOnOverflow)
  8220. {
  8221. Assert(bailOutKind == IR::BailOutOnOverflow);
  8222. Assert(instr->ShouldCheckForIntOverflow());
  8223. int32 temp;
  8224. if(Int32Math::Add(
  8225. Int32Math::NearestInRangeTo(0, min1, max1),
  8226. Int32Math::NearestInRangeTo(0, min2, max2),
  8227. &temp))
  8228. {
  8229. // Always overflows
  8230. return trySpecializeToFloat(true);
  8231. }
  8232. }
  8233. } while(false);
  8234. if (!this->IsLoopPrePass() && newMin == newMax && bailOutKind == IR::BailOutInvalid)
  8235. {
  8236. // Take care of Add with zero here, since we know we're dealing with 2 numbers.
  8237. this->CaptureByteCodeSymUses(instr);
  8238. IR::Opnd *src;
  8239. bool isAddZero = true;
  8240. int32 intConstantValue;
  8241. if (src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  8242. {
  8243. src = instr->UnlinkSrc2();
  8244. instr->FreeSrc1();
  8245. }
  8246. else if (src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  8247. {
  8248. src = instr->UnlinkSrc1();
  8249. instr->FreeSrc2();
  8250. }
  8251. else
  8252. {
  8253. // This should have been handled by const folding, unless:
  8254. // - A source's value was substituted with a different value here, which is after const folding happened
  8255. // - A value is not definitely int, but once converted to definite int, it would be zero due to a
  8256. // condition in the source code such as if(a === 0). Ideally, we would specialize the sources and
  8257. // remove the add, but doesn't seem too important for now.
  8258. Assert(
  8259. !DoConstFold() ||
  8260. src1Val != src1OriginalVal ||
  8261. src2Val != src2OriginalVal ||
  8262. !src1Val->GetValueInfo()->IsInt() ||
  8263. !src2Val->GetValueInfo()->IsInt());
  8264. isAddZero = false;
  8265. src = nullptr;
  8266. }
  8267. if (isAddZero)
  8268. {
  8269. IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Ld_A, instr->UnlinkDst(), src, instr->m_func);
  8270. newInstr->SetByteCodeOffset(instr);
  8271. instr->m_opcode = Js::OpCode::Nop;
  8272. this->currentBlock->InsertInstrAfter(newInstr, instr);
  8273. return true;
  8274. }
  8275. }
  8276. if(!ignoredIntOverflow)
  8277. {
  8278. if(min2 == max2 &&
  8279. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  8280. instr->GetSrc1()->IsRegOpnd())
  8281. {
  8282. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, min2);
  8283. }
  8284. else if(
  8285. min1 == max1 &&
  8286. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Val)) &&
  8287. instr->GetSrc2()->IsRegOpnd())
  8288. {
  8289. addSubConstantInfo.Set(instr->GetSrc2()->AsRegOpnd()->m_sym, src2Val, min2 == max2, min1);
  8290. }
  8291. }
  8292. opcode = Js::OpCode::Add_I4;
  8293. break;
  8294. case Js::OpCode::Sub_A:
  8295. do // while(false)
  8296. {
  8297. const auto CannotOverflowBasedOnRelativeBounds = [&]()
  8298. {
  8299. return
  8300. min2 == max2 &&
  8301. src1Val->GetValueInfo()->IsIntBounded() &&
  8302. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(min2);
  8303. };
  8304. if (Int32Math::Sub(min1, max2, &newMin))
  8305. {
  8306. if(CannotOverflowBasedOnRelativeBounds())
  8307. {
  8308. Assert(min2 == max2);
  8309. newMin = min2 >= 0 ? INT32_MIN : INT32_MAX;
  8310. }
  8311. else if(instr->ShouldCheckForIntOverflow())
  8312. {
  8313. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8314. {
  8315. // May overflow
  8316. return trySpecializeToFloat(true);
  8317. }
  8318. bailOutKind |= IR::BailOutOnOverflow;
  8319. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  8320. }
  8321. else
  8322. {
  8323. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
  8324. // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
  8325. // we use the full range of int32s.
  8326. ignoredIntOverflow = true;
  8327. newMin = INT32_MIN;
  8328. newMax = INT32_MAX;
  8329. break;
  8330. }
  8331. }
  8332. if (Int32Math::Sub(max1, min2, &newMax))
  8333. {
  8334. if(CannotOverflowBasedOnRelativeBounds())
  8335. {
  8336. Assert(min2 == max2);
  8337. newMax = min2 >= 0 ? INT32_MIN: INT32_MAX;
  8338. }
  8339. else if(instr->ShouldCheckForIntOverflow())
  8340. {
  8341. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8342. {
  8343. // May overflow
  8344. return trySpecializeToFloat(true);
  8345. }
  8346. bailOutKind |= IR::BailOutOnOverflow;
  8347. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  8348. }
  8349. else
  8350. {
  8351. // See comment about ignoring overflow above
  8352. ignoredIntOverflow = true;
  8353. newMin = INT32_MIN;
  8354. newMax = INT32_MAX;
  8355. break;
  8356. }
  8357. }
  8358. if(bailOutKind & IR::BailOutOnOverflow)
  8359. {
  8360. Assert(bailOutKind == IR::BailOutOnOverflow);
  8361. Assert(instr->ShouldCheckForIntOverflow());
  8362. int32 temp;
  8363. if(Int32Math::Sub(
  8364. Int32Math::NearestInRangeTo(-1, min1, max1),
  8365. Int32Math::NearestInRangeTo(0, min2, max2),
  8366. &temp))
  8367. {
  8368. // Always overflows
  8369. return trySpecializeToFloat(true);
  8370. }
  8371. }
  8372. } while(false);
  8373. if(!ignoredIntOverflow &&
  8374. min2 == max2 &&
  8375. min2 != INT32_MIN &&
  8376. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  8377. instr->GetSrc1()->IsRegOpnd())
  8378. {
  8379. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, -min2);
  8380. }
  8381. opcode = Js::OpCode::Sub_I4;
  8382. break;
  8383. case Js::OpCode::Mul_A:
  8384. {
  8385. bool isConservativeMulInt = !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec();
  8386. // Be conservative about predicting Mul overflow in prepass.
  8387. // Operands that are live on back edge may be denied lossless-conversion to int32 and
  8388. // trigger rejit with AggressiveIntTypeSpec off.
  8389. // Besides multiplying a variable in a loop can overflow in just a few iterations even in simple cases like v *= 2
  8390. // So, make sure we definitely know the source max/min values, otherwise assume the full range.
  8391. if (isConservativeMulInt && IsLoopPrePass())
  8392. {
  8393. if (!IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Val))
  8394. {
  8395. max1 = INT32_MAX;
  8396. min1 = INT32_MIN;
  8397. }
  8398. if (!IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val))
  8399. {
  8400. max2 = INT32_MAX;
  8401. min2 = INT32_MIN;
  8402. }
  8403. }
  8404. if (Int32Math::Mul(min1, min2, &newMin))
  8405. {
  8406. if (involesLargeInt32 || isConservativeMulInt)
  8407. {
  8408. // May overflow
  8409. return trySpecializeToFloat(true);
  8410. }
  8411. bailOutKind |= IR::BailOutOnMulOverflow;
  8412. newMin = (min1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  8413. }
  8414. newMax = newMin;
  8415. if (Int32Math::Mul(max1, max2, &tmp))
  8416. {
  8417. if (involesLargeInt32 || isConservativeMulInt)
  8418. {
  8419. // May overflow
  8420. return trySpecializeToFloat(true);
  8421. }
  8422. bailOutKind |= IR::BailOutOnMulOverflow;
  8423. tmp = (max1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  8424. }
  8425. newMin = min(newMin, tmp);
  8426. newMax = max(newMax, tmp);
  8427. if (Int32Math::Mul(min1, max2, &tmp))
  8428. {
  8429. if (involesLargeInt32 || isConservativeMulInt)
  8430. {
  8431. // May overflow
  8432. return trySpecializeToFloat(true);
  8433. }
  8434. bailOutKind |= IR::BailOutOnMulOverflow;
  8435. tmp = (min1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  8436. }
  8437. newMin = min(newMin, tmp);
  8438. newMax = max(newMax, tmp);
  8439. if (Int32Math::Mul(max1, min2, &tmp))
  8440. {
  8441. if (involesLargeInt32 || isConservativeMulInt)
  8442. {
  8443. // May overflow
  8444. return trySpecializeToFloat(true);
  8445. }
  8446. bailOutKind |= IR::BailOutOnMulOverflow;
  8447. tmp = (max1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  8448. }
  8449. newMin = min(newMin, tmp);
  8450. newMax = max(newMax, tmp);
  8451. if (bailOutKind & IR::BailOutOnMulOverflow)
  8452. {
  8453. // CSE only if two MULs have the same overflow check behavior.
  8454. // Currently this is set to be ignore int32 overflow, but not 53-bit, or int32 overflow matters.
  8455. if (!instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
  8456. {
  8457. // If we allow int to overflow then there can be anything in the resulting int
  8458. newMin = IntConstMin;
  8459. newMax = IntConstMax;
  8460. ignoredIntOverflow = true;
  8461. }
  8462. int32 temp, overflowValue;
  8463. if (Int32Math::Mul(
  8464. Int32Math::NearestInRangeTo(0, min1, max1),
  8465. Int32Math::NearestInRangeTo(0, min2, max2),
  8466. &temp,
  8467. &overflowValue))
  8468. {
  8469. Assert(instr->ignoreOverflowBitCount >= 32);
  8470. int overflowMatters = 64 - instr->ignoreOverflowBitCount;
  8471. if (!ignoredIntOverflow ||
  8472. // Use shift to check high bits in case its negative
  8473. ((overflowValue << overflowMatters) >> overflowMatters) != overflowValue
  8474. )
  8475. {
  8476. // Always overflows
  8477. return trySpecializeToFloat(true);
  8478. }
  8479. }
  8480. }
  8481. if (newMin <= 0 && newMax >= 0 && // New range crosses zero
  8482. (min1 < 0 || min2 < 0) && // An operand's range contains a negative integer
  8483. !(min1 > 0 || min2 > 0) && // Neither operand's range contains only positive integers
  8484. !instr->GetSrc1()->IsEqual(instr->GetSrc2())) // The operands don't have the same value
  8485. {
  8486. if (instr->ShouldCheckForNegativeZero())
  8487. {
  8488. // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
  8489. if (!DoAggressiveIntTypeSpec())
  8490. {
  8491. // May result in -0
  8492. return trySpecializeToFloat(false);
  8493. }
  8494. if (((min1 == 0 && max1 == 0) || (min2 == 0 && max2 == 0)) && (max1 < 0 || max2 < 0))
  8495. {
  8496. // Always results in -0
  8497. return trySpecializeToFloat(false);
  8498. }
  8499. bailOutKind |= IR::BailOutOnNegativeZero;
  8500. }
  8501. else
  8502. {
  8503. ignoredNegativeZero = true;
  8504. }
  8505. }
  8506. opcode = Js::OpCode::Mul_I4;
  8507. break;
  8508. }
  8509. case Js::OpCode::Rem_A:
  8510. {
  8511. IR::Opnd* src2 = instr->GetSrc2();
  8512. if (!this->IsLoopPrePass() && min2 == max2 && min1 >= 0)
  8513. {
  8514. int32 value = min2;
  8515. if (value == (1 << Math::Log2(value)) && src2->IsAddrOpnd())
  8516. {
  8517. Assert(src2->AsAddrOpnd()->IsVar());
  8518. instr->m_opcode = Js::OpCode::And_A;
  8519. src2->AsAddrOpnd()->SetAddress(Js::TaggedInt::ToVarUnchecked(value - 1),
  8520. IR::AddrOpndKindConstantVar);
  8521. *pSrc2Val = GetIntConstantValue(value - 1, instr);
  8522. src2Val = *pSrc2Val;
  8523. return this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2Val, redoTypeSpecRef);
  8524. }
  8525. }
  8526. #ifdef _M_ARM
  8527. if (!AutoSystemInfo::Data.ArmDivAvailable())
  8528. {
  8529. return false;
  8530. }
  8531. #endif
  8532. if (min1 < 0)
  8533. {
  8534. // The most negative it can be is min1, unless limited by min2/max2
  8535. int32 negMaxAbs2;
  8536. if (min2 == INT32_MIN)
  8537. {
  8538. negMaxAbs2 = INT32_MIN;
  8539. }
  8540. else
  8541. {
  8542. negMaxAbs2 = -max(abs(min2), abs(max2)) + 1;
  8543. }
  8544. newMin = max(min1, negMaxAbs2);
  8545. }
  8546. else
  8547. {
  8548. newMin = 0;
  8549. }
  8550. bool isModByPowerOf2 = (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo() &&
  8551. instr->m_func->GetReadOnlyProfileInfo()->IsModulusOpByPowerOf2(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId)));
  8552. if(isModByPowerOf2)
  8553. {
  8554. Assert(bailOutKind == IR::BailOutInvalid);
  8555. bailOutKind = IR::BailOnModByPowerOf2;
  8556. newMin = 0;
  8557. }
  8558. else
  8559. {
  8560. if (min2 <= 0 && max2 >= 0)
  8561. {
  8562. // Consider: We could handle the zero case with a check and bailout...
  8563. return false;
  8564. }
  8565. if (min1 == 0x80000000 && (min2 <= -1 && max2 >= -1))
  8566. {
  8567. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  8568. return false;
  8569. }
  8570. if (min1 < 0)
  8571. {
  8572. if(instr->ShouldCheckForNegativeZero())
  8573. {
  8574. if (!DoAggressiveIntTypeSpec())
  8575. {
  8576. return false;
  8577. }
  8578. bailOutKind |= IR::BailOutOnNegativeZero;
  8579. }
  8580. else
  8581. {
  8582. ignoredNegativeZero = true;
  8583. }
  8584. }
  8585. }
  8586. {
  8587. int32 absMax2;
  8588. if (min2 == INT32_MIN)
  8589. {
  8590. // abs(INT32_MIN) == INT32_MAX because of overflow
  8591. absMax2 = INT32_MAX;
  8592. }
  8593. else
  8594. {
  8595. absMax2 = max(abs(min2), abs(max2)) - 1;
  8596. }
  8597. newMax = min(absMax2, max(max1, 0));
  8598. newMax = max(newMin, newMax);
  8599. }
  8600. opcode = Js::OpCode::Rem_I4;
  8601. Assert(!instr->GetSrc1()->IsUnsigned());
  8602. break;
  8603. }
  8604. case Js::OpCode::CmEq_A:
  8605. case Js::OpCode::CmSrEq_A:
  8606. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8607. {
  8608. return false;
  8609. }
  8610. newMin = 0;
  8611. newMax = 1;
  8612. opcode = Js::OpCode::CmEq_I4;
  8613. needsBoolConv = true;
  8614. break;
  8615. case Js::OpCode::CmNeq_A:
  8616. case Js::OpCode::CmSrNeq_A:
  8617. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8618. {
  8619. return false;
  8620. }
  8621. newMin = 0;
  8622. newMax = 1;
  8623. opcode = Js::OpCode::CmNeq_I4;
  8624. needsBoolConv = true;
  8625. break;
  8626. case Js::OpCode::CmLe_A:
  8627. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8628. {
  8629. return false;
  8630. }
  8631. newMin = 0;
  8632. newMax = 1;
  8633. opcode = Js::OpCode::CmLe_I4;
  8634. needsBoolConv = true;
  8635. break;
  8636. case Js::OpCode::CmLt_A:
  8637. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8638. {
  8639. return false;
  8640. }
  8641. newMin = 0;
  8642. newMax = 1;
  8643. opcode = Js::OpCode::CmLt_I4;
  8644. needsBoolConv = true;
  8645. break;
  8646. case Js::OpCode::CmGe_A:
  8647. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8648. {
  8649. return false;
  8650. }
  8651. newMin = 0;
  8652. newMax = 1;
  8653. opcode = Js::OpCode::CmGe_I4;
  8654. needsBoolConv = true;
  8655. break;
  8656. case Js::OpCode::CmGt_A:
  8657. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8658. {
  8659. return false;
  8660. }
  8661. newMin = 0;
  8662. newMax = 1;
  8663. opcode = Js::OpCode::CmGt_I4;
  8664. needsBoolConv = true;
  8665. break;
  8666. case Js::OpCode::BrSrEq_A:
  8667. case Js::OpCode::BrEq_A:
  8668. case Js::OpCode::BrNotNeq_A:
  8669. case Js::OpCode::BrSrNotNeq_A:
  8670. {
  8671. if(DoConstFold() &&
  8672. !IsLoopPrePass() &&
  8673. TryOptConstFoldBrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  8674. {
  8675. return true;
  8676. }
  8677. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8678. UpdateIntBoundsForEqualBranch(src1Val, src2Val);
  8679. if(!specialize)
  8680. {
  8681. return false;
  8682. }
  8683. opcode = Js::OpCode::BrEq_I4;
  8684. // We'll get a warning if we don't assign a value to these...
  8685. // We'll assert if we use them and make a range where min > max
  8686. newMin = 2; newMax = 1;
  8687. break;
  8688. }
  8689. case Js::OpCode::BrSrNeq_A:
  8690. case Js::OpCode::BrNeq_A:
  8691. case Js::OpCode::BrSrNotEq_A:
  8692. case Js::OpCode::BrNotEq_A:
  8693. {
  8694. if(DoConstFold() &&
  8695. !IsLoopPrePass() &&
  8696. TryOptConstFoldBrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  8697. {
  8698. return true;
  8699. }
  8700. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8701. UpdateIntBoundsForNotEqualBranch(src1Val, src2Val);
  8702. if(!specialize)
  8703. {
  8704. return false;
  8705. }
  8706. opcode = Js::OpCode::BrNeq_I4;
  8707. // We'll get a warning if we don't assign a value to these...
  8708. // We'll assert if we use them and make a range where min > max
  8709. newMin = 2; newMax = 1;
  8710. break;
  8711. }
  8712. case Js::OpCode::BrGt_A:
  8713. case Js::OpCode::BrNotLe_A:
  8714. {
  8715. if(DoConstFold() &&
  8716. !IsLoopPrePass() &&
  8717. TryOptConstFoldBrGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  8718. {
  8719. return true;
  8720. }
  8721. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8722. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  8723. if(!specialize)
  8724. {
  8725. return false;
  8726. }
  8727. opcode = Js::OpCode::BrGt_I4;
  8728. // We'll get a warning if we don't assign a value to these...
  8729. // We'll assert if we use them and make a range where min > max
  8730. newMin = 2; newMax = 1;
  8731. break;
  8732. }
  8733. case Js::OpCode::BrGe_A:
  8734. case Js::OpCode::BrNotLt_A:
  8735. {
  8736. if(DoConstFold() &&
  8737. !IsLoopPrePass() &&
  8738. TryOptConstFoldBrGreaterThanOrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  8739. {
  8740. return true;
  8741. }
  8742. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8743. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  8744. if(!specialize)
  8745. {
  8746. return false;
  8747. }
  8748. opcode = Js::OpCode::BrGe_I4;
  8749. // We'll get a warning if we don't assign a value to these...
  8750. // We'll assert if we use them and make a range where min > max
  8751. newMin = 2; newMax = 1;
  8752. break;
  8753. }
  8754. case Js::OpCode::BrLt_A:
  8755. case Js::OpCode::BrNotGe_A:
  8756. {
  8757. if(DoConstFold() &&
  8758. !IsLoopPrePass() &&
  8759. TryOptConstFoldBrGreaterThanOrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  8760. {
  8761. return true;
  8762. }
  8763. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8764. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  8765. if(!specialize)
  8766. {
  8767. return false;
  8768. }
  8769. opcode = Js::OpCode::BrLt_I4;
  8770. // We'll get a warning if we don't assign a value to these...
  8771. // We'll assert if we use them and make a range where min > max
  8772. newMin = 2; newMax = 1;
  8773. break;
  8774. }
  8775. case Js::OpCode::BrLe_A:
  8776. case Js::OpCode::BrNotGt_A:
  8777. {
  8778. if(DoConstFold() &&
  8779. !IsLoopPrePass() &&
  8780. TryOptConstFoldBrGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  8781. {
  8782. return true;
  8783. }
  8784. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8785. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  8786. if(!specialize)
  8787. {
  8788. return false;
  8789. }
  8790. opcode = Js::OpCode::BrLe_I4;
  8791. // We'll get a warning if we don't assign a value to these...
  8792. // We'll assert if we use them and make a range where min > max
  8793. newMin = 2; newMax = 1;
  8794. break;
  8795. }
  8796. default:
  8797. return false;
  8798. }
  8799. // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it
  8800. // (won't leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value
  8801. // needs to be guaranteed to be an int
  8802. if(!ignoredIntOverflow &&
  8803. !ignoredNegativeZero &&
  8804. !needsBoolConv &&
  8805. instr->ShouldCheckForIntOverflow() &&
  8806. !IsWorthSpecializingToInt32(instr, src1Val, src2Val))
  8807. {
  8808. // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
  8809. // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
  8810. // the following cases.
  8811. // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to
  8812. // be an int, but since we're not going to specialize this instruction, there won't be a bailout check.
  8813. // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
  8814. // that case, especially if the dst sym is live on the back-edge.
  8815. if(bailOutKind == IR::BailOutInvalid &&
  8816. instr->GetDst() &&
  8817. src1Val->GetValueInfo()->IsInt() &&
  8818. src2Val->GetValueInfo()->IsInt() &&
  8819. (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
  8820. {
  8821. *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  8822. }
  8823. return false;
  8824. }
  8825. } // case default
  8826. } // switch
  8827. LOutsideSwitch:
  8828. this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
  8829. this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;
  8830. {
  8831. // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
  8832. Value *src1IndirIndexVal = nullptr;
  8833. if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
  8834. {
  8835. *redoTypeSpecRef = true;
  8836. return false;
  8837. }
  8838. }
  8839. const Js::OpCode originalOpCode = instr->m_opcode;
  8840. if (!this->IsLoopPrePass())
  8841. {
  8842. // No re-write on prepass
  8843. instr->m_opcode = opcode;
  8844. }
  8845. Value *src1ValueToSpecialize = src1Val, *src2ValueToSpecialize = src2Val;
  8846. // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
  8847. // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
  8848. // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
  8849. // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
  8850. // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
  8851. // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
  8852. if(src1Lossy)
  8853. {
  8854. src1ValueToSpecialize = src1OriginalVal;
  8855. }
  8856. if (src2Lossy)
  8857. {
  8858. src2ValueToSpecialize = src2OriginalVal;
  8859. }
  8860. // Make sure the srcs are specialized
  8861. IR::Opnd* src1 = instr->GetSrc1();
  8862. this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, src1Lossy);
  8863. if (!skipSrc2)
  8864. {
  8865. IR::Opnd* src2 = instr->GetSrc2();
  8866. this->ToInt32(instr, src2, this->currentBlock, src2ValueToSpecialize, nullptr, src2Lossy);
  8867. }
  8868. if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
  8869. {
  8870. GenerateBailAtOperation(&instr, bailOutKind);
  8871. }
  8872. if (!skipDst && instr->GetDst())
  8873. {
  8874. if (needsBoolConv)
  8875. {
  8876. IR::RegOpnd *varDst;
  8877. if (this->IsLoopPrePass())
  8878. {
  8879. varDst = instr->GetDst()->AsRegOpnd();
  8880. this->ToVarRegOpnd(varDst, this->currentBlock);
  8881. }
  8882. else
  8883. {
  8884. // Generate:
  8885. // t1.i = CmCC t2.i, t3.i
  8886. // t1.v = Conv_bool t1.i
  8887. //
  8888. // If the only uses of t1 are ints, the conv_bool will get dead-stored
  8889. TypeSpecializeIntDst(instr, originalOpCode, nullptr, src1Val, src2Val, bailOutKind, newMin, newMax, pDstVal);
  8890. IR::RegOpnd *intDst = instr->GetDst()->AsRegOpnd();
  8891. intDst->SetIsJITOptimizedReg(true);
  8892. varDst = IR::RegOpnd::New(intDst->m_sym->GetVarEquivSym(this->func), TyVar, this->func);
  8893. IR::Instr *convBoolInstr = IR::Instr::New(Js::OpCode::Conv_Bool, varDst, intDst, this->func);
  8894. // In some cases (e.g. unsigned compare peep code), a comparison will use variables
  8895. // other than the ones initially intended for it, if we can determine that we would
  8896. // arrive at the same result. This means that we get a ByteCodeUses operation after
  8897. // the actual comparison. Since Inserting the Conv_bool just after the compare, and
  8898. // just before the ByteCodeUses, would cause issues later on with register lifetime
  8899. // calculation, we want to insert the Conv_bool after the whole compare instruction
  8900. // block.
  8901. IR::Instr *putAfter = instr;
  8902. while (putAfter->m_next && putAfter->m_next->IsByteCodeUsesInstrFor(instr))
  8903. {
  8904. putAfter = putAfter->m_next;
  8905. }
  8906. putAfter->InsertAfter(convBoolInstr);
  8907. convBoolInstr->SetByteCodeOffset(instr);
  8908. this->ToVarRegOpnd(varDst, this->currentBlock);
  8909. CurrentBlockData()->liveInt32Syms->Set(varDst->m_sym->m_id);
  8910. CurrentBlockData()->liveLossyInt32Syms->Set(varDst->m_sym->m_id);
  8911. }
  8912. *pDstVal = this->NewGenericValue(ValueType::Boolean, varDst);
  8913. }
  8914. else
  8915. {
  8916. TypeSpecializeIntDst(
  8917. instr,
  8918. originalOpCode,
  8919. nullptr,
  8920. src1Val,
  8921. src2Val,
  8922. bailOutKind,
  8923. newMin,
  8924. newMax,
  8925. pDstVal,
  8926. addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
  8927. }
  8928. }
  8929. if(bailOutKind == IR::BailOutInvalid)
  8930. {
  8931. GOPT_TRACE(_u("Type specialized to INT\n"));
  8932. #if ENABLE_DEBUG_CONFIG_OPTIONS
  8933. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  8934. {
  8935. Output::Print(_u("Type specialized to INT: "));
  8936. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  8937. }
  8938. #endif
  8939. }
  8940. else
  8941. {
  8942. GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
  8943. if(bailOutKind & (IR::BailOutOnOverflow | IR::BailOutOnMulOverflow) )
  8944. {
  8945. GOPT_TRACE(_u(" Overflow\n"));
  8946. #if ENABLE_DEBUG_CONFIG_OPTIONS
  8947. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  8948. {
  8949. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
  8950. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  8951. }
  8952. #endif
  8953. }
  8954. if(bailOutKind & IR::BailOutOnNegativeZero)
  8955. {
  8956. GOPT_TRACE(_u(" Zero\n"));
  8957. #if ENABLE_DEBUG_CONFIG_OPTIONS
  8958. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  8959. {
  8960. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
  8961. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  8962. }
  8963. #endif
  8964. }
  8965. }
  8966. return true;
  8967. }
  8968. bool
  8969. GlobOpt::IsWorthSpecializingToInt32Branch(IR::Instr const * instr, Value const * src1Val, Value const * src2Val) const
  8970. {
  8971. if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
  8972. {
  8973. StackSym const *sym1 = instr->GetSrc1()->AsRegOpnd()->m_sym;
  8974. if (CurrentBlockData()->IsInt32TypeSpecialized(sym1) == false)
  8975. {
  8976. if (!src2Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc2()->IsRegOpnd())
  8977. {
  8978. StackSym const *sym2 = instr->GetSrc2()->AsRegOpnd()->m_sym;
  8979. if (CurrentBlockData()->IsInt32TypeSpecialized(sym2) == false)
  8980. {
  8981. // Type specializing a Br itself isn't worth it, unless one src
  8982. // is already type specialized
  8983. return false;
  8984. }
  8985. }
  8986. }
  8987. }
  8988. return true;
  8989. }
  8990. bool
  8991. GlobOpt::TryOptConstFoldBrFalse(
  8992. IR::Instr *const instr,
  8993. Value *const srcValue,
  8994. const int32 min,
  8995. const int32 max)
  8996. {
  8997. Assert(instr);
  8998. Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
  8999. Assert(srcValue);
  9000. if(!(DoAggressiveIntTypeSpec() ? srcValue->GetValueInfo()->IsLikelyInt() : srcValue->GetValueInfo()->IsInt()))
  9001. {
  9002. return false;
  9003. }
  9004. if(ValueInfo::IsEqualTo(srcValue, min, max, nullptr, 0, 0))
  9005. {
  9006. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrFalse_A, instr, srcValue);
  9007. return true;
  9008. }
  9009. if(ValueInfo::IsNotEqualTo(srcValue, min, max, nullptr, 0, 0))
  9010. {
  9011. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrTrue_A, instr, srcValue);
  9012. return true;
  9013. }
  9014. return false;
  9015. }
  9016. bool
  9017. GlobOpt::TryOptConstFoldBrEqual(
  9018. IR::Instr *const instr,
  9019. const bool branchOnEqual,
  9020. Value *const src1Value,
  9021. const int32 min1,
  9022. const int32 max1,
  9023. Value *const src2Value,
  9024. const int32 min2,
  9025. const int32 max2)
  9026. {
  9027. Assert(instr);
  9028. Assert(src1Value);
  9029. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  9030. Assert(src2Value);
  9031. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  9032. if(ValueInfo::IsEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  9033. {
  9034. OptConstFoldBr(branchOnEqual, instr, src1Value, src2Value);
  9035. return true;
  9036. }
  9037. if(ValueInfo::IsNotEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  9038. {
  9039. OptConstFoldBr(!branchOnEqual, instr, src1Value, src2Value);
  9040. return true;
  9041. }
  9042. return false;
  9043. }
  9044. bool
  9045. GlobOpt::TryOptConstFoldBrGreaterThan(
  9046. IR::Instr *const instr,
  9047. const bool branchOnGreaterThan,
  9048. Value *const src1Value,
  9049. const int32 min1,
  9050. const int32 max1,
  9051. Value *const src2Value,
  9052. const int32 min2,
  9053. const int32 max2)
  9054. {
  9055. Assert(instr);
  9056. Assert(src1Value);
  9057. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  9058. Assert(src2Value);
  9059. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  9060. if(ValueInfo::IsGreaterThan(src1Value, min1, max1, src2Value, min2, max2))
  9061. {
  9062. OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
  9063. return true;
  9064. }
  9065. if(ValueInfo::IsLessThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  9066. {
  9067. OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
  9068. return true;
  9069. }
  9070. return false;
  9071. }
  9072. bool
  9073. GlobOpt::TryOptConstFoldBrGreaterThanOrEqual(
  9074. IR::Instr *const instr,
  9075. const bool branchOnGreaterThanOrEqual,
  9076. Value *const src1Value,
  9077. const int32 min1,
  9078. const int32 max1,
  9079. Value *const src2Value,
  9080. const int32 min2,
  9081. const int32 max2)
  9082. {
  9083. Assert(instr);
  9084. Assert(src1Value);
  9085. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  9086. Assert(src2Value);
  9087. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  9088. if(ValueInfo::IsGreaterThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  9089. {
  9090. OptConstFoldBr(branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  9091. return true;
  9092. }
  9093. if(ValueInfo::IsLessThan(src1Value, min1, max1, src2Value, min2, max2))
  9094. {
  9095. OptConstFoldBr(!branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  9096. return true;
  9097. }
  9098. return false;
  9099. }
// Tries to constant-fold an unsigned less-than branch when the conservative
// unsigned ranges of the two sources are provably disjoint.
// Returns true if the branch was folded.
bool
GlobOpt::TryOptConstFoldBrUnsignedLessThan(
    IR::Instr *const instr,
    const bool branchOnLessThan,
    Value *const src1Value,
    const int32 min1,
    const int32 max1,
    Value *const src2Value,
    const int32 min2,
    const int32 max2)
{
    Assert(DoConstFold());
    Assert(!IsLoopPrePass());

    // Both sources must be (likely) ints for the signed ranges to be usable.
    if(!src1Value ||
        !src2Value ||
        !(
            DoAggressiveIntTypeSpec()
                ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
                : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
        ))
    {
        return false;
    }

    // Convert each signed range [min, max] to a conservative unsigned range
    // [uMin, uMax]:
    // - min >= 0: the range maps over unchanged.
    // - max < 0:  the whole range maps order-preserving into the upper half.
    // - straddles zero: unsigned values wrap, so widen to [0, (uint)min]
    //   (a superset of the true value set — sound for folding).
    uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
    uint uMax1 = max((uint)min1, (uint)max1);
    uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
    uint uMax2 = max((uint)min2, (uint)max2);

    if (uMax1 < uMin2)
    {
        // Range 1 is always lesser than Range 2
        OptConstFoldBr(branchOnLessThan, instr, src1Value, src2Value);
        return true;
    }
    if (uMin1 >= uMax2)
    {
        // Range 2 is always lesser than Range 1
        // (note: allows equality, which still makes "src1 < src2" false)
        OptConstFoldBr(!branchOnLessThan, instr, src1Value, src2Value);
        return true;
    }
    return false;
}
// Tries to constant-fold an unsigned greater-than branch when the conservative
// unsigned ranges of the two sources are provably disjoint.
// Returns true if the branch was folded.
bool
GlobOpt::TryOptConstFoldBrUnsignedGreaterThan(
    IR::Instr *const instr,
    const bool branchOnGreaterThan,
    Value *const src1Value,
    const int32 min1,
    const int32 max1,
    Value *const src2Value,
    const int32 min2,
    const int32 max2)
{
    Assert(DoConstFold());
    Assert(!IsLoopPrePass());

    // Both sources must be (likely) ints for the signed ranges to be usable.
    if(!src1Value ||
        !src2Value ||
        !(
            DoAggressiveIntTypeSpec()
                ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
                : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
        ))
    {
        return false;
    }

    // Convert each signed range [min, max] to a conservative unsigned range
    // [uMin, uMax]; see TryOptConstFoldBrUnsignedLessThan for the mapping.
    // A range straddling zero is widened to [0, (uint)min], a superset of the
    // true value set — sound for folding.
    uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
    uint uMax1 = max((uint)min1, (uint)max1);
    uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
    uint uMax2 = max((uint)min2, (uint)max2);

    if (uMin1 > uMax2)
    {
        // Range 1 is always greater than Range 2
        OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
        return true;
    }
    if (uMax1 <= uMin2)
    {
        // Range 2 is always greater than Range 1
        // (note: allows equality, which still makes "src1 > src2" false)
        OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
        return true;
    }
    return false;
}
// Attaches path-dependent value information to one of the two successor edges
// of the current block.
// conditionToBranch: true to tag the taken-branch edge, false to tag the
// fall-through edge.
void
GlobOpt::SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info)
{
    Assert(this->currentBlock->GetSuccList()->Count() == 2);

    // The fall-through path starts at the first instruction of the next block;
    // the edge whose successor does NOT start there is the taken-branch edge.
    IR::Instr * fallthrough = this->currentBlock->GetNext()->GetFirstInstr();
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, this->currentBlock->GetSuccList())
    {
        if (conditionToBranch == (edge->GetSucc()->GetFirstInstr() != fallthrough))
        {
            edge->SetPathDependentInfo(info, alloc);
            return;
        }
    }
    NEXT_SLISTBASECOUNTED_ENTRY;

    // In case flowgraph peeps is disabled, we could have conditional branch to next instr
    Assert(this->func->HasTry() || PHASE_OFF(Js::FGPeepsPhase, this->func));
}
// Narrows the value infos of the left/right values involved in a path-dependent
// relationship (e.g. after taking one side of a compare branch), using the
// relationship to tighten each side's int bounds against the other's.
// Returns the ORIGINAL value infos so RestorePathDependentInfo can undo the
// narrowing; a null entry in the returned struct means that side was not changed.
PathDependentInfoToRestore
GlobOpt::UpdatePathDependentInfo(PathDependentInfo *const info)
{
    Assert(info);
    if(!info->HasInfo())
    {
        return PathDependentInfoToRestore();
    }

    // Pick the bound-update member function for each side based on the
    // relationship; the right side uses the mirrored relation.
    decltype(&GlobOpt::UpdateIntBoundsForEqual) UpdateIntBoundsForLeftValue, UpdateIntBoundsForRightValue;
    switch(info->Relationship())
    {
        case PathDependentRelationship::Equal:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForEqual;
            break;
        case PathDependentRelationship::NotEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            break;
        case PathDependentRelationship::GreaterThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            break;
        case PathDependentRelationship::GreaterThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThan;
            break;
        case PathDependentRelationship::LessThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            break;
        case PathDependentRelationship::LessThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            break;
        default:
            Assert(false);
            __assume(false);
    }

    // Gather current bounds for both sides. When there is no right value, the
    // right side is a constant and gets a degenerate [c, c] bounds range.
    ValueInfo *leftValueInfo = info->LeftValue()->GetValueInfo();
    IntConstantBounds leftConstantBounds;
    AssertVerify(leftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));
    ValueInfo *rightValueInfo;
    IntConstantBounds rightConstantBounds;
    if(info->RightValue())
    {
        rightValueInfo = info->RightValue()->GetValueInfo();
        AssertVerify(rightValueInfo->TryGetIntConstantBounds(&rightConstantBounds, true));
    }
    else
    {
        rightValueInfo = nullptr;
        rightConstantBounds = IntConstantBounds(info->RightConstantValue(), info->RightConstantValue());
    }

    // Narrow the left side first; if it changed, refresh leftConstantBounds so
    // the right side is narrowed against the tightened left bounds.
    ValueInfo *const newLeftValueInfo =
        (this->*UpdateIntBoundsForLeftValue)(
            info->LeftValue(),
            leftConstantBounds,
            info->RightValue(),
            rightConstantBounds,
            true);
    if(newLeftValueInfo)
    {
        ChangeValueInfo(nullptr, info->LeftValue(), newLeftValueInfo);
        AssertVerify(newLeftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));
    }
    else
    {
        // Unchanged: report null so the restore step leaves this side alone.
        leftValueInfo = nullptr;
    }

    // Narrow the right side against the (possibly tightened) left bounds.
    ValueInfo *const newRightValueInfo =
        (this->*UpdateIntBoundsForRightValue)(
            info->RightValue(),
            rightConstantBounds,
            info->LeftValue(),
            leftConstantBounds,
            true);
    if(newRightValueInfo)
    {
        ChangeValueInfo(nullptr, info->RightValue(), newRightValueInfo);
    }
    else
    {
        // Unchanged: report null so the restore step leaves this side alone.
        rightValueInfo = nullptr;
    }
    return PathDependentInfoToRestore(leftValueInfo, rightValueInfo);
}
  9286. void
  9287. GlobOpt::RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore)
  9288. {
  9289. Assert(info);
  9290. if(infoToRestore.LeftValueInfo())
  9291. {
  9292. Assert(info->LeftValue());
  9293. ChangeValueInfo(nullptr, info->LeftValue(), infoToRestore.LeftValueInfo());
  9294. }
  9295. if(infoToRestore.RightValueInfo())
  9296. {
  9297. Assert(info->RightValue());
  9298. ChangeValueInfo(nullptr, info->RightValue(), infoToRestore.RightValueInfo());
  9299. }
  9300. }
// Tries to float64-type-specialize a unary (or transfer-like) instruction.
// Converts the source to float64, specializes the dst (unless skipDst), and
// possibly rewrites the opcode. Returns true if the instruction was specialized.
bool
GlobOpt::TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *src1;
    IR::Opnd *dst;
    Js::OpCode opcode = instr->m_opcode;
    Value *valueToTransfer = nullptr;
    // Note precedence: (src1Val && IsLikelyNumber) || IsInlineBuiltIn.
    Assert(src1Val && src1Val->GetValueInfo()->IsLikelyNumber() || OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
    if (!this->DoFloatTypeSpec())
    {
        return false;
    }
    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (opcode)
        {
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Built-in argument out: the source is specialized but no dst is written.
            skipDst = true;
            // fall-through
        case Js::OpCode::Ld_A:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if (CurrentBlockData()->IsFloat64TypeSpecialized(sym) == false)
                {
                    // Type specializing an Ld_A isn't worth it, unless the src
                    // is already type specialized
                    return false;
                }
            }
            if (instr->m_opcode == Js::OpCode::Ld_A)
            {
                // Plain transfer: dst takes the src's value.
                valueToTransfer = src1Val;
            }
            break;
        case Js::OpCode::Neg_A:
            break;
        case Js::OpCode::Conv_Num:
            // Conv_Num on a float-specialized src becomes a simple transfer (Ld_A).
            Assert(src1Val);
            opcode = Js::OpCode::Ld_A;
            valueToTransfer = src1Val;
            if (!src1Val->GetValueInfo()->IsNumber())
            {
                // Src not proven to be a number: give the dst a fresh Float value
                // instead of transferring the src's value.
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                valueToTransfer = NewGenericValue(ValueType::Float, instr->GetDst()->GetStackSym());
                if (CurrentBlockData()->IsFloat64TypeSpecialized(sym) == false)
                {
                    // Set the dst as a nonDeadStore. We want to keep the Ld_A to prevent the FromVar from
                    // being dead-stored, as it could cause implicit calls.
                    dst = instr->GetDst();
                    dst->AsRegOpnd()->m_dontDeadStore = true;
                }
            }
            break;
        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        case Js::OpCode::StElemC:
            // Element stores have their own specialization path.
            return TypeSpecializeStElem(pInstr, src1Val, pDstVal);
        default:
            return false;
        }
    }
    // Make sure the srcs are specialized
    src1 = instr->GetSrc1();
    // Use original val when calling toFloat64 as this is what we'll use to try hoisting the fromVar if we're in a loop.
    this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);
    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            this->TypeSpecializeFloatDst(instr, valueToTransfer, src1Val, nullptr, pDstVal);
            if (!this->IsLoopPrePass())
            {
                // No opcode rewrite on prepass.
                instr->m_opcode = opcode;
            }
        }
    }
    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif
    return true;
}
  9393. // Unconditionally type-spec dst to float.
  9394. void
  9395. GlobOpt::TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal)
  9396. {
  9397. IR::Opnd* dst = instr->GetDst();
  9398. Assert(dst);
  9399. AssertMsg(dst->IsRegOpnd(), "What else?");
  9400. this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
  9401. if(valToTransfer)
  9402. {
  9403. *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
  9404. CurrentBlockData()->InsertNewValue(*pDstVal, dst);
  9405. }
  9406. else
  9407. {
  9408. *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Value, src2Value);
  9409. }
  9410. }
// Attempt to int-specialize an LdLen_A (length load).
// If (outside the loop prepass) the base is an array reg opnd with a hoisted
// length sym, the LdLen is rewritten into a plain Ld_I4 of that sym with no
// bailout; otherwise the instruction keeps/acquires a BailOutOnIrregularLength
// bailout and only the dst is int-specialized.
// Returns true if the instruction was specialized. On success, *src1ValueRef
// and *dstValueRef are updated, and *forceInvariantHoistingRef is set when the
// rewritten Ld_I4 should be hoisted.
bool
GlobOpt::TypeSpecializeLdLen(
    IR::Instr * *const instrRef,
    Value * *const src1ValueRef,
    Value * *const dstValueRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(instrRef);
    IR::Instr *&instr = *instrRef;
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(src1ValueRef);
    Value *&src1Value = *src1ValueRef;
    Assert(dstValueRef);
    Value *&dstValue = *dstValueRef;
    Assert(forceInvariantHoistingRef);
    bool &forceInvariantHoisting = *forceInvariantHoistingRef;

    // Bail out of the optimization entirely if LdLen int-spec is disabled for
    // this base value type.
    if(!DoLdLenIntSpec(instr, instr->GetSrc1()->GetValueType()))
    {
        return false;
    }

    IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
    if(!IsLoopPrePass())
    {
        IR::RegOpnd *const baseOpnd = instr->GetSrc1()->AsRegOpnd();
        if(baseOpnd->IsArrayRegOpnd())
        {
            StackSym *const lengthSym = baseOpnd->AsArrayRegOpnd()->LengthSym();
            if(lengthSym)
            {
                // The array's length is already tracked in a sym: replace the
                // LdLen_A with a direct int load of that sym, which cannot fail
                // and therefore needs no bailout info.
                CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Ld_I4;
                instr->ReplaceSrc1(IR::RegOpnd::New(lengthSym, lengthSym->GetType(), func));
                instr->ClearBailOutInfo();

                // Find the hoisted length value
                Value *const lengthValue = CurrentBlockData()->FindValue(lengthSym);
                Assert(lengthValue);
                src1Value = lengthValue;
                ValueInfo *const lengthValueInfo = lengthValue->GetValueInfo();
                IntConstantBounds lengthConstantBounds;
                AssertVerify(lengthValueInfo->TryGetIntConstantBounds(&lengthConstantBounds));
                Assert(lengthConstantBounds.LowerBound() >= 0);

                if (lengthValueInfo->GetSymStore() == lengthSym)
                {
                    // When type specializing the dst below, we will end up inserting lengthSym.u32 as symstore for a var
                    // Clear the symstore here, so that we dont end up with problems with copyprop later on
                    lengthValueInfo->SetSymStore(nullptr);
                }

                // Int-specialize, and transfer the value to the dst
                TypeSpecializeIntDst(
                    instr,
                    Js::OpCode::LdLen_A,
                    src1Value,
                    src1Value,
                    nullptr,
                    bailOutKind,
                    lengthConstantBounds.LowerBound(),
                    lengthConstantBounds.UpperBound(),
                    &dstValue);

                // Try to force hoisting the Ld_I4 so that the length will have an invariant sym store that can be
                // copy-propped. Invariant hoisting does not automatically hoist Ld_I4.
                forceInvariantHoisting = true;
                return true;
            }
        }

        // No hoisted length sym: keep the LdLen and make sure it carries an
        // irregular-length bailout (merging with an existing MarkTempObject
        // bailout if one is already attached).
        if (instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutKind() == IR::BailOutMarkTempObject);
            bailOutKind = IR::BailOutOnIrregularLength | IR::BailOutMarkTempObject;
            instr->SetBailOutKind(bailOutKind);
        }
        else
        {
            Assert(bailOutKind == IR::BailOutOnIrregularLength);
            GenerateBailAtOperation(&instr, bailOutKind);
        }
    }

    // Int-specialize the dst with conservative bounds [0, INT32_MAX]; a
    // regular length is always a non-negative int32.
    TypeSpecializeIntDst(
        instr,
        Js::OpCode::LdLen_A,
        nullptr,
        nullptr,
        nullptr,
        bailOutKind,
        0,
        INT32_MAX,
        &dstValue);
    return true;
}
// Attempt to float64-specialize a binary (two-source) instruction.
// Decides per-opcode whether specialization is profitable/safe, converts the
// sources to float64 (inserting FromVar with the appropriate bailout kind),
// and specializes the dst — to Float for arithmetic, or to Boolean for the
// compare (Cm*) opcodes. Returns true if the instruction was specialized.
bool
GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal)
{
    IR::Opnd *src1;
    IR::Opnd *src2;
    IR::Opnd *dst;
    // When true, the corresponding src's FromVar may accept undefined/null
    // (BailOutPrimitiveButString); when false it bails on anything non-number
    // (BailOutNumberOnly).
    bool allowUndefinedOrNullSrc1 = true;
    bool allowUndefinedOrNullSrc2 = true;
    bool skipSrc1 = false;
    bool skipSrc2 = false;
    bool skipDst = false;
    bool convertDstToBool = false;

    if (!this->DoFloatTypeSpec())
    {
        return false;
    }

    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::Sub_A:
        case Js::OpCode::Mul_A:
        case Js::OpCode::Div_A:
        case Js::OpCode::Expo_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            break;

        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotNeq_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            // Undef == Undef, but +Undef != +Undef
            // 0.0 != null, but 0.0 == +null
            //
            // So Bailout on anything but numbers for both src1 and src2
            allowUndefinedOrNullSrc1 = false;
            allowUndefinedOrNullSrc2 = false;
            break;

        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
            // Avoid if one source is known not to be a number.
            // (Relational compares coerce undefined/null to number, so those
            // values remain acceptable here, unlike the equality branches.)
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            break;

        case Js::OpCode::Add_A:
            // For Add, we need both sources to be Numbers, otherwise it could be a string concat
            if (!src1Val || !src2Val || !(src1Val->GetValueInfo()->IsLikelyNumber() && src2Val->GetValueInfo()->IsLikelyNumber()))
            {
                return false;
            }
            break;

        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Only the first source (the argument) needs specialization.
            skipSrc2 = true;
            skipDst = true;
            break;

        case Js::OpCode::CmEq_A:
        case Js::OpCode::CmSrEq_A:
        case Js::OpCode::CmNeq_A:
        case Js::OpCode::CmSrNeq_A:
        {
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            // Equality compares must see actual numbers (same undefined/null
            // issue as the equality branches above); result is a boolean.
            allowUndefinedOrNullSrc1 = false;
            allowUndefinedOrNullSrc2 = false;
            convertDstToBool = true;
            break;
        }

        case Js::OpCode::CmLe_A:
        case Js::OpCode::CmLt_A:
        case Js::OpCode::CmGe_A:
        case Js::OpCode::CmGt_A:
        {
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            convertDstToBool = true;
            break;
        }

        default:
            return false;
        }
    }
    else
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::InlineArrayPush:
            // Don't float-specialize a push of a float constant that matches
            // the native-array missing-item sentinel.
            bool isFloatConstMissingItem = src2Val->GetValueInfo()->IsFloatConstant();
            if(isFloatConstMissingItem)
            {
                FloatConstType floatValue = src2Val->GetValueInfo()->AsFloatConstant()->FloatValue();
                isFloatConstMissingItem = Js::SparseArraySegment<double>::IsMissingItem(&floatValue);
            }
            // Don't specialize if the element is not likelyNumber - we will surely bailout
            if(!(src2Val->GetValueInfo()->IsLikelyNumber()) || isFloatConstMissingItem)
            {
                return false;
            }
            // Only specialize the Second source - element
            skipSrc1 = true;
            skipDst = true;
            allowUndefinedOrNullSrc2 = false;
            break;
        }
    }

    // Make sure the srcs are specialized
    if(!skipSrc1)
    {
        src1 = instr->GetSrc1();
        this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, (allowUndefinedOrNullSrc1 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
    }

    if (!skipSrc2)
    {
        src2 = instr->GetSrc2();
        this->ToFloat64(instr, src2, this->currentBlock, src2Val, nullptr, (allowUndefinedOrNullSrc2 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
    }

    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            if (convertDstToBool)
            {
                // Compare opcodes produce a boolean var, not a float.
                *pDstVal = CreateDstUntransferredValue(ValueType::Boolean, instr, src1Val, src2Val);
                ToVarRegOpnd(dst->AsRegOpnd(), currentBlock);
            }
            else
            {
                *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Val, src2Val);
                AssertMsg(dst->IsRegOpnd(), "What else?");
                this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
            }
        }
    }

    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));

#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif

    return true;
}
// Attempt to type-specialize the source of a store-element (StElemI_A,
// StElemI_A_Strict, StElemC) into a likely typed array or likely native array.
// Chooses the element conversion type (int32 or float64) based on the array's
// object type and the current specialization state of the source sym, converts
// the source, and attaches/merges the appropriate array-access bailout.
// Returns true if the store was specialized (toType != TyVar).
bool
GlobOpt::TypeSpecializeStElem(IR::Instr ** pInstr, Value *src1Val, Value **pDstVal)
{
    IR::Instr *&instr = *pInstr;

    IR::RegOpnd *baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    ValueType baseValueType(baseOpnd->GetValueType());
    // Only proceed for likely optimized typed arrays / likely native arrays,
    // with the corresponding phase enabled and no stack-args optimization on
    // this instruction.
    if (instr->DoStackArgsOpt() ||
        (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
        (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
        !(baseValueType.IsLikelyOptimizedTypedArray() || baseValueType.IsLikelyNativeArray()))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't type specialize array access, because typed array type specialization is disabled, or base is not an optimized typed array.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because %s.\n"),
                          this->func->GetJITFunctionBody()->GetDisplayName(),
                          this->func->GetDebugNumberSet(debugStringBuffer),
                          Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                          baseValueTypeStr,
                          instr->DoStackArgsOpt() ?
                              _u("instruction uses the arguments object") :
                              _u("typed array type specialization is disabled, or base is not an optimized typed array"));
            Output::Flush();
        }

        return false;
    }

    Assert(instr->GetSrc1()->IsRegOpnd() || (src1Val && src1Val->GetValueInfo()->HasIntConstantValue()));

    StackSym *sym = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd()->m_sym : nullptr;

    // Only type specialize the source of store element if the source symbol is already type specialized to int or float.
    if (sym)
    {
        if (baseValueType.IsLikelyNativeArray())
        {
            // Gently coerce these src's into native if it seems likely to work.
            // Otherwise we can't use the fast path to store.
            // But don't try to put a float-specialized number into an int array this way.
            if (!(
                    CurrentBlockData()->IsInt32TypeSpecialized(sym) ||
                    (
                        src1Val &&
                        (
                            DoAggressiveIntTypeSpec()
                                ? src1Val->GetValueInfo()->IsLikelyInt()
                                : src1Val->GetValueInfo()->IsInt()
                        )
                    )
                ))
            {
                if (!(
                        CurrentBlockData()->IsFloat64TypeSpecialized(sym) ||
                        (src1Val && src1Val->GetValueInfo()->IsLikelyNumber())
                    ) ||
                    baseValueType.HasIntElements())
                {
                    return false;
                }
            }
        }
        else if (!CurrentBlockData()->IsInt32TypeSpecialized(sym) && !CurrentBlockData()->IsFloat64TypeSpecialized(sym))
        {
            GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because src is not type specialized.\n"));
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because src is not specialized.\n"),
                              this->func->GetJITFunctionBody()->GetDisplayName(),
                              this->func->GetDebugNumberSet(debugStringBuffer),
                              Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                              baseValueTypeStr);
                Output::Flush();
            }

            return false;
        }
    }

    // Storing the native-int-array missing-item sentinel as a constant would
    // corrupt the array; refuse to specialize in that case.
    int32 src1IntConstantValue;
    if(baseValueType.IsLikelyNativeIntArray() && src1Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue))
    {
        if(Js::SparseArraySegment<int32>::IsMissingItem(&src1IntConstantValue))
        {
            return false;
        }
    }

    // Note: doing ToVarUses to make sure we do get the int32 version of the index before trying to access its value in
    // ShouldExpectConventionalArrayIndexValue. Not sure why that never gave us a problem before.
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();

    // Make sure we use the int32 version of the index operand symbol, if available. Otherwise, ensure the var symbol is live (by
    // potentially inserting a ToVar).
    this->ToVarUses(instr, dst, /* isDst = */ true, nullptr);

    if (!ShouldExpectConventionalArrayIndexValue(dst))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because index is negative or likely not int.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because index is negative or likely not int.\n"),
                          this->func->GetJITFunctionBody()->GetDisplayName(),
                          this->func->GetDebugNumberSet(debugStringBuffer),
                          Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                          baseValueTypeStr);
            Output::Flush();
        }

        return false;
    }

    // Pick the conversion type for the stored value. TyVar means "did not
    // specialize". Native arrays also jump into the Int32Array/Float64Array
    // labels below via goto.
    IRType toType = TyVar;
    bool isLossyAllowed = true;
    IR::BailOutKind arrayBailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;
    switch(baseValueType.GetObjectType())
    {
    case ObjectType::Int8Array:
    case ObjectType::Uint8Array:
    case ObjectType::Int16Array:
    case ObjectType::Uint16Array:
    case ObjectType::Int32Array:
    case ObjectType::Int8VirtualArray:
    case ObjectType::Uint8VirtualArray:
    case ObjectType::Int16VirtualArray:
    case ObjectType::Uint16VirtualArray:
    case ObjectType::Int32VirtualArray:
    case ObjectType::Int8MixedArray:
    case ObjectType::Uint8MixedArray:
    case ObjectType::Int16MixedArray:
    case ObjectType::Uint16MixedArray:
    case ObjectType::Int32MixedArray:
    Int32Array:
        if (this->DoAggressiveIntTypeSpec() || this->DoFloatTypeSpec())
        {
            toType = TyInt32;
        }
        break;

    case ObjectType::Uint32Array:
    case ObjectType::Uint32VirtualArray:
    case ObjectType::Uint32MixedArray:
        // Uint32Arrays may store values that overflow int32. If the value being stored comes from a symbol that's
        // already losslessly type specialized to int32, we'll use it. Otherwise, if we only have a float64 specialized
        // value, we don't want to force bailout if it doesn't fit in int32. Instead, we'll emit conversion in the
        // lowerer, and handle overflow, if necessary.
        if (!sym || CurrentBlockData()->IsInt32TypeSpecialized(sym))
        {
            toType = TyInt32;
        }
        else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
        {
            toType = TyFloat64;
        }
        break;

    case ObjectType::Float32Array:
    case ObjectType::Float64Array:
    case ObjectType::Float32VirtualArray:
    case ObjectType::Float32MixedArray:
    case ObjectType::Float64VirtualArray:
    case ObjectType::Float64MixedArray:
    Float64Array:
        if (this->DoFloatTypeSpec())
        {
            toType = TyFloat64;
        }
        break;

    case ObjectType::Uint8ClampedArray:
    case ObjectType::Uint8ClampedVirtualArray:
    case ObjectType::Uint8ClampedMixedArray:
        // Uint8ClampedArray requires rounding (as opposed to truncation) of floating point values. If source symbol is
        // float type specialized, type specialize this instruction to float as well, and handle rounding in the
        // lowerer.
        if (!sym || CurrentBlockData()->IsInt32TypeSpecialized(sym))
        {
            toType = TyInt32;
            isLossyAllowed = false;
        }
        else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
        {
            toType = TyFloat64;
        }
        break;

    default:
        Assert(baseValueType.IsLikelyNativeArray());
        isLossyAllowed = false;
        arrayBailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
        if(baseValueType.HasIntElements())
        {
            goto Int32Array;
        }
        Assert(baseValueType.HasFloatElements());
        goto Float64Array;
    }

    if (toType != TyVar)
    {
        GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s.\n"),
                          this->func->GetJITFunctionBody()->GetDisplayName(),
                          this->func->GetDebugNumberSet(debugStringBuffer),
                          Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                          baseValueTypeStr,
                          toType == TyInt32 ? _u("int32") : _u("float64"));
            Output::Flush();
        }

        // Convert the stored value to the chosen type.
        IR::BailOutKind bailOutKind = ((toType == TyInt32) ? IR::BailOutIntOnly : IR::BailOutNumberOnly);
        this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, toType, bailOutKind, /* lossy = */ isLossyAllowed);

        if (!this->IsLoopPrePass())
        {
            bool bConvertToBailoutInstr = true;
            // Definite StElemC doesn't need bailout, because it can't fail or cause conversion.
            if (instr->m_opcode == Js::OpCode::StElemC && baseValueType.IsObject())
            {
                if (baseValueType.HasIntElements())
                {
                    //Native int array requires a missing element check & bailout
                    int32 min = INT32_MIN;
                    int32 max = INT32_MAX;

                    if (src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, false))
                    {
                        // Only need the bailout if the stored value's range can
                        // include the missing-item sentinel.
                        bConvertToBailoutInstr = ((min <= Js::JavascriptNativeIntArray::MissingItem) && (max >= Js::JavascriptNativeIntArray::MissingItem));
                    }
                }
                else
                {
                    bConvertToBailoutInstr = false;
                }
            }

            if (bConvertToBailoutInstr)
            {
                if(instr->HasBailOutInfo())
                {
                    const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
                    Assert(
                        (
                            !(oldBailOutKind & ~IR::BailOutKindBits) ||
                            (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                        ) &&
                        !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
                    if(arrayBailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
                    {
                        // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                        // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                        // bails out for the right reason.
                        instr->SetBailOutKind(
                            arrayBailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
                    }
                    else
                    {
                        // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                        // calls to occur, so it must be merged in to eliminate generating the helper call.
                        Assert(arrayBailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                        instr->SetBailOutKind(oldBailOutKind | arrayBailOutKind);
                    }
                }
                else
                {
                    GenerateBailAtOperation(&instr, arrayBailOutKind);
                }
            }
        }
    }
    else
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because the source was not already specialized.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because of array type.\n"),
                          this->func->GetJITFunctionBody()->GetDisplayName(),
                          this->func->GetDebugNumberSet(debugStringBuffer),
                          Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                          baseValueTypeStr);
            Output::Flush();
        }
    }

    return toType != TyVar;
}
  9951. IR::Instr *
  9952. GlobOpt::ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val)
  9953. {
  9954. Sym *sym;
  9955. switch (opnd->GetKind())
  9956. {
  9957. case IR::OpndKindReg:
  9958. if (!isDst && !CurrentBlockData()->liveVarSyms->Test(opnd->AsRegOpnd()->m_sym->m_id))
  9959. {
  9960. instr = this->ToVar(instr, opnd->AsRegOpnd(), this->currentBlock, val, true);
  9961. }
  9962. break;
  9963. case IR::OpndKindSym:
  9964. sym = opnd->AsSymOpnd()->m_sym;
  9965. if (sym->IsPropertySym() && !CurrentBlockData()->liveVarSyms->Test(sym->AsPropertySym()->m_stackSym->m_id)
  9966. && sym->AsPropertySym()->m_stackSym->IsVar())
  9967. {
  9968. StackSym *propertyBase = sym->AsPropertySym()->m_stackSym;
  9969. IR::RegOpnd *newOpnd = IR::RegOpnd::New(propertyBase, TyVar, instr->m_func);
  9970. instr = this->ToVar(instr, newOpnd, this->currentBlock, CurrentBlockData()->FindValue(propertyBase), true);
  9971. }
  9972. break;
  9973. case IR::OpndKindIndir:
  9974. IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
  9975. if (!CurrentBlockData()->liveVarSyms->Test(baseOpnd->m_sym->m_id))
  9976. {
  9977. instr = this->ToVar(instr, baseOpnd, this->currentBlock, CurrentBlockData()->FindValue(baseOpnd->m_sym), true);
  9978. }
  9979. IR::RegOpnd *indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  9980. if (indexOpnd && !indexOpnd->m_sym->IsTypeSpec())
  9981. {
  9982. instr = ToTypeSpecIndex(instr, indexOpnd, opnd->AsIndirOpnd());
  9983. }
  9984. break;
  9985. }
  9986. return instr;
  9987. }
// Int-specialize the index operand of an indir access (or the src1 of certain
// instructions when indirOpnd is null). If the index is (likely) int and type
// spec is allowed, converts it to int32 and, when the index is provably
// non-negative, retypes the specialized opnd as uint32. Otherwise falls back
// to ensuring the var version of the index sym is live. Returns the (possibly
// new) instruction to continue processing from.
IR::Instr *
GlobOpt::ToTypeSpecIndex(IR::Instr * instr, IR::RegOpnd * indexOpnd, IR::IndirOpnd * indirOpnd)
{
    Assert(indirOpnd != nullptr || indexOpnd == instr->GetSrc1());

    bool isGetterOrSetter = instr->m_opcode == Js::OpCode::InitGetElemI ||
        instr->m_opcode == Js::OpCode::InitSetElemI ||
        instr->m_opcode == Js::OpCode::InitClassMemberGetComputedName ||
        instr->m_opcode == Js::OpCode::InitClassMemberSetComputedName;

    if (!isGetterOrSetter // typespec is disabled for getters, setters
        && (indexOpnd->GetValueType().IsInt()
            ? !IsTypeSpecPhaseOff(func)
            // Only likely-int: requires aggressive int type spec (lossy FromVar).
            : indexOpnd->GetValueType().IsLikelyInt() && DoAggressiveIntTypeSpec())
        && !GetIsAsmJSFunc()) // typespec is disabled for asmjs
    {
        StackSym *const indexVarSym = indexOpnd->m_sym;
        Value *const indexValue = CurrentBlockData()->FindValue(indexVarSym);
        Assert(indexValue);
        Assert(indexValue->GetValueInfo()->IsLikelyInt());
        ToInt32(instr, indexOpnd, currentBlock, indexValue, indirOpnd, false);
        Assert(indexValue->GetValueInfo()->IsInt() || IsLoopPrePass());
        if (!IsLoopPrePass())
        {
            // Re-fetch the opnd: ToInt32 above replaced it with the int32
            // type-spec version.
            IR::Opnd * intOpnd = indirOpnd ? indirOpnd->GetIndexOpnd() : instr->GetSrc1();
            if (intOpnd != nullptr)
            {
                Assert(!intOpnd->IsRegOpnd() || intOpnd->AsRegOpnd()->m_sym->IsTypeSpec());
                IntConstantBounds indexConstantBounds;
                AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
                // If the index is provably >= 0, mark it uint32 so later
                // phases can treat it as an unsigned array index.
                if (ValueInfo::IsGreaterThanOrEqualTo(
                        indexValue,
                        indexConstantBounds.LowerBound(),
                        indexConstantBounds.UpperBound(),
                        nullptr,
                        0,
                        0))
                {
                    intOpnd->SetType(TyUint32);
                }
            }
        }
    }
    else if (!CurrentBlockData()->liveVarSyms->Test(indexOpnd->m_sym->m_id))
    {
        // Couldn't int-specialize; make sure the var version of the index sym
        // is at least live.
        instr = this->ToVar(instr, indexOpnd, this->currentBlock, CurrentBlockData()->FindValue(indexOpnd->m_sym), true);
    }
    return instr;
}
  10035. IR::Instr *
  10036. GlobOpt::ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *value, bool needsUpdate)
  10037. {
  10038. IR::Instr *newInstr;
  10039. StackSym *varSym = regOpnd->m_sym;
  10040. if (IsTypeSpecPhaseOff(this->func))
  10041. {
  10042. return instr;
  10043. }
  10044. if (this->IsLoopPrePass())
  10045. {
  10046. block->globOptData.liveVarSyms->Set(varSym->m_id);
  10047. return instr;
  10048. }
  10049. if (block->globOptData.liveVarSyms->Test(varSym->m_id))
  10050. {
  10051. // Already live, nothing to do
  10052. return instr;
  10053. }
  10054. if (!varSym->IsVar())
  10055. {
  10056. Assert(!varSym->IsTypeSpec());
  10057. // Leave non-vars alone.
  10058. return instr;
  10059. }
  10060. Assert(block->globOptData.IsTypeSpecialized(varSym));
  10061. if (!value)
  10062. {
  10063. value = block->globOptData.FindValue(varSym);
  10064. }
  10065. ValueInfo *valueInfo = value ? value->GetValueInfo() : nullptr;
  10066. if(valueInfo && valueInfo->IsInt())
  10067. {
  10068. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  10069. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  10070. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  10071. // lossy state.
  10072. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10073. }
  10074. IRType fromType = TyIllegal;
  10075. StackSym *typeSpecSym = nullptr;
  10076. if (block->globOptData.liveInt32Syms->Test(varSym->m_id) && !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id))
  10077. {
  10078. fromType = TyInt32;
  10079. typeSpecSym = varSym->GetInt32EquivSym(this->func);
  10080. Assert(valueInfo);
  10081. Assert(valueInfo->IsInt());
  10082. }
  10083. else if (block->globOptData.liveFloat64Syms->Test(varSym->m_id))
  10084. {
  10085. fromType = TyFloat64;
  10086. typeSpecSym = varSym->GetFloat64EquivSym(this->func);
  10087. // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any non-number
  10088. // value, even ones that have already been generated before. Float-specialized non-number values cannot be converted
  10089. // back to Var since they will not go back to the original non-number value. The dead-store pass will update the bailout
  10090. // kind on already-generated FromVars based on this bit.
  10091. typeSpecSym->m_requiresBailOnNotNumber = true;
  10092. // A previous float conversion may have used BailOutPrimitiveButString, which does not change the value type to say
  10093. // definitely float, since it can also be a non-string primitive. The convert back to Var though, will cause that
  10094. // bailout kind to be changed to BailOutNumberOnly in the dead-store phase, so from the point of the initial conversion
  10095. // to float, that the value is definitely number. Since we don't know where the FromVar is, change the value type here.
  10096. if(valueInfo)
  10097. {
  10098. if(!valueInfo->IsNumber())
  10099. {
  10100. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  10101. ChangeValueInfo(block, value, valueInfo);
  10102. regOpnd->SetValueType(valueInfo->Type());
  10103. }
  10104. }
  10105. else
  10106. {
  10107. value = NewGenericValue(ValueType::Float);
  10108. valueInfo = value->GetValueInfo();
  10109. block->globOptData.SetValue(value, varSym);
  10110. regOpnd->SetValueType(valueInfo->Type());
  10111. }
  10112. }
  10113. else
  10114. {
  10115. Assert(UNREACHED);
  10116. }
  10117. AssertOrFailFast(valueInfo);
  10118. int32 intConstantValue;
  10119. if (valueInfo->TryGetIntConstantValue(&intConstantValue))
  10120. {
  10121. // Lower will tag or create a number directly
  10122. newInstr = IR::Instr::New(Js::OpCode::LdC_A_I4, regOpnd,
  10123. IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func), instr->m_func);
  10124. }
  10125. else
  10126. {
  10127. IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, fromType, instr->m_func);
  10128. Js::OpCode opcode = Js::OpCode::ToVar;
  10129. regNew->SetIsJITOptimizedReg(true);
  10130. newInstr = IR::Instr::New(opcode, regOpnd, regNew, instr->m_func);
  10131. }
  10132. newInstr->SetByteCodeOffset(instr);
  10133. newInstr->GetDst()->AsRegOpnd()->SetIsJITOptimizedReg(true);
  10134. ValueType valueType = valueInfo->Type();
  10135. if(fromType == TyInt32)
  10136. {
  10137. #if !INT32VAR // All 32-bit ints are taggable on 64-bit architectures
  10138. IntConstantBounds constantBounds;
  10139. AssertVerify(valueInfo->TryGetIntConstantBounds(&constantBounds));
  10140. if(constantBounds.IsTaggable())
  10141. #endif
  10142. {
  10143. // The value is within the taggable range, so set the opnd value types to TaggedInt to avoid the overflow check
  10144. valueType = ValueType::GetTaggedInt();
  10145. }
  10146. }
  10147. newInstr->GetDst()->SetValueType(valueType);
  10148. newInstr->GetSrc1()->SetValueType(valueType);
  10149. IR::Instr *insertAfterInstr = instr->m_prev;
  10150. if (instr == block->GetLastInstr() &&
  10151. (instr->IsBranchInstr() || instr->m_opcode == Js::OpCode::BailTarget))
  10152. {
  10153. // Don't insert code between the branch and the preceding ByteCodeUses instrs...
  10154. while(insertAfterInstr->m_opcode == Js::OpCode::ByteCodeUses)
  10155. {
  10156. insertAfterInstr = insertAfterInstr->m_prev;
  10157. }
  10158. }
  10159. block->InsertInstrAfter(newInstr, insertAfterInstr);
  10160. block->globOptData.liveVarSyms->Set(varSym->m_id);
  10161. GOPT_TRACE_OPND(regOpnd, _u("Converting to var\n"));
  10162. if (block->loop)
  10163. {
  10164. Assert(!this->IsLoopPrePass());
  10165. this->TryHoistInvariant(newInstr, block, value, value, nullptr, false);
  10166. }
  10167. if (needsUpdate)
  10168. {
  10169. // Make sure that the kill effect of the ToVar instruction is tracked and that the kill of a property
  10170. // type is reflected in the current instruction.
  10171. this->ProcessKills(newInstr);
  10172. this->ValueNumberObjectType(newInstr->GetDst(), newInstr);
  10173. if (instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  10174. {
  10175. // Reprocess the load source. We need to reset the PropertySymOpnd fields first.
  10176. IR::PropertySymOpnd *propertySymOpnd = instr->GetSrc1()->AsPropertySymOpnd();
  10177. if (propertySymOpnd->IsTypeCheckSeqCandidate())
  10178. {
  10179. propertySymOpnd->SetTypeChecked(false);
  10180. propertySymOpnd->SetTypeAvailable(false);
  10181. propertySymOpnd->SetWriteGuardChecked(false);
  10182. }
  10183. this->FinishOptPropOp(instr, propertySymOpnd);
  10184. instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
  10185. }
  10186. }
  10187. return instr;
  10188. }
  10189. IR::Instr *
  10190. GlobOpt::ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy)
  10191. {
  10192. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyInt32, IR::BailOutIntOnly, lossy);
  10193. }
  10194. IR::Instr *
  10195. GlobOpt::ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind)
  10196. {
  10197. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyFloat64, bailOutKind);
  10198. }
// ToTypeSpecUse: convert a use of 'opnd' to the type-specialized representation
// 'toType' (TyInt32 or TyFloat64) in 'block', inserting a FromVar / Conv_Prim /
// constant-load instruction as needed, and keeping the block's liveness
// bit-vectors and the operand's value info in sync.
//
//   instr             - instruction whose src is being specialized; may be null,
//                       in which case only a load of the type-spec sym is inserted.
//   opnd              - the operand to convert (a reg opnd, or a constant addr opnd).
//   val               - the operand's tracked value (looked up if null and opnd is a reg).
//   indir             - when non-null, 'opnd' is the index opnd of this indir.
//   bailOutKind       - bailout kind to use if the conversion must guard.
//   lossy             - whether a lossy (ToInt32-style) conversion is acceptable.
//   insertBeforeInstr - optional explicit insertion point.
//
// Returns 'instr' when the use was satisfied without inserting a conversion (or
// during the loop prepass), otherwise the newly created conversion instruction.
IR::Instr *
GlobOpt::ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
{
    Assert(bailOutKind != IR::BailOutInvalid);
    IR::Instr *newInstr;
    if (!val && opnd->IsRegOpnd())
    {
        val = block->globOptData.FindValue(opnd->AsRegOpnd()->m_sym);
    }
    ValueInfo *valueInfo = val ? val->GetValueInfo() : nullptr;
    bool needReplaceSrc = false;
    bool updateBlockLastInstr = false;

    if (instr)
    {
        needReplaceSrc = true;
        if (!insertBeforeInstr)
        {
            insertBeforeInstr = instr;
        }
    }
    else if (!insertBeforeInstr)
    {
        // Insert it at the end of the block
        insertBeforeInstr = block->GetLastInstr();
        if (insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
        {
            // Don't insert code between the branch and the preceding ByteCodeUses instrs...
            while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
            {
                insertBeforeInstr = insertBeforeInstr->m_prev;
            }
        }
        else
        {
            insertBeforeInstr = insertBeforeInstr->m_next;
            updateBlockLastInstr = true;
        }
    }

    // Int constant values will be propagated into the instruction. For ArgOut_A_InlineBuiltIn, there's no benefit from
    // const-propping, so those are excluded.
    if (opnd->IsRegOpnd() &&
        !(
            valueInfo &&
            (valueInfo->HasIntConstantValue() || valueInfo->IsFloatConstant()) &&
            (!instr || instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
        ))
    {
        // ---- Register path: convert via FromVar / Conv_Prim on the sym ----
        IR::RegOpnd *regSrc = opnd->AsRegOpnd();
        StackSym *varSym = regSrc->m_sym;
        Js::OpCode opcode = Js::OpCode::FromVar;

        if (varSym->IsTypeSpec() || !block->globOptData.liveVarSyms->Test(varSym->m_id))
        {
            // Conversion between int32 and float64
            if (varSym->IsTypeSpec())
            {
                varSym = varSym->GetVarEquivSym(this->func);
            }
            opcode = Js::OpCode::Conv_Prim;
        }

        Assert(block->globOptData.liveVarSyms->Test(varSym->m_id) || block->globOptData.IsTypeSpecialized(varSym));

        StackSym *typeSpecSym = nullptr;
        BOOL isLive = FALSE;
        BVSparse<JitArenaAllocator> *livenessBv = nullptr;

        if(valueInfo && valueInfo->IsInt())
        {
            // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
            // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
            // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
            // lossy state.
            block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
        }

        if (toType == TyInt32)
        {
            // Need to determine whether the conversion is actually lossy or lossless. If the value is an int, then it's a
            // lossless conversion despite the type of conversion requested. The liveness of the converted int32 sym needs to be
            // set to reflect the actual type of conversion done. Also, a lossless conversion needs the value to determine
            // whether the conversion may need to bail out.
            Assert(valueInfo);
            if(valueInfo->IsInt())
            {
                lossy = false;
            }
            else
            {
                Assert(IsLoopPrePass() || !block->globOptData.IsInt32TypeSpecialized(varSym));
            }

            // Live only counts as "already usable" if the existing int32 form's lossiness matches the request.
            livenessBv = block->globOptData.liveInt32Syms;
            isLive = livenessBv->Test(varSym->m_id) && (lossy || !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id));
            if (this->IsLoopPrePass())
            {
                // Prepass: only record liveness; no instructions are inserted.
                if (!isLive)
                {
                    livenessBv->Set(varSym->m_id);
                    if (lossy)
                    {
                        block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
                    }
                    else
                    {
                        block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
                    }
                }
                return instr;
            }

            typeSpecSym = varSym->GetInt32EquivSym(this->func);

            if (!isLive)
            {
                if (!opnd->IsVar() ||
                    !block->globOptData.liveVarSyms->Test(varSym->m_id) ||
                    (block->globOptData.liveFloat64Syms->Test(varSym->m_id) && valueInfo && valueInfo->IsLikelyFloat()))
                {
                    // The source will be the float64 form, not the var.
                    Assert(block->globOptData.liveFloat64Syms->Test(varSym->m_id));

                    if(!lossy && !valueInfo->IsInt())
                    {
                        // Shouldn't try to do a lossless conversion from float64 to int32 when the value is not known to be an
                        // int. There are cases where we need more than two passes over loops to flush out all dependencies.
                        // It's possible for the loop prepass to think that a sym s1 remains an int because it acquires the
                        // value of another sym s2 that is an int in the prepass at that time. However, s2 can become a float
                        // later in the loop body, in which case s1 would become a float on the second iteration of the loop. By
                        // that time, we would have already committed to having s1 live as a lossless int on entry into the
                        // loop, and we end up having to compensate by doing a lossless conversion from float to int, which will
                        // need a bailout and will most likely bail out.
                        //
                        // If s2 becomes a var instead of a float, then the compensation is legal although not ideal. After
                        // enough bailouts, rejit would be triggered with aggressive int type spec turned off. For the
                        // float-to-int conversion though, there's no point in emitting a bailout because we already know that
                        // the value is a float and has high probability of bailing out (whereas a var has a chance to be a
                        // tagged int), and so currently lossless conversion from float to int with bailout is not supported.
                        //
                        // So, treating this case as a compile-time bailout. The exception will trigger the jit work item to be
                        // restarted with aggressive int type specialization disabled.
                        if(bailOutKind == IR::BailOutExpectingInteger)
                        {
                            Assert(IsSwitchOptEnabledForIntTypeSpec());
                            throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
                        }
                        else
                        {
                            Assert(DoAggressiveIntTypeSpec());
                            if(PHASE_TRACE(Js::BailOutPhase, this->func))
                            {
                                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                                // NOTE(review): varSym->m_id has no matching format specifier in this format string.
                                Output::Print(
                                    _u("BailOut (compile-time): function: %s (%s) varSym: "),
                                    this->func->GetJITFunctionBody()->GetDisplayName(),
                                    this->func->GetDebugNumberSet(debugStringBuffer),
                                    varSym->m_id);
#if DBG_DUMP
                                varSym->Dump();
#else
                                Output::Print(_u("s%u"), varSym->m_id);
#endif
                                if(varSym->HasByteCodeRegSlot())
                                {
                                    Output::Print(_u(" byteCodeReg: R%u"), varSym->GetByteCodeRegSlot());
                                }
                                Output::Print(_u(" (lossless conversion from float64 to int32)\n"));
                                Output::Flush();
                            }

                            if(!DoAggressiveIntTypeSpec())
                            {
                                // Aggressive int type specialization is already off for some reason. Prevent trying to rejit again
                                // because it won't help and the same thing will happen again. Just abort jitting this function.
                                if(PHASE_TRACE(Js::BailOutPhase, this->func))
                                {
                                    Output::Print(_u(" Aborting JIT because AggressiveIntTypeSpec is already off\n"));
                                    Output::Flush();
                                }
                                throw Js::OperationAbortedException();
                            }
                            throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
                        }
                    }

                    if(opnd->IsVar())
                    {
                        // Re-point the source at the float64 equivalent sym; Conv_Prim will do float64 -> int32.
                        regSrc->SetType(TyFloat64);
                        regSrc->m_sym = varSym->GetFloat64EquivSym(this->func);
                        opcode = Js::OpCode::Conv_Prim;
                    }
                    else
                    {
                        Assert(regSrc->IsFloat64());
                        Assert(regSrc->m_sym->IsFloat64());
                        Assert(opcode == Js::OpCode::Conv_Prim);
                    }
                }
            }
            GOPT_TRACE_OPND(regSrc, _u("Converting to int32\n"));
        }
        else if (toType == TyFloat64)
        {
            // float64
            typeSpecSym = varSym->GetFloat64EquivSym(this->func);
            if(!IsLoopPrePass() && typeSpecSym->m_requiresBailOnNotNumber && block->globOptData.IsFloat64TypeSpecialized(varSym))
            {
                // This conversion is already protected by a BailOutNumberOnly bailout (or at least it will be after the
                // dead-store phase). Since 'requiresBailOnNotNumber' is not flow-based, change the value to definitely float.
                if(valueInfo)
                {
                    if(!valueInfo->IsNumber())
                    {
                        valueInfo = valueInfo->SpecializeToFloat64(alloc);
                        ChangeValueInfo(block, val, valueInfo);
                        opnd->SetValueType(valueInfo->Type());
                    }
                }
                else
                {
                    val = NewGenericValue(ValueType::Float);
                    valueInfo = val->GetValueInfo();
                    block->globOptData.SetValue(val, varSym);
                    opnd->SetValueType(valueInfo->Type());
                }
            }

            if(bailOutKind == IR::BailOutNumberOnly)
            {
                if(!IsLoopPrePass())
                {
                    // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any
                    // non-number value, even ones that have already been generated before. The dead-store pass will update the
                    // bailout kind on already-generated FromVars based on this bit.
                    typeSpecSym->m_requiresBailOnNotNumber = true;
                }
            }
            else if(typeSpecSym->m_requiresBailOnNotNumber)
            {
                // 'm_requiresBailOnNotNumber' was set by another conversion of this sym;
                // upgrade this conversion's bailout kind to match.
                Assert(bailOutKind == IR::BailOutPrimitiveButString);
                bailOutKind = IR::BailOutNumberOnly;
            }

            livenessBv = block->globOptData.liveFloat64Syms;
            isLive = livenessBv->Test(varSym->m_id);
            if (this->IsLoopPrePass())
            {
                // Prepass: record liveness and, if the value is loop-invariant, ask for the
                // float64 form to be made live in the loop landing pad.
                if(!isLive)
                {
                    livenessBv->Set(varSym->m_id);
                }

                if (this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
                {
                    this->prePassLoop->forceFloat64SymsOnEntry->Set(varSym->m_id);
                }
                else
                {
                    Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
                    if (symStore && symStore != varSym
                        && this->OptIsInvariant(symStore, block, this->prePassLoop, block->globOptData.FindValue(symStore), false, true))
                    {
                        // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
                        // outside the loop.
                        this->prePassLoop->forceFloat64SymsOnEntry->Set(symStore->m_id);
                    }
                }
                return instr;
            }

            if (!isLive && regSrc->IsVar())
            {
                if (!block->globOptData.liveVarSyms->Test(varSym->m_id) ||
                    (
                        block->globOptData.liveInt32Syms->Test(varSym->m_id) &&
                        !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
                        valueInfo &&
                        valueInfo->IsLikelyInt()
                    ))
                {
                    // Prefer converting from the lossless int32 form (Conv_Prim int32 -> float64).
                    Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
                    Assert(!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)); // Shouldn't try to convert a lossy int32 to anything
                    regSrc->SetType(TyInt32);
                    regSrc->m_sym = varSym->GetInt32EquivSym(this->func);
                    opcode = Js::OpCode::Conv_Prim;
                }
            }
            GOPT_TRACE_OPND(regSrc, _u("Converting to float64\n"));
        }

        bool needLoad = false;

        if (needReplaceSrc)
        {
            bool wasDead = regSrc->GetIsDead();
            // needReplaceSrc means we are type specializing a use, and need to replace the src on the instr
            if (!isLive)
            {
                needLoad = true;
                // ReplaceSrc will delete it.
                regSrc = regSrc->Copy(instr->m_func)->AsRegOpnd();
            }
            IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
            if(valueInfo)
            {
                regNew->SetValueType(valueInfo->Type());
                regNew->m_wasNegativeZeroPreventedByBailout = valueInfo->WasNegativeZeroPreventedByBailout();
            }
            regNew->SetIsDead(wasDead);
            regNew->SetIsJITOptimizedReg(true);

            this->CaptureByteCodeSymUses(instr);
            if (indir == nullptr)
            {
                instr->ReplaceSrc(opnd, regNew);
            }
            else
            {
                indir->ReplaceIndexOpnd(regNew);
            }
            opnd = regNew;

            if (!needLoad)
            {
                Assert(isLive);
                return instr;
            }
        }
        else
        {
            // We just need to insert a load of a type spec sym
            if(isLive)
            {
                return instr;
            }

            // Insert it before the specified instruction
            instr = insertBeforeInstr;
        }

        IR::RegOpnd *regDst = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
        bool isBailout = false;
        bool isHoisted = false;
        // block->next being a loop header means this block is that loop's landing pad (asserted below).
        bool isInLandingPad = (block->next && !block->next->isDeleted && block->next->isLoopHeader);

        if (isInLandingPad)
        {
            Loop *loop = block->next->loop;
            Assert(loop && loop->landingPad == block);
            Assert(loop->bailOutInfo);
        }

        if (opcode == Js::OpCode::FromVar)
        {
            // Decide whether the FromVar needs a bailout guard.
            if (toType == TyInt32)
            {
                Assert(valueInfo);
                if (lossy)
                {
                    if (!valueInfo->IsPrimitive() && !block->globOptData.IsTypeSpecialized(varSym))
                    {
                        // Lossy conversions to int32 on non-primitive values may have implicit calls to toString or valueOf, which
                        // may be overridden to have a side effect. The side effect needs to happen every time the conversion is
                        // supposed to happen, so the resulting lossy int32 value cannot be reused. Bail out on implicit calls.
                        Assert(DoLossyIntTypeSpec());
                        bailOutKind = IR::BailOutOnNotPrimitive;
                        isBailout = true;
                    }
                }
                else if (!valueInfo->IsInt())
                {
                    // The operand is likely an int (hence the request to convert to int), so bail out if it's not an int. Only
                    // bail out if a lossless conversion to int is requested. Lossy conversions to int such as in (a | 0) don't
                    // need to bail out.
                    if (bailOutKind == IR::BailOutExpectingInteger)
                    {
                        Assert(IsSwitchOptEnabledForIntTypeSpec());
                    }
                    else
                    {
                        Assert(DoAggressiveIntTypeSpec());
                    }
                    isBailout = true;
                }
            }
            else if (toType == TyFloat64 &&
                (!valueInfo || !valueInfo->IsNumber()))
            {
                // Bailout if converting vars to float if we can't prove they are floats:
                //      x = str + float;  -> need to bailout if str is a string
                //
                //      x = obj * 0.1;
                //      y = obj * 0.2;  -> if obj has valueof, we'll only call valueof once on the FromVar conversion...
                Assert(bailOutKind != IR::BailOutInvalid);
                isBailout = true;
            }
        }

        if (isBailout)
        {
            if (isInLandingPad)
            {
                // In a landing pad the conversion shares the loop's bail-out target.
                Loop *loop = block->next->loop;
                this->EnsureBailTarget(loop);
                instr = loop->bailOutInfo->bailOutInstr;
                updateBlockLastInstr = false;
                newInstr = IR::BailOutInstr::New(opcode, bailOutKind, loop->bailOutInfo, instr->m_func);
                newInstr->SetDst(regDst);
                newInstr->SetSrc1(regSrc);
            }
            else
            {
                newInstr = IR::BailOutInstr::New(opcode, regDst, regSrc, bailOutKind, instr, instr->m_func);
            }
        }
        else
        {
            newInstr = IR::Instr::New(opcode, regDst, regSrc, instr->m_func);
        }

        newInstr->SetByteCodeOffset(instr);
        instr->InsertBefore(newInstr);
        if (updateBlockLastInstr)
        {
            block->SetLastInstr(newInstr);
        }
        regDst->SetIsJITOptimizedReg(true);
        newInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);

        ValueInfo *const oldValueInfo = valueInfo;
        if(valueInfo)
        {
            newInstr->GetSrc1()->SetValueType(valueInfo->Type());
        }
        if(isBailout)
        {
            // The guard proves the value's type; specialize the value info accordingly.
            Assert(opcode == Js::OpCode::FromVar);
            if(toType == TyInt32)
            {
                Assert(valueInfo);
                if(!lossy)
                {
                    Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
                    valueInfo = valueInfo->SpecializeToInt32(alloc, isPerformingLoopBackEdgeCompensation);
                    ChangeValueInfo(nullptr, val, valueInfo);

                    int32 intConstantValue;
                    if(indir && needReplaceSrc && valueInfo->TryGetIntConstantValue(&intConstantValue))
                    {
                        // A likely-int value can have constant bounds due to conditional branches narrowing its range. Now that
                        // the sym has been proven to be an int, the likely-int value, after specialization, will be constant.
                        // Replace the index opnd in the indir with an offset.
                        Assert(opnd == indir->GetIndexOpnd());
                        Assert(indir->GetScale() == 0);
                        indir->UnlinkIndexOpnd()->Free(instr->m_func);
                        opnd = nullptr;
                        indir->SetOffset(intConstantValue);
                    }
                }
            }
            else if (toType == TyFloat64)
            {
                if(bailOutKind == IR::BailOutNumberOnly)
                {
                    if(valueInfo)
                    {
                        valueInfo = valueInfo->SpecializeToFloat64(alloc);
                        ChangeValueInfo(block, val, valueInfo);
                    }
                    else
                    {
                        val = NewGenericValue(ValueType::Float);
                        valueInfo = val->GetValueInfo();
                        block->globOptData.SetValue(val, varSym);
                    }
                }
            }
            else
            {
                Assert(UNREACHED);
            }
        }
        if(valueInfo)
        {
            newInstr->GetDst()->SetValueType(valueInfo->Type());
            if(needReplaceSrc && opnd)
            {
                opnd->SetValueType(valueInfo->Type());
            }
        }

        if (block->loop)
        {
            Assert(!this->IsLoopPrePass());
            isHoisted = this->TryHoistInvariant(newInstr, block, val, val, nullptr, false, lossy, false, bailOutKind);
        }

        if (isBailout)
        {
            if (!isHoisted && !isInLandingPad)
            {
                if(valueInfo)
                {
                    // Since this is a pre-op bailout, the old value info should be used for the purposes of bailout. For
                    // instance, the value info could be LikelyInt but with a constant range. Once specialized to int, the value
                    // info would be an int constant. However, the int constant is only guaranteed if the value is actually an
                    // int, which this conversion is verifying, so bailout cannot assume the constant value.
                    if(oldValueInfo)
                    {
                        val->SetValueInfo(oldValueInfo);
                    }
                    else
                    {
                        block->globOptData.ClearSymValue(varSym);
                    }
                }

                // Fill in bail out info if the FromVar is a bailout instr, and it wasn't hoisted as invariant.
                // If it was hoisted, the invariant code will fill out the bailout info with the loop landing pad bailout info.
                this->FillBailOutInfo(block, newInstr);

                if(valueInfo)
                {
                    // Restore the new value info after filling the bailout info
                    if(oldValueInfo)
                    {
                        val->SetValueInfo(valueInfo);
                    }
                    else
                    {
                        block->globOptData.SetValue(val, varSym);
                    }
                }
            }
        }

        // Now that we've captured the liveness in the bailout info, we can mark this as live.
        // This type specialized sym isn't live if the FromVar bails out.
        livenessBv->Set(varSym->m_id);
        if(toType == TyInt32)
        {
            if(lossy)
            {
                block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
            }
            else
            {
                block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
            }
        }
    }
    else
    {
        // ---- Constant path: propagate the constant value directly ----
        Assert(valueInfo);

        if(opnd->IsRegOpnd() && valueInfo->IsInt())
        {
            // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
            // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
            // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
            // lossy state.
            block->globOptData.liveLossyInt32Syms->Clear(opnd->AsRegOpnd()->m_sym->m_id);
            if(toType == TyInt32)
            {
                lossy = false;
            }
        }

        if (this->IsLoopPrePass())
        {
            // Prepass: only record liveness for the requested representation.
            if(opnd->IsRegOpnd())
            {
                StackSym *const sym = opnd->AsRegOpnd()->m_sym;
                if(toType == TyInt32)
                {
                    Assert(!sym->IsTypeSpec());
                    block->globOptData.liveInt32Syms->Set(sym->m_id);
                    if(lossy)
                    {
                        block->globOptData.liveLossyInt32Syms->Set(sym->m_id);
                    }
                    else
                    {
                        block->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
                    }
                }
                else
                {
                    Assert(toType == TyFloat64);
                    AnalysisAssert(instr);
                    StackSym *const varSym = sym->IsTypeSpec() ? sym->GetVarEquivSym(instr->m_func) : sym;
                    block->globOptData.liveFloat64Syms->Set(varSym->m_id);
                }
            }
            return instr;
        }

        if (!needReplaceSrc)
        {
            instr = insertBeforeInstr;
        }

        // Materialize the constant as an opnd of the requested type.
        IR::Opnd *constOpnd;
        int32 intConstantValue;
        if(valueInfo->TryGetIntConstantValue(&intConstantValue))
        {
            if(toType == TyInt32)
            {
                constOpnd = IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func);
            }
            else
            {
                Assert(toType == TyFloat64);
                constOpnd = IR::FloatConstOpnd::New(static_cast<FloatConstType>(intConstantValue), TyFloat64, instr->m_func);
            }
        }
        else if(valueInfo->IsFloatConstant())
        {
            const FloatConstType floatValue = valueInfo->AsFloatConstant()->FloatValue();
            if(toType == TyInt32)
            {
                Assert(lossy);
                constOpnd =
                    IR::IntConstOpnd::New(
                        Js::JavascriptMath::ToInt32(floatValue),
                        TyInt32,
                        instr->m_func);
            }
            else
            {
                Assert(toType == TyFloat64);
                constOpnd = IR::FloatConstOpnd::New(floatValue, TyFloat64, instr->m_func);
            }
        }
        else
        {
            Assert(opnd->IsVar());
            Assert(opnd->IsAddrOpnd());
            AssertMsg(opnd->AsAddrOpnd()->IsVar(), "We only expect to see addr that are var before lower.");

            // Don't need to capture uses, we are only replacing an addr opnd
            if(toType == TyInt32)
            {
                constOpnd = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opnd->AsAddrOpnd()->m_address), TyInt32, instr->m_func);
            }
            else
            {
                Assert(toType == TyFloat64);
                constOpnd = IR::FloatConstOpnd::New(Js::TaggedInt::ToDouble(opnd->AsAddrOpnd()->m_address), TyFloat64, instr->m_func);
            }
        }

        if (toType == TyInt32)
        {
            if (needReplaceSrc)
            {
                // Const-prop directly into the use (or fold into the indir's offset).
                CaptureByteCodeSymUses(instr);
                if(indir)
                {
                    Assert(opnd == indir->GetIndexOpnd());
                    Assert(indir->GetScale() == 0);
                    indir->UnlinkIndexOpnd()->Free(instr->m_func);
                    indir->SetOffset(constOpnd->AsIntConstOpnd()->AsInt32());
                }
                else
                {
                    instr->ReplaceSrc(opnd, constOpnd);
                }
            }
            else
            {
                // No use to replace: make the int32 form live, loading the constant if needed.
                StackSym *varSym = opnd->AsRegOpnd()->m_sym;
                if(varSym->IsTypeSpec())
                {
                    varSym = varSym->GetVarEquivSym(nullptr);
                    Assert(varSym);
                }
                if(block->globOptData.liveInt32Syms->TestAndSet(varSym->m_id))
                {
                    Assert(!!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) == lossy);
                }
                else
                {
                    if(lossy)
                    {
                        block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
                    }

                    StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
                    IR::RegOpnd *int32Reg = IR::RegOpnd::New(int32Sym, TyInt32, instr->m_func);
                    int32Reg->SetIsJITOptimizedReg(true);
                    newInstr = IR::Instr::New(Js::OpCode::Ld_I4, int32Reg, constOpnd, instr->m_func);
                    newInstr->SetByteCodeOffset(instr);
                    instr->InsertBefore(newInstr);
                    if (updateBlockLastInstr)
                    {
                        block->SetLastInstr(newInstr);
                    }
                }
            }
        }
        else
        {
            // Float64: load the constant into a float64 sym and use that.
            StackSym *floatSym;
            bool newFloatSym = false;
            StackSym* varSym;
            if (opnd->IsRegOpnd())
            {
                varSym = opnd->AsRegOpnd()->m_sym;
                if (varSym->IsTypeSpec())
                {
                    varSym = varSym->GetVarEquivSym(nullptr);
                    Assert(varSym);
                }
                floatSym = varSym->GetFloat64EquivSym(instr->m_func);
            }
            else
            {
                varSym = block->globOptData.GetCopyPropSym(nullptr, val);
                if(!varSym)
                {
                    // Clear the symstore to ensure it's set below to this new symbol
                    this->SetSymStoreDirect(val->GetValueInfo(), nullptr);
                    varSym = StackSym::New(TyVar, instr->m_func);
                    newFloatSym = true;
                }
                floatSym = varSym->GetFloat64EquivSym(instr->m_func);
            }

            IR::RegOpnd *floatReg = IR::RegOpnd::New(floatSym, TyFloat64, instr->m_func);
            floatReg->SetIsJITOptimizedReg(true);

            // If the value is not live - let's load it.
            if(!block->globOptData.liveFloat64Syms->TestAndSet(varSym->m_id))
            {
                newInstr = IR::Instr::New(Js::OpCode::LdC_F8_R8, floatReg, constOpnd, instr->m_func);
                newInstr->SetByteCodeOffset(instr);
                instr->InsertBefore(newInstr);
                if (updateBlockLastInstr)
                {
                    block->SetLastInstr(newInstr);
                }
                if(newFloatSym)
                {
                    block->globOptData.SetValue(val, varSym);
                }

                // Src is always invariant, but check if the dst is, and then hoist.
                if (block->loop &&
                    (
                        (newFloatSym && block->loop->CanHoistInvariants()) ||
                        this->OptIsInvariant(floatReg, block, block->loop, val, false, false)
                    ))
                {
                    Assert(!this->IsLoopPrePass());
                    this->OptHoistInvariant(newInstr, block, block->loop, val, val, nullptr, false);
                }
            }

            if (needReplaceSrc)
            {
                CaptureByteCodeSymUses(instr);
                instr->ReplaceSrc(opnd, floatReg);
            }
        }

        return instr;
    }

    return newInstr;
}
  10924. void
  10925. GlobOpt::ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block)
  10926. {
  10927. ToVarStackSym(dst->m_sym, block);
  10928. }
  10929. void
  10930. GlobOpt::ToVarStackSym(StackSym *varSym, BasicBlock *block)
  10931. {
  10932. //added another check for sym , in case of asmjs there is mostly no var syms and hence added a new check to see if it is the primary sym
  10933. Assert(!varSym->IsTypeSpec());
  10934. block->globOptData.liveVarSyms->Set(varSym->m_id);
  10935. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  10936. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10937. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  10938. }
  10939. void
  10940. GlobOpt::ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  10941. {
  10942. StackSym *varSym = dst->m_sym;
  10943. Assert(!varSym->IsTypeSpec());
  10944. if (!this->IsLoopPrePass() && varSym->IsVar())
  10945. {
  10946. StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
  10947. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  10948. // since we'll just be hammering the symbol.
  10949. dst = instr->UnlinkDst()->AsRegOpnd();
  10950. dst->m_sym = int32Sym;
  10951. dst->SetType(TyInt32);
  10952. instr->SetDst(dst);
  10953. }
  10954. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  10955. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  10956. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  10957. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  10958. }
  10959. void
  10960. GlobOpt::ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  10961. {
  10962. // We should be calling only for asmjs function
  10963. Assert(GetIsAsmJSFunc());
  10964. StackSym *varSym = dst->m_sym;
  10965. Assert(!varSym->IsTypeSpec());
  10966. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  10967. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  10968. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  10969. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  10970. }
  10971. void
  10972. GlobOpt::ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  10973. {
  10974. StackSym *varSym = dst->m_sym;
  10975. Assert(!varSym->IsTypeSpec());
  10976. if (!this->IsLoopPrePass() && varSym->IsVar())
  10977. {
  10978. StackSym *float64Sym = varSym->GetFloat64EquivSym(this->func);
  10979. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  10980. // since we'll just be hammering the symbol.
  10981. dst = instr->UnlinkDst()->AsRegOpnd();
  10982. dst->m_sym = float64Sym;
  10983. dst->SetType(TyFloat64);
  10984. instr->SetDst(dst);
  10985. }
  10986. block->globOptData.liveFloat64Syms->Set(varSym->m_id);
  10987. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  10988. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  10989. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10990. }
// Overload for 64-bit constants: flags the dst sym as holding an int64 const.
// Unlike the int32 overload below, the sym does not record the value itself,
// so 'value' is intentionally unused here.
static void SetIsConstFlag(StackSym* dstSym, int64 value)
{
    Assert(dstSym);
    dstSym->SetIsInt64Const();
}
// Overload for 32-bit constants: flags the dst sym as an int const and records
// the constant value on the sym.
static void SetIsConstFlag(StackSym* dstSym, int value)
{
    Assert(dstSym);
    dstSym->SetIsIntConst(value);
}
  11001. static IR::Opnd* CreateIntConstOpnd(IR::Instr* instr, int64 value)
  11002. {
  11003. return (IR::Opnd*)IR::Int64ConstOpnd::New(value, instr->GetDst()->GetType(), instr->m_func);
  11004. }
  11005. static IR::Opnd* CreateIntConstOpnd(IR::Instr* instr, int value)
  11006. {
  11007. IntConstType constVal;
  11008. if (instr->GetDst()->IsUnsigned())
  11009. {
  11010. // we should zero extend in case of uint
  11011. constVal = (uint32)value;
  11012. }
  11013. else
  11014. {
  11015. constVal = value;
  11016. }
  11017. return (IR::Opnd*)IR::IntConstOpnd::New(constVal, instr->GetDst()->GetType(), instr->m_func);
  11018. }
  11019. template <typename T>
  11020. IR::Opnd* GlobOpt::ReplaceWConst(IR::Instr **pInstr, T value, Value **pDstVal)
  11021. {
  11022. IR::Instr * &instr = *pInstr;
  11023. IR::Opnd * constOpnd = CreateIntConstOpnd(instr, value);
  11024. instr->ReplaceSrc1(constOpnd);
  11025. instr->FreeSrc2();
  11026. this->OptSrc(constOpnd, &instr);
  11027. IR::Opnd *dst = instr->GetDst();
  11028. StackSym *dstSym = dst->AsRegOpnd()->m_sym;
  11029. if (dstSym->IsSingleDef())
  11030. {
  11031. SetIsConstFlag(dstSym, value);
  11032. }
  11033. GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);
  11034. *pDstVal = GetIntConstantValue(value, instr, dst);
  11035. return dst;
  11036. }
// Attempts to constant-fold a wasm/asm.js binary op whose two sources are known
// T constants (T is int32 or int64). On success the instruction is rewritten
// into Ld_I4 of the folded constant, *pDstVal is set to the constant's value,
// and true is returned; otherwise the instruction is untouched.
template <typename T>
bool GlobOpt::OptConstFoldBinaryWasm(
    IR::Instr** pInstr,
    const Value* src1,
    const Value* src2,
    Value **pDstVal)
{
    IR::Instr* &instr = *pInstr;
    if (!DoConstFold())
    {
        return false;
    }
    // Both sources must be exact T constants.
    T src1IntConstantValue, src2IntConstantValue;
    if (!src1 || !src1->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue, false) || //a bit sketchy: false for int32 means likelyInt = false
        !src2 || !src2->GetValueInfo()->TryGetIntConstantValue(&src2IntConstantValue, false) //and unsigned = false for int64
        )
    {
        return false;
    }
    int64 tmpValueOut;
    if (!instr->BinaryCalculatorT<T>(src1IntConstantValue, src2IntConstantValue, &tmpValueOut, func->GetJITFunctionBody()->IsWasmFunction()))
    {
        return false;
    }
    this->CaptureByteCodeSymUses(instr);
    // Pick the const-opnd width from the dst type, not from T.
    IR::Opnd *dst = (instr->GetDst()->IsInt64()) ? //dst can be int32 for int64 comparison operators
        ReplaceWConst(pInstr, tmpValueOut, pDstVal) :
        ReplaceWConst(pInstr, (int)tmpValueOut, pDstVal);
    instr->m_opcode = Js::OpCode::Ld_I4;
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    return true;
}
// Attempts to constant-fold an int32 binary op whose sources have known constant
// bounds. On success the instruction becomes a load of the folded constant
// (LdC_A_I4 when type spec is off, Ld_I4 otherwise), *pDstVal is set, and true
// is returned; otherwise the instruction is untouched and false is returned.
bool
GlobOpt::OptConstFoldBinary(
    IR::Instr * *pInstr,
    const IntConstantBounds &src1IntConstantBounds,
    const IntConstantBounds &src2IntConstantBounds,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value;
    IR::IntConstOpnd *constOpnd;

    if (!DoConstFold())
    {
        return false;
    }

    int32 src1IntConstantValue = -1;
    int32 src2IntConstantValue = -1;

    int32 src1MaxIntConstantValue = -1;
    int32 src2MaxIntConstantValue = -1;
    int32 src1MinIntConstantValue = -1;
    int32 src2MinIntConstantValue = -1;

    if (instr->IsBranchInstr())
    {
        // NOTE(review): these min/max bounds are captured but not consulted below;
        // BinaryCalculator is still called with the -1 defaults for branches —
        // confirm this is intended (branch folding appears handled elsewhere).
        src1MinIntConstantValue = src1IntConstantBounds.LowerBound();
        src1MaxIntConstantValue = src1IntConstantBounds.UpperBound();
        src2MinIntConstantValue = src2IntConstantBounds.LowerBound();
        src2MaxIntConstantValue = src2IntConstantBounds.UpperBound();
    }
    else if (src1IntConstantBounds.IsConstant() && src2IntConstantBounds.IsConstant())
    {
        // Non-branch: both sources must be exact constants.
        src1IntConstantValue = src1IntConstantBounds.LowerBound();
        src2IntConstantValue = src2IntConstantBounds.LowerBound();
    }
    else
    {
        return false;
    }

    // Fold, and reject results that do not fit in 32 bits.
    IntConstType tmpValueOut;
    if (!instr->BinaryCalculator(src1IntConstantValue, src2IntConstantValue, &tmpValueOut, TyInt32)
        || !Math::FitsInDWord(tmpValueOut))
    {
        return false;
    }

    value = (int32)tmpValueOut;

    this->CaptureByteCodeSymUses(instr);
    constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
    instr->ReplaceSrc1(constOpnd);
    instr->FreeSrc2();
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (dstSym->IsSingleDef())
    {
        dstSym->SetIsIntConst(value);
    }

    GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);

    *pDstVal = GetIntConstantValue(value, instr, dst);

    if (IsTypeSpecPhaseOff(this->func))
    {
        // No type spec: keep the dst in var form.
        instr->m_opcode = Js::OpCode::LdC_A_I4;
        this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
    }
    else
    {
        // Type spec on: keep the dst int32-specialized.
        instr->m_opcode = Js::OpCode::Ld_I4;
        this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    }

    InvalidateInductionVariables(instr);

    return true;
}
// Constant-folds a conditional branch whose outcome 'test' is known.
// When taken, the branch becomes an unconditional Br and the fall-through block
// dies; when not taken, it becomes a Nop and the target block dies. The dead
// successor edge is then removed from the flow graph.
void
GlobOpt::OptConstFoldBr(bool test, IR::Instr *instr, Value * src1Val, Value * src2Val)
{
    GOPT_TRACE_INSTR(instr, _u("Constant folding to branch: "));
    BasicBlock *deadBlock;

    // Make sure the constant sources are in int32 form before freeing them.
    if (src1Val)
    {
        this->ToInt32(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, false);
    }

    if (src2Val)
    {
        this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, false);
    }

    this->CaptureByteCodeSymUses(instr);

    if (test)
    {
        // Always taken: the fall-through block (label right after the branch) is dead.
        instr->m_opcode = Js::OpCode::Br;

        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        deadBlock = instr->m_next->AsLabelInstr()->GetBasicBlock();
    }
    else
    {
        AssertMsg(instr->m_next->IsLabelInstr(), "Next instr of branch should be a label...");
        if(instr->AsBranchInstr()->IsMultiBranch())
        {
            // A multi-branch has no single dead target to prune.
            return;
        }
        // Never taken: the branch target block is dead.
        deadBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        instr->m_opcode = Js::OpCode::Nop;
    }

    // Loop back edge: we would have already decremented data use count for the tail block when we processed the loop header.
    if (!(this->currentBlock->loop && this->currentBlock->loop->GetHeadBlock() == deadBlock))
    {
        this->currentBlock->DecrementDataUseCount();
    }

    this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);

    if (deadBlock->GetPredList()->Count() == 0)
    {
        deadBlock->SetDataUseCount(0);
    }
}
  11189. void
  11190. GlobOpt::ChangeValueType(
  11191. BasicBlock *const block,
  11192. Value *const value,
  11193. const ValueType newValueType,
  11194. const bool preserveSubclassInfo,
  11195. const bool allowIncompatibleType) const
  11196. {
  11197. Assert(value);
  11198. // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
  11199. Assert(!value->GetValueInfo()->IsJsType());
  11200. ValueInfo *const valueInfo = value->GetValueInfo();
  11201. const ValueType valueType(valueInfo->Type());
  11202. if(valueType == newValueType && (preserveSubclassInfo || valueInfo->IsGeneric()))
  11203. {
  11204. return;
  11205. }
  11206. // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
  11207. Assert(
  11208. !preserveSubclassInfo ||
  11209. !valueInfo->IsArrayValueInfo() ||
  11210. newValueType.IsObject() && newValueType.GetObjectType() == valueInfo->GetObjectType());
  11211. Assert(!valueInfo->GetSymStore() || !valueInfo->GetSymStore()->IsStackSym() || !valueInfo->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable());
  11212. ValueInfo *const newValueInfo =
  11213. preserveSubclassInfo
  11214. ? valueInfo->Copy(alloc)
  11215. : valueInfo->CopyWithGenericStructureKind(alloc);
  11216. newValueInfo->Type() = newValueType;
  11217. ChangeValueInfo(block, value, newValueInfo, allowIncompatibleType);
  11218. }
// Replaces a value's value info after validating (via asserts) that the new
// info is compatible with the old, and updates kill-on-call tracking for the
// block when one is provided (a null block skips tracking — used for kills).
void
GlobOpt::ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType, const bool compensated) const
{
    Assert(value);
    Assert(newValueInfo);

    // The value type must be changed to something more specific or something more generic. For instance, it would be changed to
    // something more specific if the current value type is LikelyArray and checks have been done to ensure that it's an array,
    // and it would be changed to something more generic if a call kills the Array value type and it must be treated as
    // LikelyArray going forward.

    // There are cases where we change the type because of different profile information, and because of rejit, these profile information
    // may conflict. Need to allow incompatible type in those cause. However, the old type should be indefinite.
    Assert((allowIncompatibleType && !value->GetValueInfo()->IsDefinite()) ||
        AreValueInfosCompatible(newValueInfo, value->GetValueInfo()));

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !value->GetValueInfo()->IsArrayValueInfo() ||
        !newValueInfo->IsArrayValueInfo() ||
        newValueInfo->GetObjectType() == value->GetValueInfo()->GetObjectType());

    if(block)
    {
        TrackValueInfoChangeForKills(block, value, newValueInfo, compensated);
    }
    value->SetValueInfo(newValueInfo);
}
// Determines whether two value infos may legitimately describe the same value:
// either one's definite type is a subset of the other's, or one of the
// compensations allowed by int/float specialization, mixed typed-array pairs,
// or int-valued float constants applies. Used to validate ChangeValueInfo.
bool
GlobOpt::AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const
{
    Assert(v0);
    Assert(v1);

    // An uninitialized value info is compatible with anything.
    if(v0->IsUninitialized() || v1->IsUninitialized())
    {
        return true;
    }

    const bool doAggressiveIntTypeSpec = DoAggressiveIntTypeSpec();
    if(doAggressiveIntTypeSpec && (v0->IsInt() || v1->IsInt()))
    {
        // Int specialization in some uncommon loop cases involving dependencies, needs to allow specializing values of
        // arbitrary types, even values that are definitely not int, to compensate for aggressive assumptions made by a loop
        // prepass
        return true;
    }

    if ((v0->Type()).IsMixedTypedArrayPair(v1->Type()) || (v1->Type()).IsMixedTypedArrayPair(v0->Type()))
    {
        return true;
    }

    const bool doFloatTypeSpec = DoFloatTypeSpec();
    if(doFloatTypeSpec && (v0->IsFloat() || v1->IsFloat()))
    {
        // Float specialization allows specializing values of arbitrary types, even values that are definitely not float
        return true;
    }

    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    // Compatible when either type is a subset of the other.
    const auto AreValueTypesCompatible = [=](const ValueType t0, const ValueType t1)
    {
        return
            t0.IsSubsetOf(t1, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec) ||
            t1.IsSubsetOf(t0, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec);
    };

    const ValueType t0(v0->Type().ToDefinite()), t1(v1->Type().ToDefinite());
    if(t0.IsLikelyObject() && t1.IsLikelyObject())
    {
        // Check compatibility for the primitive portions and the object portions of the value types separately
        if(AreValueTypesCompatible(t0.ToDefiniteObject(), t1.ToDefiniteObject()) &&
            (
                !t0.HasBeenPrimitive() ||
                !t1.HasBeenPrimitive() ||
                AreValueTypesCompatible(t0.ToDefinitePrimitiveSubset(), t1.ToDefinitePrimitiveSubset())
            ))
        {
            return true;
        }
    }
    else if(AreValueTypesCompatible(t0, t1))
    {
        return true;
    }

    // Last chance: a float constant paired with a likely-int value.
    const FloatConstantValueInfo *floatConstantValueInfo;
    const ValueInfo *likelyIntValueinfo;
    if(v0->IsFloatConstant() && v1->IsLikelyInt())
    {
        floatConstantValueInfo = v0->AsFloatConstant();
        likelyIntValueinfo = v1;
    }
    else if(v0->IsLikelyInt() && v1->IsFloatConstant())
    {
        floatConstantValueInfo = v1->AsFloatConstant();
        likelyIntValueinfo = v0;
    }
    else
    {
        return false;
    }

    // A float constant value with a value that is actually an int is a subset of a likely-int value.
    // Ideally, we should create an int constant value for this up front, such that IsInt() also returns true. There
    // were other issues with that, should see if that can be done.
    int32 int32Value;
    return
        Js::JavascriptNumber::TryGetInt32Value(floatConstantValueInfo->FloatValue(), &int32Value) &&
        (!likelyIntValueinfo->IsLikelyTaggedInt() || !Js::TaggedInt::IsOverflow(int32Value));
}
#if DBG
// Debug-only consistency check for array value infos in kill-on-call tracking:
// verifies that the value type agrees with the isJsArray flag and that any
// hoist-dependent state (no-missing-values bit, head segment / head segment
// length / length syms) is only present when the corresponding hoist
// optimization is enabled.
void
GlobOpt::VerifyArrayValueInfoForTracking(
    const ValueInfo *const valueInfo,
    const bool isJsArray,
    const BasicBlock *const block,
    const bool ignoreKnownImplicitCalls) const
{
    Assert(valueInfo);
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(isJsArray == valueInfo->IsArrayOrObjectWithArray());
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
    Assert(block);

    Loop *implicitCallsLoop;
    if(block->next && !block->next->isDeleted && block->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = block->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == block);
    }
    else
    {
        implicitCallsLoop = block->loop;
    }

    Assert(
        !isJsArray ||
        DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop) ||
        (
            ignoreKnownImplicitCalls &&
            !(implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
        ));
    Assert(!(isJsArray && valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist()));
    Assert(
        !(
            valueInfo->IsArrayValueInfo() &&
            (
                valueInfo->AsArrayValueInfo()->HeadSegmentSym() ||
                valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
            ) &&
            !DoArraySegmentHoist(valueInfo->Type())
        ));
#if 0
    // We can't assert here that there is only a head segment length sym if hoisting is allowed in the current block,
    // because we may have propagated the sym forward out of a loop, and hoisting may be allowed inside but not
    // outside the loop.
    Assert(
        isJsArray ||
        !valueInfo->IsArrayValueInfo() ||
        !valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym() ||
        DoTypedArraySegmentLengthHoist(implicitCallsLoop) ||
        ignoreKnownImplicitCalls ||
        (implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
    );
#endif
    Assert(
        !(
            isJsArray &&
            valueInfo->IsArrayValueInfo() &&
            valueInfo->AsArrayValueInfo()->LengthSym() &&
            !DoArrayLengthHoist()
        ));
}
#endif
  11385. void
  11386. GlobOpt::TrackNewValueForKills(Value *const value)
  11387. {
  11388. Assert(value);
  11389. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  11390. {
  11391. return;
  11392. }
  11393. DoTrackNewValueForKills(value);
  11394. }
// Registers a newly created optimized-array value for kill-on-call tracking.
// The (new, modifiable-in-place) value info may first be downgraded: to a
// likely type when array check hoisting is disabled for the relevant loop, or
// losing its no-missing-values bit when that hoist is disabled.
void
GlobOpt::DoTrackNewValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(!valueInfo->IsArrayValueInfo());

    // The value and value info here are new, so it's okay to modify the value info in-place
    Assert(!valueInfo->GetSymStore());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();

    Loop *implicitCallsLoop;
    if(currentBlock->next && !currentBlock->next->isDeleted && currentBlock->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = currentBlock->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == currentBlock);
    }
    else
    {
        implicitCallsLoop = currentBlock->loop;
    }

    if(isJsArray || isVirtualTypedArray)
    {
        if(!DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop))
        {
            // Array opts are disabled for this value type, so treat it as an indefinite value type going forward
            valueInfo->Type() = valueInfo->Type().ToLikely();
            return;
        }

        if(isJsArray && valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
        {
            valueInfo->Type() = valueInfo->Type().SetHasNoMissingValues(false);
        }
    }

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    if(!isJsArray && !isVirtualTypedArray)
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    CurrentBlockData()->valuesToKillOnCalls->Add(value);
}
  11446. void
  11447. GlobOpt::TrackCopiedValueForKills(Value *const value)
  11448. {
  11449. Assert(value);
  11450. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  11451. {
  11452. return;
  11453. }
  11454. DoTrackCopiedValueForKills(value);
  11455. }
// Registers a value copied from another block for kill-on-call tracking.
// Unlike DoTrackNewValueForKills the value info is not modified here; typed
// arrays are only tracked when they carry a head segment length sym.
void
GlobOpt::DoTrackCopiedValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    if(!isJsArray && !isVirtualTypedArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    CurrentBlockData()->valuesToKillOnCalls->Add(value);
}
  11478. void
  11479. GlobOpt::TrackMergedValueForKills(
  11480. Value *const value,
  11481. GlobOptBlockData *const blockData,
  11482. BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
  11483. {
  11484. Assert(value);
  11485. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  11486. {
  11487. return;
  11488. }
  11489. DoTrackMergedValueForKills(value, blockData, mergedValueTypesTrackedForKills);
  11490. }
// Registers a merged optimized-array value for kill-on-call tracking in the
// given block data. mergedValueTypesTrackedForKills (when provided) dedupes by
// value number so each value is added at most once per merge.
void
GlobOpt::DoTrackMergedValueForKills(
    Value *const value,
    GlobOptBlockData *const blockData,
    BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
{
    Assert(value);
    Assert(blockData);

    ValueInfo *valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock, true);
#endif

    if(!isJsArray && !isVirtualTypedArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    if(!mergedValueTypesTrackedForKills || !mergedValueTypesTrackedForKills->TestAndSet(value->GetValueNumber()))
    {
        blockData->valuesToKillOnCalls->Add(value);
    }
}
  11520. void
  11521. GlobOpt::TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const
  11522. {
  11523. Assert(block);
  11524. Assert(value);
  11525. Assert(newValueInfo);
  11526. ValueInfo *const oldValueInfo = value->GetValueInfo();
  11527. #if DBG
  11528. if(oldValueInfo->IsAnyOptimizedArray())
  11529. {
  11530. VerifyArrayValueInfoForTracking(oldValueInfo, oldValueInfo->IsArrayOrObjectWithArray(), block, compensated);
  11531. }
  11532. #endif
  11533. const bool trackOldValueInfo =
  11534. oldValueInfo->IsArrayOrObjectWithArray() ||
  11535. oldValueInfo->IsOptimizedVirtualTypedArray() ||
  11536. (
  11537. oldValueInfo->IsOptimizedTypedArray() &&
  11538. oldValueInfo->IsArrayValueInfo() &&
  11539. oldValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
  11540. );
  11541. Assert(trackOldValueInfo == block->globOptData.valuesToKillOnCalls->ContainsKey(value));
  11542. #if DBG
  11543. if(newValueInfo->IsAnyOptimizedArray())
  11544. {
  11545. VerifyArrayValueInfoForTracking(newValueInfo, newValueInfo->IsArrayOrObjectWithArray(), block, compensated);
  11546. }
  11547. #endif
  11548. const bool trackNewValueInfo =
  11549. newValueInfo->IsArrayOrObjectWithArray() ||
  11550. newValueInfo->IsOptimizedVirtualTypedArray() ||
  11551. (
  11552. newValueInfo->IsOptimizedTypedArray() &&
  11553. newValueInfo->IsArrayValueInfo() &&
  11554. newValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
  11555. );
  11556. if(trackOldValueInfo == trackNewValueInfo)
  11557. {
  11558. return;
  11559. }
  11560. if(trackNewValueInfo)
  11561. {
  11562. block->globOptData.valuesToKillOnCalls->Add(value);
  11563. }
  11564. else
  11565. {
  11566. block->globOptData.valuesToKillOnCalls->Remove(value);
  11567. }
  11568. }
// Applies the array-related kills implied by 'instr' to every value in the
// current block's valuesToKillOnCalls set: downgrading value types to their
// likely versions and/or stripping hoisted segment/length syms so the checks
// must be redone. During a loop prepass the kills are also accumulated onto the
// root loop being prepassed.
void
GlobOpt::ProcessValueKills(IR::Instr *const instr)
{
    Assert(instr);

    ValueSet *const valuesToKillOnCalls = CurrentBlockData()->valuesToKillOnCalls;
    if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    const JsArrayKills kills = CheckJsArrayKills(instr);
    Assert(!kills.KillsArrayHeadSegments() || kills.KillsArrayHeadSegmentLengths());

    if(IsLoopPrePass())
    {
        // Accumulate the kills on the loop so later passes know what the loop body can do.
        rootLoopPrePass->jsArrayKills = rootLoopPrePass->jsArrayKills.Merge(kills);
        Assert(
            !rootLoopPrePass->parent ||
            rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));
        if(kills.KillsAllArrays())
        {
            rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = false;
        }

        if(valuesToKillOnCalls->Count() == 0)
        {
            return;
        }
    }

    if(kills.KillsAllArrays())
    {
        Assert(kills.KillsTypedArrayHeadSegmentLengths());

        // - Calls need to kill the value types of values in the following list. For instance, calls can transform a JS array
        //   into an ES5 array, so any definitely-array value types need to be killed. Also, VirtualTypeArrays do not have
        //   bounds checks; this can be problematic if the array is detached, so check to ensure that it is a virtual array.
        //   Update the value types to likley to ensure a bailout that asserts Array type is generated.
        // - Calls also need to kill typed array head segment lengths. A typed array's array buffer may be transferred to a web
        //   worker, in which case the typed array's length is set to zero.
        for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedVirtualTypedArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
            if (valueInfo->IsArrayOrObjectWithArray() || valueInfo->IsOptimizedVirtualTypedArray())
            {
                ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
                continue;
            }
            // Typed array: only the head segment length sym is dropped.
            ChangeValueInfo(
                nullptr,
                value,
                valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
        }
        valuesToKillOnCalls->Clear();
        return;
    }

    if(kills.KillsArraysWithNoMissingValues())
    {
        // Some operations may kill arrays with no missing values in unlikely circumstances. Convert their value types to likely
        // versions so that the checks have to be redone.
        for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedVirtualTypedArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
            if(!valueInfo->IsArrayOrObjectWithArray() || !valueInfo->HasNoMissingValues())
            {
                continue;
            }
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            it.RemoveCurrent();
        }
    }

    if(kills.KillsNativeArrays())
    {
        // Some operations may kill native arrays in (what should be) unlikely circumstances. Convert their value types to
        // likely versions so that the checks have to be redone.
        for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedVirtualTypedArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
            if(!valueInfo->IsArrayOrObjectWithArray() || valueInfo->HasVarElements())
            {
                continue;
            }
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            it.RemoveCurrent();
        }
    }

    const bool likelyKillsJsArraysWithNoMissingValues = IsOperationThatLikelyKillsJsArraysWithNoMissingValues(instr);
    if(!kills.KillsArrayHeadSegmentLengths())
    {
        Assert(!kills.KillsArrayHeadSegments());
        if(!likelyKillsJsArraysWithNoMissingValues && !kills.KillsArrayLengths())
        {
            // Nothing left for this instruction to kill.
            return;
        }
    }

    for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *valueInfo = value->GetValueInfo();
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedVirtualTypedArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
        if(!valueInfo->IsArrayOrObjectWithArray())
        {
            continue;
        }

        if(likelyKillsJsArraysWithNoMissingValues && valueInfo->HasNoMissingValues())
        {
            ChangeValueType(nullptr, value, valueInfo->Type().SetHasNoMissingValues(false), true);
            valueInfo = value->GetValueInfo();
        }

        if(!valueInfo->IsArrayValueInfo())
        {
            continue;
        }

        // Strip whichever hoisted syms this instruction's kills invalidate.
        ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
        const bool removeHeadSegment = kills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
        const bool removeHeadSegmentLength = kills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
        const bool removeLength = kills.KillsArrayLengths() && arrayValueInfo->LengthSym();
        if(removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            ChangeValueInfo(
                nullptr,
                value,
                arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
            valueInfo = value->GetValueInfo();
        }
    }
}
// Kills the definitely-array value information tracked for the given block when the
// block (or its enclosing loop) may make implicit calls. Using definite array info in
// that situation would force implicit calls to be disabled and cause unnecessary
// bailouts, so JS-array values are demoted to "likely" types and typed-array values
// lose their head-segment-length sym.
void
GlobOpt::ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    // Outside the loop prepass there is nothing to do when no values are tracked.
    // During the prepass we must fall through even with an empty set so the loop's
    // jsArrayKills get recorded below.
    if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    // If the current block or loop has implicit calls, kill all definitely-array value types, as using that info will cause
    // implicit calls to be disabled, resulting in unnecessary bailouts
    const bool killValuesOnImplicitCalls =
        (block->loop ? !this->ImplicitCallFlagsAllowOpts(block->loop) : !this->ImplicitCallFlagsAllowOpts(func));
    if (!killValuesOnImplicitCalls)
    {
        return;
    }

    if(IsLoopPrePass() && block->loop == rootLoopPrePass)
    {
        AnalysisAssert(rootLoopPrePass);

        // Record on the prepass root loop and all of its ancestors that they kill all
        // arrays, so later passes won't hoist array info across these loops.
        for (Loop * loop = rootLoopPrePass; loop != nullptr; loop = loop->parent)
        {
            loop->jsArrayKills.SetKillsAllArrays();
        }
        Assert(!rootLoopPrePass->parent || rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));

        if(valuesToKillOnCalls->Count() == 0)
        {
            return;
        }
    }

    for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *const valueInfo = value->GetValueInfo();
        // Invariant of the tracked set: only array-ish values are registered here.
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedVirtualTypedArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        if(valueInfo->IsArrayOrObjectWithArray() || valueInfo->IsOptimizedVirtualTypedArray())
        {
            // JS arrays and virtual typed arrays: demote the definite type to a likely type.
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            continue;
        }
        // Optimized typed arrays: keep the type but drop the head segment length sym
        // (copyHeadSegmentLength == false).
        ChangeValueInfo(
            nullptr,
            value,
            valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
    }
    valuesToKillOnCalls->Clear();
}
  11760. void
  11761. GlobOpt::ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData)
  11762. {
  11763. Assert(block);
  11764. Assert(block->isLoopHeader);
  11765. Assert(blockData);
  11766. ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
  11767. if(valuesToKillOnCalls->Count() == 0)
  11768. {
  11769. return;
  11770. }
  11771. const JsArrayKills loopKills(block->loop->jsArrayKills);
  11772. for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
  11773. {
  11774. Value *const value = it.CurrentValue();
  11775. ValueInfo *valueInfo = value->GetValueInfo();
  11776. Assert(
  11777. valueInfo->IsArrayOrObjectWithArray() ||
  11778. valueInfo->IsOptimizedVirtualTypedArray() ||
  11779. valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
  11780. const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
  11781. Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
  11782. const bool isVirtualTypedArray = valueInfo->IsOptimizedVirtualTypedArray();
  11783. if((isJsArray || isVirtualTypedArray) ? loopKills.KillsValueType(valueInfo->Type()) : loopKills.KillsTypedArrayHeadSegmentLengths())
  11784. {
  11785. // Hoisting array checks and other related things for this type is disabled for the loop due to the kill, as
  11786. // compensation code is currently not added on back-edges. When merging values from a back-edge, the array value
  11787. // type cannot be definite, as that may require adding compensation code on the back-edge if the optimization pass
  11788. // chooses to not optimize the array.
  11789. if(isJsArray || isVirtualTypedArray)
  11790. {
  11791. ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
  11792. }
  11793. else
  11794. {
  11795. ChangeValueInfo(
  11796. nullptr,
  11797. value,
  11798. valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
  11799. }
  11800. it.RemoveCurrent();
  11801. continue;
  11802. }
  11803. if(!isJsArray || !valueInfo->IsArrayValueInfo())
  11804. {
  11805. continue;
  11806. }
  11807. // Similarly, if the loop contains an operation that kills JS array segments, don't make the segment or other related
  11808. // syms available initially inside the loop
  11809. ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
  11810. const bool removeHeadSegment = loopKills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
  11811. const bool removeHeadSegmentLength = loopKills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
  11812. const bool removeLength = loopKills.KillsArrayLengths() && arrayValueInfo->LengthSym();
  11813. if(removeHeadSegment || removeHeadSegmentLength || removeLength)
  11814. {
  11815. ChangeValueInfo(
  11816. nullptr,
  11817. value,
  11818. arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
  11819. valueInfo = value->GetValueInfo();
  11820. }
  11821. }
  11822. }
  11823. bool
  11824. GlobOpt::NeedBailOnImplicitCallForLiveValues(BasicBlock const * const block, const bool isForwardPass) const
  11825. {
  11826. if(isForwardPass)
  11827. {
  11828. return block->globOptData.valuesToKillOnCalls->Count() != 0;
  11829. }
  11830. if(block->noImplicitCallUses->IsEmpty())
  11831. {
  11832. Assert(block->noImplicitCallNoMissingValuesUses->IsEmpty());
  11833. Assert(block->noImplicitCallNativeArrayUses->IsEmpty());
  11834. Assert(block->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty());
  11835. Assert(block->noImplicitCallArrayLengthSymUses->IsEmpty());
  11836. return false;
  11837. }
  11838. return true;
  11839. }
  11840. IR::Instr*
  11841. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func)
  11842. {
  11843. IR::Instr* instr = IR::Instr::New(Js::OpCode::BoundCheck, func);
  11844. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  11845. }
  11846. IR::Instr*
  11847. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func * func)
  11848. {
  11849. IR::Instr* instr = IR::BailOutInstr::New(Js::OpCode::BoundCheck, bailoutkind, bailoutInfo, func);
  11850. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  11851. }
  11852. IR::Instr*
  11853. GlobOpt::AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset)
  11854. {
  11855. instr->SetSrc1(lowerBound);
  11856. instr->SetSrc2(upperBound);
  11857. if (offset != 0)
  11858. {
  11859. instr->SetDst(IR::IntConstOpnd::New(offset, TyInt32, instr->m_func));
  11860. }
  11861. return instr;
  11862. }
  11863. void
  11864. GlobOpt::OptArraySrc(IR::Instr ** const instrRef, Value ** src1Val, Value ** src2Val)
  11865. {
  11866. Assert(instrRef != nullptr);
  11867. ArraySrcOpt arraySrcOpt(this, instrRef, src1Val, src2Val);
  11868. arraySrcOpt.Optimize();
  11869. }
  11870. void
  11871. GlobOpt::OptStackArgLenAndConst(IR::Instr* instr, Value** src1Val)
  11872. {
  11873. if (!PHASE_OFF(Js::StackArgLenConstOptPhase, instr->m_func) && instr->m_func->IsStackArgsEnabled() && instr->usesStackArgumentsObject && instr->IsInlined())
  11874. {
  11875. IR::Opnd* src1 = instr->GetSrc1();
  11876. auto replaceInstr = [&](IR::Opnd* newopnd, Js::OpCode opcode)
  11877. {
  11878. if (PHASE_TESTTRACE(Js::StackArgLenConstOptPhase, instr->m_func))
  11879. {
  11880. Output::Print(_u("Inlined function %s have replaced opcode %s with opcode %s for stack arg optimization. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(),
  11881. Js::OpCodeUtil::GetOpCodeName(instr->m_opcode), Js::OpCodeUtil::GetOpCodeName(opcode));
  11882. Output::Flush();
  11883. }
  11884. this->CaptureByteCodeSymUses(instr);
  11885. instr->m_opcode = opcode;
  11886. instr->ReplaceSrc1(newopnd);
  11887. if (instr->HasBailOutInfo())
  11888. {
  11889. instr->ClearBailOutInfo();
  11890. }
  11891. *src1Val = this->OptSrc(instr->GetSrc1(), &instr);
  11892. instr->m_func->hasArgLenAndConstOpt = true;
  11893. };
  11894. Assert(CurrentBlockData()->IsArgumentsOpnd(src1));
  11895. switch(instr->m_opcode)
  11896. {
  11897. case Js::OpCode::LdLen_A:
  11898. {
  11899. IR::AddrOpnd* newopnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked(instr->m_func->actualCount - 1), IR::AddrOpndKindConstantVar, instr->m_func);
  11900. replaceInstr(newopnd, Js::OpCode::Ld_A);
  11901. break;
  11902. }
  11903. case Js::OpCode::LdElemI_A:
  11904. case Js::OpCode::TypeofElem:
  11905. {
  11906. IR::IndirOpnd* indirOpndSrc1 = src1->AsIndirOpnd();
  11907. if (!indirOpndSrc1->GetIndexOpnd())
  11908. {
  11909. int argIndex = indirOpndSrc1->GetOffset() + 1;
  11910. IR::Instr* defInstr = nullptr;
  11911. IR::Instr* inlineeStart = instr->m_func->GetInlineeStart();
  11912. inlineeStart->IterateArgInstrs([&](IR::Instr* argInstr) {
  11913. StackSym *argSym = argInstr->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
  11914. if (argSym->GetArgSlotNum() - 1 == argIndex)
  11915. {
  11916. defInstr = argInstr;
  11917. return true;
  11918. }
  11919. return false;
  11920. });
  11921. Js::OpCode replacementOpcode;
  11922. if (instr->m_opcode == Js::OpCode::TypeofElem)
  11923. {
  11924. replacementOpcode = Js::OpCode::Typeof;
  11925. }
  11926. else
  11927. {
  11928. replacementOpcode = Js::OpCode::Ld_A;
  11929. }
  11930. // If we cannot find the right instruction. I.E. When calling arguments[2] and no arguments were passed to the func
  11931. if (defInstr == nullptr)
  11932. {
  11933. IR::Opnd * undefined = IR::AddrOpnd::New(instr->m_func->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicVar, instr->m_func, true);
  11934. undefined->SetValueType(ValueType::Undefined);
  11935. replaceInstr(undefined, replacementOpcode);
  11936. }
  11937. else
  11938. {
  11939. replaceInstr(defInstr->GetSrc1(), replacementOpcode);
  11940. }
  11941. }
  11942. else
  11943. {
  11944. instr->m_func->unoptimizableArgumentsObjReference++;
  11945. }
  11946. break;
  11947. }
  11948. }
  11949. }
  11950. }
  11951. void
  11952. GlobOpt::CaptureNoImplicitCallUses(
  11953. IR::Opnd *opnd,
  11954. const bool usesNoMissingValuesInfo,
  11955. IR::Instr *const includeCurrentInstr)
  11956. {
  11957. Assert(!IsLoopPrePass());
  11958. Assert(noImplicitCallUsesToInsert);
  11959. Assert(opnd);
  11960. // The opnd may be deleted later, so make a copy to ensure it is alive for inserting NoImplicitCallUses later
  11961. opnd = opnd->Copy(func);
  11962. if(!usesNoMissingValuesInfo)
  11963. {
  11964. const ValueType valueType(opnd->GetValueType());
  11965. if(valueType.IsArrayOrObjectWithArray() && valueType.HasNoMissingValues())
  11966. {
  11967. // Inserting NoImplicitCallUses for an opnd with a definitely-array-with-no-missing-values value type means that the
  11968. // instruction following it uses the information that the array has no missing values in some way, for instance, it
  11969. // may omit missing value checks. Based on that, the dead-store phase in turn ensures that the necessary bailouts
  11970. // are inserted to ensure that the array still has no missing values until the following instruction. Since
  11971. // 'usesNoMissingValuesInfo' is false, change the value type to indicate to the dead-store phase that the following
  11972. // instruction does not use the no-missing-values information.
  11973. opnd->SetValueType(valueType.SetHasNoMissingValues(false));
  11974. }
  11975. }
  11976. if(includeCurrentInstr)
  11977. {
  11978. IR::Instr *const noImplicitCallUses =
  11979. IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, includeCurrentInstr->m_func);
  11980. noImplicitCallUses->SetSrc1(opnd);
  11981. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  11982. includeCurrentInstr->InsertAfter(noImplicitCallUses);
  11983. return;
  11984. }
  11985. noImplicitCallUsesToInsert->Add(opnd);
  11986. }
  11987. void
  11988. GlobOpt::InsertNoImplicitCallUses(IR::Instr *const instr)
  11989. {
  11990. Assert(noImplicitCallUsesToInsert);
  11991. const int n = noImplicitCallUsesToInsert->Count();
  11992. if(n == 0)
  11993. {
  11994. return;
  11995. }
  11996. IR::Instr *const insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();
  11997. for(int i = 0; i < n;)
  11998. {
  11999. IR::Instr *const noImplicitCallUses = IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, instr->m_func);
  12000. noImplicitCallUses->SetSrc1(noImplicitCallUsesToInsert->Item(i));
  12001. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  12002. ++i;
  12003. if(i < n)
  12004. {
  12005. noImplicitCallUses->SetSrc2(noImplicitCallUsesToInsert->Item(i));
  12006. noImplicitCallUses->GetSrc2()->SetIsJITOptimizedReg(true);
  12007. ++i;
  12008. }
  12009. noImplicitCallUses->SetByteCodeOffset(instr);
  12010. insertBeforeInstr->InsertBefore(noImplicitCallUses);
  12011. }
  12012. noImplicitCallUsesToInsert->Clear();
  12013. }
  12014. void
  12015. GlobOpt::PrepareLoopArrayCheckHoist()
  12016. {
  12017. if(IsLoopPrePass() || !currentBlock->loop || !currentBlock->isLoopHeader || !currentBlock->loop->parent)
  12018. {
  12019. return;
  12020. }
  12021. if(currentBlock->loop->parent->needImplicitCallBailoutChecksForJsArrayCheckHoist)
  12022. {
  12023. // If the parent loop is an array check elimination candidate, so is the current loop. Even though the current loop may
  12024. // not have array accesses, if the parent loop hoists array checks, the current loop also needs implicit call checks.
  12025. currentBlock->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
  12026. }
  12027. }
// Examines 'instr' and returns the set of JS-array invariants it may invalidate:
// no-missing-values, head segment, head segment length, array length, and
// native-array-ness. Callers use the result to kill the corresponding hoisted info.
JsArrayKills
GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
{
    Assert(instr);

    JsArrayKills kills;
    if(instr->UsesAllFields())
    {
        // Calls can (but are unlikely to) change a javascript array into an ES5 array, which may have different behavior for
        // index properties.
        kills.SetKillsAllArrays();
        return kills;
    }

    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    const bool doArraySegmentHoist = DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array));
    Assert(doArraySegmentHoist == DoArraySegmentHoist(ValueType::GetObject(ObjectType::ObjectWithArray)));
    const bool doArrayLengthHoist = DoArrayLengthHoist();
    if(!doArrayMissingValueCheckHoist && !doNativeArrayTypeSpec && !doArraySegmentHoist && !doArrayLengthHoist)
    {
        // No array-related hoisting is enabled, so nothing needs to be killed.
        return kills;
    }

    // The following operations may create missing values in an array in an unlikely circumstance. Even though they don't kill
    // the fact that the 'this' parameter is an array (when implicit calls are disabled), we don't have a way to say the value
    // type is definitely array but it likely has no missing values. So, these will kill the definite value type as well, making
    // it likely array, such that the array checks will have to be redone.
    const bool useValueTypes = !IsLoopPrePass(); // Source value types are not guaranteed to be correct in a loop prepass

    switch(instr->m_opcode)
    {
        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        {
            Assert(instr->GetDst());
            if(!instr->GetDst()->IsIndirOpnd())
            {
                break;
            }
            const ValueType baseValueType =
                useValueTypes ? instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType() : ValueType::Uninitialized;
            if(useValueTypes && baseValueType.IsNotArrayOrObjectWithArray())
            {
                break;
            }
            if(instr->IsProfiledInstr())
            {
                // Use the store-element profile data to decide which invariants the
                // store is likely to break.
                const Js::StElemInfo *const stElemInfo = instr->AsProfiledInstr()->u.stElemInfo;
                if(doArraySegmentHoist && stElemInfo->LikelyStoresOutsideHeadSegmentBounds())
                {
                    kills.SetKillsArrayHeadSegments();
                    kills.SetKillsArrayHeadSegmentLengths();
                }
                if(doArrayLengthHoist &&
                    !(useValueTypes && baseValueType.IsNotArray()) &&
                    stElemInfo->LikelyStoresOutsideArrayBounds())
                {
                    kills.SetKillsArrayLengths();
                }
            }
            break;
        }

        case Js::OpCode::DeleteElemI_A:
        case Js::OpCode::DeleteElemIStrict_A:
            Assert(instr->GetSrc1());
            if(!instr->GetSrc1()->IsIndirOpnd() ||
                (useValueTypes && instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsNotArrayOrObjectWithArray()))
            {
                break;
            }
            // Deleting an element creates a missing value and may shrink the head segment.
            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }
            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            break;

        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
        {
            Assert(instr->GetDst());

            if(!doArraySegmentHoist && !doArrayLengthHoist)
            {
                break;
            }

            // Only a store to a 'length' property can affect array invariants here.
            IR::SymOpnd *const symDst = instr->GetDst()->AsSymOpnd();
            if(!symDst->IsPropertySymOpnd())
            {
                break;
            }
            IR::PropertySymOpnd *const dst = symDst->AsPropertySymOpnd();
            if(dst->m_sym->AsPropertySym()->m_propertyId != Js::PropertyIds::length)
            {
                break;
            }
            if(useValueTypes && dst->GetPropertyOwnerValueType().IsNotArray())
            {
                // Setting the 'length' property of an object that is not an array, even if it has an internal array, does
                // not kill the head segment or head segment length of any arrays.
                break;
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            if(doArrayLengthHoist)
            {
                kills.SetKillsArrayLengths();
            }
            break;
        }

        case Js::OpCode::InlineArrayPush:
        {
            Assert(instr->GetSrc2());
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegments();
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }

            // Don't kill NativeArray, if there is no mismatch between array's type and element's type.
            if(doNativeArrayTypeSpec &&
               !(useValueTypes && arrayValueType.IsNativeArray() &&
                    ((arrayValueType.IsLikelyNativeIntArray() && instr->GetSrc2()->IsInt32()) ||
                     (arrayValueType.IsLikelyNativeFloatArray() && instr->GetSrc2()->IsFloat()))
                ) &&
               !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                kills.SetKillsNativeArrays();
            }
            break;
        }

        case Js::OpCode::InlineArrayPop:
        {
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            // Pop shrinks the array: head segment length and length change.
            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }
            break;
        }

        case Js::OpCode::CallDirect:
        {
            Assert(instr->GetSrc1());

            // Find the 'this' parameter and check if it's possible for it to be an array
            IR::Opnd *const arrayOpnd = instr->FindCallArgumentOpnd(1);
            Assert(arrayOpnd);
            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            // Dispatch on the specific array built-in helper being called; each group
            // of helpers below invalidates a particular invariant.
            const IR::JnHelperMethod helperMethod = instr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
            if(doArrayMissingValueCheckHoist)
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArraysWithNoMissingValues();
                        break;
                }
            }

            if(doArraySegmentHoist)
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArrayHeadSegments();
                        kills.SetKillsArrayHeadSegmentLengths();
                        break;
                }
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArrayLengths();
                        break;
                }
            }

            if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Slice:
                    // Currently not inlined.
                    //case IR::HelperArray_Sort:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsNativeArrays();
                        break;
                }
            }
            break;
        }

        case Js::OpCode::InitProto:
        {
            // Find the 'this' parameter and check if it's possible for it to be an array
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);
            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                kills.SetKillsNativeArrays();
            }
            break;
        }

        case Js::OpCode::InitClass:
            Assert(instr->GetSrc1());
            if (instr->GetSrc2() == nullptr)
            {
                // No extends operand, so the InitClass will not make something into a prototype
                break;
            }

            if(doNativeArrayTypeSpec)
            {
                // Class/object construction can make something a prototype
                kills.SetKillsNativeArrays();
            }
            break;

        case Js::OpCode::NewScObjectNoCtor:
            if(doNativeArrayTypeSpec)
            {
                // Class/object construction can make something a prototype
                kills.SetKillsNativeArrays();
            }
            break;
    }

    return kills;
}
  12296. GlobOptBlockData const * GlobOpt::CurrentBlockData() const
  12297. {
  12298. return &this->currentBlock->globOptData;
  12299. }
  12300. GlobOptBlockData * GlobOpt::CurrentBlockData()
  12301. {
  12302. return &this->currentBlock->globOptData;
  12303. }
  12304. void GlobOpt::CommitCapturedValuesCandidate()
  12305. {
  12306. GlobOptBlockData * globOptData = CurrentBlockData();
  12307. globOptData->changedSyms->ClearAll();
  12308. if (!this->changedSymsAfterIncBailoutCandidate->IsEmpty())
  12309. {
  12310. //
  12311. // some symbols are changed after the values for current bailout have been
  12312. // captured (GlobOpt::CapturedValues), need to restore such symbols as changed
  12313. // for following incremental bailout construction, or we will miss capturing
  12314. // values for later bailout
  12315. //
  12316. // swap changedSyms and changedSymsAfterIncBailoutCandidate
  12317. // because both are from this->alloc
  12318. BVSparse<JitArenaAllocator> * tempBvSwap = globOptData->changedSyms;
  12319. globOptData->changedSyms = this->changedSymsAfterIncBailoutCandidate;
  12320. this->changedSymsAfterIncBailoutCandidate = tempBvSwap;
  12321. }
  12322. if (globOptData->capturedValues)
  12323. {
  12324. globOptData->capturedValues->DecrementRefCount();
  12325. }
  12326. globOptData->capturedValues = globOptData->capturedValuesCandidate;
  12327. // null out capturedValuesCandidate to stop tracking symbols change for it
  12328. globOptData->capturedValuesCandidate = nullptr;
  12329. }
  12330. bool
  12331. GlobOpt::IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr)
  12332. {
  12333. // StElem is profiled with information indicating whether it will likely create a missing value in the array. In that case,
  12334. // we prefer to kill the no-missing-values information in the value so that we don't bail out in a likely circumstance.
  12335. return
  12336. (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  12337. DoArrayMissingValueCheckHoist() &&
  12338. instr->IsProfiledInstr() &&
  12339. instr->AsProfiledInstr()->u.stElemInfo->LikelyCreatesMissingValue();
  12340. }
  12341. bool
  12342. GlobOpt::NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock const * const block, const bool isForwardPass) const
  12343. {
  12344. Assert(block);
  12345. return isForwardPass && block->loop && block->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist;
  12346. }
  12347. bool
  12348. GlobOpt::PrepareForIgnoringIntOverflow(IR::Instr *const instr)
  12349. {
  12350. Assert(instr);
  12351. const bool isBoundary = instr->m_opcode == Js::OpCode::NoIntOverflowBoundary;
  12352. // Update the instruction's "int overflow matters" flag based on whether we are currently allowing ignoring int overflows.
  12353. // Some operations convert their srcs to int32s, those can still ignore int overflow.
  12354. if(instr->ignoreIntOverflowInRange)
  12355. {
  12356. instr->ignoreIntOverflowInRange = !intOverflowCurrentlyMattersInRange || OpCodeAttr::IsInt32(instr->m_opcode);
  12357. }
  12358. if(!intOverflowDoesNotMatterRange)
  12359. {
  12360. Assert(intOverflowCurrentlyMattersInRange);
  12361. // There are no more ranges of instructions where int overflow does not matter, in this block.
  12362. return isBoundary;
  12363. }
  12364. if(instr == intOverflowDoesNotMatterRange->LastInstr())
  12365. {
  12366. Assert(isBoundary);
  12367. // Reached the last instruction in the range
  12368. intOverflowCurrentlyMattersInRange = true;
  12369. intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
  12370. return isBoundary;
  12371. }
  12372. if(!intOverflowCurrentlyMattersInRange)
  12373. {
  12374. return isBoundary;
  12375. }
  12376. if(instr != intOverflowDoesNotMatterRange->FirstInstr())
  12377. {
  12378. // Have not reached the next range
  12379. return isBoundary;
  12380. }
  12381. Assert(isBoundary);
  12382. // This is the first instruction in a range of instructions where int overflow does not matter. There can be many inputs to
  12383. // instructions in the range, some of which are inputs to the range itself (that is, the values are not defined in the
  12384. // range). Ignoring int overflow is only valid for int operations, so we need to ensure that all inputs to the range are
  12385. // int (not "likely int") before ignoring any overflows in the range. Ensuring that a sym with a "likely int" value is an
  12386. // int requires a bail-out. These bail-out check need to happen before any overflows are ignored, otherwise it's too late.
  12387. // The backward pass tracked all inputs into the range. Iterate over them and verify the values, and insert lossless
  12388. // conversions to int as necessary, before the first instruction in the range. If for any reason all values cannot be
  12389. // guaranteed to be ints, the optimization will be disabled for this range.
  12390. intOverflowCurrentlyMattersInRange = false;
  12391. {
  12392. BVSparse<JitArenaAllocator> tempBv1(tempAlloc);
  12393. BVSparse<JitArenaAllocator> tempBv2(tempAlloc);
  12394. {
  12395. // Just renaming the temp BVs for this section to indicate how they're used so that it makes sense
  12396. BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
  12397. BVSparse<JitArenaAllocator> &symsToInclude = tempBv2;
  12398. #if DBG_DUMP
  12399. SymID couldNotConvertSymId = 0;
  12400. #endif
  12401. FOREACH_BITSET_IN_SPARSEBV(id, intOverflowDoesNotMatterRange->SymsRequiredToBeInt())
  12402. {
  12403. Sym *const sym = func->m_symTable->Find(id);
  12404. Assert(sym);
  12405. // Some instructions with property syms are also tracked by the backward pass, and may be included in the range
  12406. // (LdSlot for instance). These property syms don't get their values until either copy-prop resolves a value for
  12407. // them, or a new value is created once the use of the property sym is reached. In either case, we're not that
  12408. // far yet, so we need to find the future value of the property sym by evaluating copy-prop in reverse.
  12409. Value *const value = sym->IsStackSym() ? CurrentBlockData()->FindValue(sym) : CurrentBlockData()->FindFuturePropertyValue(sym->AsPropertySym());
  12410. if(!value)
  12411. {
  12412. #if DBG_DUMP
  12413. couldNotConvertSymId = id;
  12414. #endif
  12415. intOverflowCurrentlyMattersInRange = true;
  12416. BREAK_BITSET_IN_SPARSEBV;
  12417. }
  12418. const bool isInt32OrUInt32Float =
  12419. value->GetValueInfo()->IsFloatConstant() &&
  12420. Js::JavascriptNumber::IsInt32OrUInt32(value->GetValueInfo()->AsFloatConstant()->FloatValue());
  12421. if(value->GetValueInfo()->IsInt() || isInt32OrUInt32Float)
  12422. {
  12423. if(!IsLoopPrePass())
  12424. {
  12425. // Input values that are already int can be excluded from int-specialization. We can treat unsigned
  12426. // int32 values as int32 values (ignoring the overflow), since the values will only be used inside the
  12427. // range where overflow does not matter.
  12428. symsToExclude.Set(sym->m_id);
  12429. }
  12430. continue;
  12431. }
  12432. if(!DoAggressiveIntTypeSpec() || !value->GetValueInfo()->IsLikelyInt())
  12433. {
  12434. // When aggressive int specialization is off, syms with "likely int" values cannot be forced to int since
  12435. // int bail-out checks are not allowed in that mode. Similarly, with aggressive int specialization on, it
  12436. // wouldn't make sense to force non-"likely int" values to int since it would almost guarantee a bail-out at
  12437. // runtime. In both cases, just disable ignoring overflow for this range.
  12438. #if DBG_DUMP
  12439. couldNotConvertSymId = id;
  12440. #endif
  12441. intOverflowCurrentlyMattersInRange = true;
  12442. BREAK_BITSET_IN_SPARSEBV;
  12443. }
  12444. if(IsLoopPrePass())
  12445. {
  12446. // The loop prepass does not modify bit-vectors. Since it doesn't add bail-out checks, it also does not need
  12447. // to specialize anything up-front. It only needs to be consistent in how it determines whether to allow
  12448. // ignoring overflow for a range, based on the values of inputs into the range.
  12449. continue;
  12450. }
  12451. // Since input syms are tracked in the backward pass, where there is no value tracking, it will not be aware of
  12452. // copy-prop. If a copy-prop sym is available, it will be used instead, so exclude the original sym and include
  12453. // the copy-prop sym for specialization.
  12454. StackSym *const copyPropSym = CurrentBlockData()->GetCopyPropSym(sym, value);
  12455. if(copyPropSym)
  12456. {
  12457. symsToExclude.Set(sym->m_id);
  12458. Assert(!symsToExclude.Test(copyPropSym->m_id));
  12459. const bool needsToBeLossless =
  12460. !intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(sym->m_id);
  12461. if(intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(copyPropSym->m_id) ||
  12462. symsToInclude.TestAndSet(copyPropSym->m_id))
  12463. {
  12464. // The copy-prop sym is already included
  12465. if(needsToBeLossless)
  12466. {
  12467. // The original sym needs to be lossless, so make the copy-prop sym lossless as well.
  12468. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(copyPropSym->m_id);
  12469. }
  12470. }
  12471. else if(!needsToBeLossless)
  12472. {
  12473. // The copy-prop sym was not included before, and the original sym can be lossy, so make it lossy.
  12474. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(copyPropSym->m_id);
  12475. }
  12476. }
  12477. else if(!sym->IsStackSym())
  12478. {
  12479. // Only stack syms can be converted to int, and copy-prop syms are stack syms. If a copy-prop sym was not
  12480. // found for the property sym, we can't ignore overflows in this range.
  12481. #if DBG_DUMP
  12482. couldNotConvertSymId = id;
  12483. #endif
  12484. intOverflowCurrentlyMattersInRange = true;
  12485. BREAK_BITSET_IN_SPARSEBV;
  12486. }
  12487. } NEXT_BITSET_IN_SPARSEBV;
  12488. if(intOverflowCurrentlyMattersInRange)
  12489. {
  12490. #if DBG_DUMP
  12491. if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func) && !IsLoopPrePass())
  12492. {
  12493. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  12494. Output::Print(
  12495. _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u, Disabled ignoring overflows\n"),
  12496. func->GetJITFunctionBody()->GetDisplayName(),
  12497. func->GetDebugNumberSet(debugStringBuffer),
  12498. Js::PhaseNames[Js::ForwardPhase],
  12499. currentBlock->GetBlockNum());
  12500. Output::Print(_u(" Input sym could not be turned into an int: %u\n"), couldNotConvertSymId);
  12501. Output::Print(_u(" First instr: "));
  12502. instr->m_next->Dump();
  12503. Output::Flush();
  12504. }
  12505. #endif
  12506. intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
  12507. return isBoundary;
  12508. }
  12509. if(IsLoopPrePass())
  12510. {
  12511. return isBoundary;
  12512. }
  12513. // Update the syms to specialize after enumeration
  12514. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
  12515. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
  12516. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Or(&symsToInclude);
  12517. }
  12518. {
  12519. // Exclude syms that are already live as lossless int32, and exclude lossy conversions of syms that are already live
  12520. // as lossy int32.
  12521. // symsToExclude = liveInt32Syms - liveLossyInt32Syms // syms live as lossless int
  12522. // lossySymsToExclude = symsRequiredToBeLossyInt & liveLossyInt32Syms; // syms we want as lossy int that are already live as lossy int
  12523. // symsToExclude |= lossySymsToExclude
  12524. // symsRequiredToBeInt -= symsToExclude
  12525. // symsRequiredToBeLossyInt -= symsToExclude
  12526. BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
  12527. BVSparse<JitArenaAllocator> &lossySymsToExclude = tempBv2;
  12528. symsToExclude.Minus(CurrentBlockData()->liveInt32Syms, CurrentBlockData()->liveLossyInt32Syms);
  12529. lossySymsToExclude.And(
  12530. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(),
  12531. CurrentBlockData()->liveLossyInt32Syms);
  12532. symsToExclude.Or(&lossySymsToExclude);
  12533. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
  12534. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
  12535. }
  12536. #if DBG
  12537. {
  12538. // Verify that the syms to be converted are live
  12539. // liveSyms = liveInt32Syms | liveFloat64Syms | liveVarSyms
  12540. // deadSymsRequiredToBeInt = symsRequiredToBeInt - liveSyms
  12541. BVSparse<JitArenaAllocator> &liveSyms = tempBv1;
  12542. BVSparse<JitArenaAllocator> &deadSymsRequiredToBeInt = tempBv2;
  12543. liveSyms.Or(CurrentBlockData()->liveInt32Syms, CurrentBlockData()->liveFloat64Syms);
  12544. liveSyms.Or(CurrentBlockData()->liveVarSyms);
  12545. deadSymsRequiredToBeInt.Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), &liveSyms);
  12546. Assert(deadSymsRequiredToBeInt.IsEmpty());
  12547. }
  12548. #endif
  12549. }
  12550. // Int-specialize the syms before the first instruction of the range (the current instruction)
  12551. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt());
  12552. #if DBG_DUMP
  12553. if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func))
  12554. {
  12555. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  12556. Output::Print(
  12557. _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n"),
  12558. func->GetJITFunctionBody()->GetDisplayName(),
  12559. func->GetDebugNumberSet(debugStringBuffer),
  12560. Js::PhaseNames[Js::ForwardPhase],
  12561. currentBlock->GetBlockNum());
  12562. Output::Print(_u(" Input syms to be int-specialized (lossless): "));
  12563. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Dump();
  12564. Output::Print(_u(" Input syms to be converted to int (lossy): "));
  12565. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
  12566. Output::Print(_u(" First instr: "));
  12567. instr->m_next->Dump();
  12568. Output::Flush();
  12569. }
  12570. #endif
  12571. ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), currentBlock, false /* lossy */, instr);
  12572. ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(), currentBlock, true /* lossy */, instr);
  12573. return isBoundary;
  12574. }
// Verifies, for an instruction inside a compounded-int-overflow ("overflow does not matter") range, that the instruction
// was either int-specialized or is otherwise harmless to leave unspecialized in the range. If verification fails it is
// too late to undo the optimization (overflows may already have been ignored), so this bails out at compile time:
// it throws a rejit exception to recompile with int-overflow tracking disabled, or an abort exception if tracking was
// already disabled (to avoid an infinite rejit loop).
void
GlobOpt::VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr)
{
    // Nothing to verify outside a tracked range; the loop prepass does not specialize anything up-front.
    if(intOverflowCurrentlyMattersInRange || IsLoopPrePass())
    {
        return;
    }

    // A Mul_I4 inside the range must only be checking for non-32-bit overflow (see the MUL note below).
    Assert(instr->m_opcode != Js::OpCode::Mul_I4 ||
        (instr->m_opcode == Js::OpCode::Mul_I4 && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow() ));

    // Instructions that are marked as "overflow doesn't matter" in the range must guarantee that they operate on int values and
    // result in int values, for ignoring overflow to be valid. So, int-specialization is required for such instructions in the
    // range. Ld_A is an exception because it only specializes if the src sym is available as a required specialized sym, and it
    // doesn't generate bailouts or cause ignoring int overflow to be invalid.
    // MULs are allowed to start a region and have BailOutInfo since they will bailout on non-32 bit overflow.
    if(instr->m_opcode == Js::OpCode::Ld_A ||
       ((!instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::Mul_I4) &&
        (!instr->GetDst() || instr->GetDst()->IsInt32()) &&
        (!instr->GetSrc1() || instr->GetSrc1()->IsInt32()) &&
        (!instr->GetSrc2() || instr->GetSrc2()->IsInt32())))
    {
        return;
    }

    // An unspecialized instruction with no bailout and no side effects cannot invalidate ignoring overflow, so tolerate it.
    if (!instr->HasBailOutInfo() && !instr->HasAnySideEffects())
    {
        return;
    }

    // This can happen for Neg_A if it needs to bail out on negative zero, and perhaps other cases as well. It's too late to fix
    // the problem (overflows may already be ignored), so handle it by bailing out at compile-time and disabling tracking int
    // overflow.
    // NOTE: this asserts in debug builds (reaching here with tracking already disabled is unexpected), while the
    // equivalent runtime check below handles the condition gracefully in retail builds.
    Assert(!func->IsTrackCompoundedIntOverflowDisabled());

    if(PHASE_TRACE(Js::BailOutPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(
            _u("BailOut (compile-time): function: %s (%s) instr: "),
            func->GetJITFunctionBody()->GetDisplayName(),
            func->GetDebugNumberSet(debugStringBuffer));
#if DBG_DUMP
        instr->Dump();
#else
        Output::Print(_u("%s "), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
#endif
        Output::Print(_u("(overflow does not matter but could not int-spec or needed bailout)\n"));
        Output::Flush();
    }

    if(func->IsTrackCompoundedIntOverflowDisabled())
    {
        // Tracking int overflows is already off for some reason. Prevent trying to rejit again because it won't help and the
        // same thing will happen again and cause an infinite loop. Just abort jitting this function.
        if(PHASE_TRACE(Js::BailOutPhase, this->func))
        {
            Output::Print(_u(" Aborting JIT because TrackIntOverflow is already off\n"));
            Output::Flush();
        }
        throw Js::OperationAbortedException();
    }

    throw Js::RejitException(RejitReason::TrackIntOverflowDisabled);
}
// It makes lowering easier if it can assume that the first src is never a constant,
// at least for commutative operators. For non-commutative, just hoist the constant.
//
// This pass either swaps src1/src2 (for commutative or mirror-able operators, also swapping the tracked values via
// pSrc1Val/pSrc2Val), or hoists an immediate src1 into a separate load instruction so that src1 ends up in a register.
void
GlobOpt::PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val)
{
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    // Decide whether a swap is even desirable for this operand shape; fall through to the opcode switch if so.
    if (src1->IsImmediateOpnd())
    {
        // Swap for dst, src
    }
    else if (src2 && dst && src2->IsRegOpnd())
    {
        if (src2->GetIsDead() && !src1->GetIsDead() && !src1->IsEqual(dst))
        {
            // Swap if src2 is dead, as the reg can be reuse for the dst for opEqs like on x86 (ADD r1, r2)
        }
        else if (src2->IsEqual(dst))
        {
            // Helps lowering of opEqs
        }
        else
        {
            return;
        }
        // Make sure we don't swap 2 srcs with valueOf calls.
        if (OpCodeAttr::OpndHasImplicitCall(instr->m_opcode))
        {
            if (instr->IsBranchInstr())
            {
                if (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive())
                {
                    return;
                }
            }
            else if (!src1->GetValueType().IsPrimitive() && !src2->GetValueType().IsPrimitive())
            {
                return;
            }
        }
    }
    else
    {
        return;
    }

    // For non-commutative comparisons, `opcode` is first replaced with the mirrored opcode (e.g. BrGe -> BrLe) so that
    // swapping the operands preserves semantics; swap_srcs then installs it and swaps both operands and values.
    Js::OpCode opcode = instr->m_opcode;
    switch (opcode)
    {
    case Js::OpCode::And_A:
    case Js::OpCode::Mul_A:
    case Js::OpCode::Or_A:
    case Js::OpCode::Xor_A:
    case Js::OpCode::And_I4:
    case Js::OpCode::Mul_I4:
    case Js::OpCode::Or_I4:
    case Js::OpCode::Xor_I4:
    case Js::OpCode::Add_I4:
swap_srcs:
        // Only swap if the new src1 (the current src2) would not itself be an immediate.
        if (!instr->GetSrc2()->IsImmediateOpnd())
        {
            instr->m_opcode = opcode;
            instr->SwapOpnds();

            Value *tempVal = *pSrc1Val;
            *pSrc1Val = *pSrc2Val;
            *pSrc2Val = tempVal;
            return;
        }
        break;

    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrEq_I4:
        goto swap_srcs;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNeq_I4:
        goto swap_srcs;

    case Js::OpCode::BrGe_A:
        opcode = Js::OpCode::BrLe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGe_A:
        opcode = Js::OpCode::BrNotLe_A;
        goto swap_srcs;

    case Js::OpCode::BrGe_I4:
        opcode = Js::OpCode::BrLe_I4;
        goto swap_srcs;

    case Js::OpCode::BrGt_A:
        opcode = Js::OpCode::BrLt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGt_A:
        opcode = Js::OpCode::BrNotLt_A;
        goto swap_srcs;

    case Js::OpCode::BrGt_I4:
        opcode = Js::OpCode::BrLt_I4;
        goto swap_srcs;

    case Js::OpCode::BrLe_A:
        opcode = Js::OpCode::BrGe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLe_A:
        opcode = Js::OpCode::BrNotGe_A;
        goto swap_srcs;

    case Js::OpCode::BrLe_I4:
        opcode = Js::OpCode::BrGe_I4;
        goto swap_srcs;

    case Js::OpCode::BrLt_A:
        opcode = Js::OpCode::BrGt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLt_A:
        opcode = Js::OpCode::BrNotGt_A;
        goto swap_srcs;

    case Js::OpCode::BrLt_I4:
        opcode = Js::OpCode::BrGt_I4;
        goto swap_srcs;

    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
        // this == "" not the same as "" == this...
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        goto swap_srcs;

    case Js::OpCode::CmGe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLe_A;
        goto swap_srcs;

    case Js::OpCode::CmGt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLt_A;
        goto swap_srcs;

    case Js::OpCode::CmLe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGe_A;
        goto swap_srcs;

    case Js::OpCode::CmLt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGt_A;
        goto swap_srcs;

    case Js::OpCode::CallI:
    case Js::OpCode::CallIFixed:
    case Js::OpCode::NewScObject:
    case Js::OpCode::NewScObjectSpread:
    case Js::OpCode::NewScObjArray:
    case Js::OpCode::NewScObjArraySpread:
    case Js::OpCode::NewScObjectNoCtor:
        // Don't insert load to register if the function operand is a fixed function.
        if (instr->HasFixedFunctionAddressTarget())
        {
            return;
        }
        break;

        // Can't do add because <32 + "Hello"> isn't equal to <"Hello" + 32>
        // Lower can do the swap. Other op-codes listed below don't need immediate source hoisting, as the fast paths handle it,
        // or the lowering handles the hoisting.
    case Js::OpCode::Add_A:
        if (src1->IsFloat())
        {
            goto swap_srcs;
        }
        return;

    case Js::OpCode::Sub_I4:
    case Js::OpCode::Neg_I4:
    case Js::OpCode::Not_I4:
    case Js::OpCode::NewScFunc:
    case Js::OpCode::NewScGenFunc:
    case Js::OpCode::NewScFuncHomeObj:
    case Js::OpCode::NewScGenFuncHomeObj:
    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScIntArray:
    case Js::OpCode::NewScFltArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::NewRegEx:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::ThrowRuntimeError:
    case Js::OpCode::TrapIfMinIntOverNegOne:
    case Js::OpCode::TrapIfTruncOverflow:
    case Js::OpCode::TrapIfZero:
    case Js::OpCode::TrapIfUnalignedAccess:
    case Js::OpCode::FromVar:
    case Js::OpCode::Conv_Prim:
    case Js::OpCode::Conv_Prim_Sat:
    case Js::OpCode::LdC_A_I4:
    case Js::OpCode::LdStr:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StartCall:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_InlineSpecialized:
    case Js::OpCode::ArgOut_A_SpreadArg:
    case Js::OpCode::InlineeEnd:
    case Js::OpCode::EndCallForPolymorphicInlinee:
    case Js::OpCode::InlineeMetaArg:
    case Js::OpCode::InlineBuiltInEnd:
    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::CallHelper:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
    case Js::OpCode::RuntimeTypeError:
    case Js::OpCode::RuntimeReferenceError:
    case Js::OpCode::Ret:
    case Js::OpCode::NewScObjectSimple:
    case Js::OpCode::NewScObjectLiteral:
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::CallDirect:
    case Js::OpCode::BrNotHasSideEffects:
    case Js::OpCode::NewConcatStrMulti:
    case Js::OpCode::NewConcatStrMultiBE:
    case Js::OpCode::ExtendArg_A:
#ifdef ENABLE_DOM_FAST_PATH
    case Js::OpCode::DOMFastPathGetter:
    case Js::OpCode::DOMFastPathSetter:
#endif
    case Js::OpCode::NewScopeSlots:
    case Js::OpCode::NewScopeSlotsWithoutPropIds:
    case Js::OpCode::NewStackScopeSlots:
    case Js::OpCode::IsInst:
    case Js::OpCode::BailOnEqual:
    case Js::OpCode::BailOnNotEqual:
    case Js::OpCode::StArrViewElem:
        return;
    }

    if (!src1->IsImmediateOpnd())
    {
        return;
    }

    // The fast paths or lowering of the remaining instructions may not support handling immediate opnds for the first src. The
    // immediate src1 is hoisted here into a separate instruction.
    if (src1->IsIntConstOpnd())
    {
        IR::Instr *newInstr = instr->HoistSrc1(Js::OpCode::Ld_I4);
        ToInt32Dst(newInstr, newInstr->GetDst()->AsRegOpnd(), this->currentBlock);
    }
    else if (src1->IsInt64ConstOpnd())
    {
        instr->HoistSrc1(Js::OpCode::Ld_I4);
    }
    else
    {
        instr->HoistSrc1(Js::OpCode::Ld_A);
    }

    // Mark the new register operand as a constant so later phases know its value is fixed.
    src1 = instr->GetSrc1();
    src1->AsRegOpnd()->m_sym->SetIsConst();
}
// Clear the ValueMap of the values invalidated by this instr.
// Runs the three kill passes in order: field (property) kills, general value kills, then array-specific value kills.
void
GlobOpt::ProcessKills(IR::Instr *instr)
{
    this->ProcessFieldKills(instr);
    this->ProcessValueKills(instr);
    this->ProcessArrayValueKills(instr);
}
  12911. bool
  12912. GlobOpt::OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives)
  12913. {
  12914. if(!loop->CanHoistInvariants())
  12915. {
  12916. return false;
  12917. }
  12918. Sym *sym;
  12919. switch(src->GetKind())
  12920. {
  12921. case IR::OpndKindAddr:
  12922. case IR::OpndKindFloatConst:
  12923. case IR::OpndKindIntConst:
  12924. return true;
  12925. case IR::OpndKindReg:
  12926. sym = src->AsRegOpnd()->m_sym;
  12927. break;
  12928. case IR::OpndKindSym:
  12929. sym = src->AsSymOpnd()->m_sym;
  12930. if (src->AsSymOpnd()->IsPropertySymOpnd())
  12931. {
  12932. if (src->AsSymOpnd()->AsPropertySymOpnd()->IsTypeChecked())
  12933. {
  12934. // We do not handle hoisting these yet. We might be hoisting this across the instr with the type check protecting this one.
  12935. // And somehow, the dead-store pass now removes the type check on that instr later on...
  12936. // For CheckFixedFld, there is no benefit hoisting these if they don't have a type check as they won't generate code.
  12937. return false;
  12938. }
  12939. }
  12940. break;
  12941. case IR::OpndKindHelperCall:
  12942. // Helper calls, like the private slot getter, can be invariant.
  12943. // Consider moving more math builtin to invariant?
  12944. return HelperMethodAttributes::IsInVariant(src->AsHelperCallOpnd()->m_fnHelper);
  12945. default:
  12946. return false;
  12947. }
  12948. return OptIsInvariant(sym, block, loop, srcVal, isNotTypeSpecConv, allowNonPrimitives);
  12949. }
// Determines whether a symbol is invariant with respect to the given loop: its current value (srcVal) must match the
// value it has in the loop's landing pad, and the appropriate (var or type-specialized) version of the sym must be live
// in the landing pad so a hoisted use can reference it there.
// - srcVal: the sym's value in the current block (required; returns false if null).
// - isNotTypeSpecConv: false when asking on behalf of a type-spec conversion; tightens the defs-in-loop check and
//   relaxes the int-range comparison (see comments below).
// - allowNonPrimitives: permit hoisting even when the landing-pad value is not known to be primitive.
// - loopHeadValRef: optional out parameter receiving the landing-pad value found for the sym (nullptr if none).
bool
GlobOpt::OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef)
{
    // Redirect a missing out-param to a local so the rest of the code can assign unconditionally.
    Value *localLoopHeadVal;
    if(!loopHeadValRef)
    {
        loopHeadValRef = &localLoopHeadVal;
    }
    Value *&loopHeadVal = *loopHeadValRef;
    loopHeadVal = nullptr;

    if(!loop->CanHoistInvariants())
    {
        return false;
    }

    if (sym->IsStackSym())
    {
        if (sym->AsStackSym()->IsTypeSpec())
        {
            StackSym *varSym = sym->AsStackSym()->GetVarEquivSym(this->func);
            // Make sure the int32/float64 version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (sym->AsStackSym()->IsInt32())
            {
                Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
                if (!loop->landingPad->globOptData.liveInt32Syms->Test(varSym->m_id) ||
                    (loop->landingPad->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
                    !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)))
                {
                    // Either the int32 sym is not live in the landing pad, or it's lossy in the landing pad and the
                    // instruction's block is using the lossless version. In either case, the instruction cannot be hoisted
                    // without doing a conversion of this operand.
                    return false;
                }
            }
            else if (sym->AsStackSym()->IsFloat64())
            {
                if (!loop->landingPad->globOptData.liveFloat64Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }
            // Value tracking below is done on the var-equivalent sym.
            sym = sym->AsStackSym()->GetVarEquivSym(this->func);
        }
        else
        {
            // Make sure the var version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (!loop->landingPad->globOptData.liveVarSyms->Test(sym->m_id))
            {
                return false;
            }
        }
    }
    else if (sym->IsPropertySym())
    {
        // For a property sym, it's the base object's stack sym that must be live in the landing pad.
        if (!loop->landingPad->globOptData.liveVarSyms->Test(sym->AsPropertySym()->m_stackSym->m_id))
        {
            return false;
        }
    }
    else
    {
        return false;
    }

    // We rely on having a value.
    if (srcVal == NULL)
    {
        return false;
    }

    // A symbol is invariant if its current value is the same as it was upon entering the loop.
    loopHeadVal = loop->landingPad->globOptData.FindValue(sym);
    if (loopHeadVal == NULL || loopHeadVal->GetValueNumber() != srcVal->GetValueNumber())
    {
        return false;
    }

    // Can't hoist non-primitives, unless we have safeguards against valueof/tostring. Additionally, we need to consider
    // the value annotations on the source *before* the loop: if we hoist this instruction outside the loop, we can't
    // necessarily rely on type annotations added (and enforced) earlier in the loop's body.
    //
    // It might look as though !loopHeadVal->GetValueInfo()->IsPrimitive() implies
    // !loop->landingPad->globOptData.IsTypeSpecialized(sym), but it turns out that this is not always the case. We
    // encountered a test case in which we had previously hoisted a FromVar (to float 64) instruction, but its bailout code was
    // BailoutPrimitiveButString, rather than BailoutNumberOnly, which would have allowed us to conclude that the dest was
    // definitely a float64. Instead, it was only *likely* a float64, causing IsPrimitive to return false.
    if (!allowNonPrimitives && !loopHeadVal->GetValueInfo()->IsPrimitive() && !loop->landingPad->globOptData.IsTypeSpecialized(sym))
    {
        return false;
    }

    if(!isNotTypeSpecConv && loop->symsDefInLoop->Test(sym->m_id))
    {
        // Typically, a sym is considered invariant if it has the same value in the current block and in the loop landing pad.
        // The sym may have had a different value earlier in the loop or on the back-edge, but as long as it's reassigned to its
        // value outside the loop, it would be considered invariant in this block. Consider that case:
        //     s1 = s2[invariant]
        //     <loop start>
        //         s1 = s2[invariant]
        //                              // s1 now has the same value as in the landing pad, and is considered invariant
        //         s1 += s3
        //                              // s1 is not invariant here, or on the back-edge
        //         ++s3                 // s3 is not invariant, so the add above cannot be hoisted
        //     <loop end>
        //
        // A problem occurs at the point of (s1 += s3) when:
        //     - At (s1 = s2) inside the loop, s1 was made to be the sym store of that value. This by itself is legal, because
        //       after that transfer, s1 and s2 have the same value.
        //     - (s1 += s3) is type-specialized but s1 is not specialized in the loop header. This happens when s1 is not
        //       specialized entering the loop, and since s1 is not used before it's defined in the loop, it's not specialized
        //       on back-edges.
        //
        // With that, at (s1 += s3), the conversion of s1 to the type-specialized version would be hoisted because s1 is
        // invariant just before that instruction. Since this add is specialized, the specialized version of the sym is modified
        // in the loop without a reassignment at (s1 = s2) inside the loop, and (s1 += s3) would then use an incorrect value of
        // s1 (it would use the value of s1 from the previous loop iteration, instead of using the value of s2).
        //
        // The problem here, is that we cannot hoist the conversion of s1 into its specialized version across the assignment
        // (s1 = s2) inside the loop. So for the purposes of type specialization, don't consider a sym invariant if it has a def
        // inside the loop.
        return false;
    }

    // For values with an int range, require additionally that the range is the same as in the landing pad, as the range may
    // have been changed on this path based on branches, and int specialization and invariant hoisting may rely on the range
    // being the same. For type spec conversions, only require that if the value is an int constant in the current block, that
    // it is also an int constant with the same value in the landing pad. Other range differences don't matter for type spec.
    IntConstantBounds srcIntConstantBounds, loopHeadIntConstantBounds;
    if(srcVal->GetValueInfo()->TryGetIntConstantBounds(&srcIntConstantBounds) &&
        (isNotTypeSpecConv || srcIntConstantBounds.IsConstant()) &&
        (
            !loopHeadVal->GetValueInfo()->TryGetIntConstantBounds(&loopHeadIntConstantBounds) ||
            loopHeadIntConstantBounds.LowerBound() != srcIntConstantBounds.LowerBound() ||
            loopHeadIntConstantBounds.UpperBound() != srcIntConstantBounds.UpperBound()
        ))
    {
        return false;
    }

    // Disabling this assert, because it does not hold true when we force specialize in the loop landing pad
    //Assert((!loopHeadVal->GetValueInfo()->IsPrimitive()) || srcVal->GetValueInfo()->IsLikelyPrimitive());

    return true;
}
// Determines whether an entire instruction is invariant with respect to the given loop and may therefore be hoisted to
// the landing pad. The instruction must be CSE-able, its opcode must not be in the per-opcode exclusion list below, its
// dst (if any) must be a register, and each source operand must itself be invariant (per the operand overload).
// - forceInvariantHoisting: also consider plain loads (Ld_A/Ld_I4/LdC_A_I4), which are normally not worth hoisting.
bool
GlobOpt::OptIsInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *src1Val,
    Value *src2Val,
    bool isNotTypeSpecConv,
    const bool forceInvariantHoisting)
{
    if (!loop->CanHoistInvariants())
    {
        return false;
    }
    if (!OpCodeAttr::CanCSE(instr->m_opcode))
    {
        return false;
    }

    // Operands of instructions that may make implicit calls (valueof/tostring) must be primitive to hoist safely.
    bool allowNonPrimitives = !OpCodeAttr::OpndHasImplicitCall(instr->m_opcode);

    switch(instr->m_opcode)
    {
        // Can't legally hoist these
    case Js::OpCode::LdLen_A:
        return false;

        //Can't Hoist BailOnNotStackArgs, as it is necessary as InlineArgsOptimization relies on this opcode
        //to decide whether to throw rejit exception or not.
    case Js::OpCode::BailOnNotStackArgs:
        return false;

        // Usually not worth hoisting these
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::LdC_A_I4:
        if(!forceInvariantHoisting)
        {
            return false;
        }
        break;

        // Can't hoist these outside the function it's for. The LdArgumentsFromFrame for an inlinee depends on the inlinee meta arg
        // that holds the arguments object, which is only initialized at the start of the inlinee. So, can't hoist this outside the
        // inlinee.
    case Js::OpCode::LdArgumentsFromFrame:
        if(instr->m_func != loop->GetFunc())
        {
            return false;
        }
        break;

    case Js::OpCode::FromVar:
        // A FromVar with bailout can verify the value at runtime, so a non-primitive source is acceptable.
        if (instr->HasBailOutInfo())
        {
            allowNonPrimitives = true;
        }
        break;

    case Js::OpCode::CheckObjType:
        // Bug 11712101: If the operand is a field, ensure that its containing object type is invariant
        // before hoisting -- that is, don't hoist a CheckObjType over a DeleteFld on that object.
        // (CheckObjType only checks the operand and its immediate parent, so we don't need to go
        // any farther up the object graph.)
        Assert(instr->GetSrc1());
        PropertySym *propertySym = instr->GetSrc1()->AsPropertySymOpnd()->GetPropertySym();
        if (propertySym->HasObjectTypeSym()) {
            StackSym *objectTypeSym = propertySym->GetObjectTypeSym();
            if (!this->OptIsInvariant(objectTypeSym, block, loop, this->CurrentBlockData()->FindValue(objectTypeSym), true, true)) {
                return false;
            }
        }
        break;
    }

    // Only register dsts can be hoisted (memory dsts would have ordering/aliasing concerns).
    IR::Opnd *dst = instr->GetDst();
    if (dst && !dst->IsRegOpnd())
    {
        return false;
    }

    // Both sources (when present) must be invariant as operands.
    IR::Opnd *src1 = instr->GetSrc1();
    if (src1)
    {
        if (!this->OptIsInvariant(src1, block, loop, src1Val, isNotTypeSpecConv, allowNonPrimitives))
        {
            return false;
        }

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            if (!this->OptIsInvariant(src2, block, loop, src2Val, isNotTypeSpecConv, allowNonPrimitives))
            {
                return false;
            }
        }
    }

    return true;
}
  13182. bool
  13183. GlobOpt::OptDstIsInvariant(IR::RegOpnd *dst)
  13184. {
  13185. StackSym *dstSym = dst->m_sym;
  13186. if (dstSym->IsTypeSpec())
  13187. {
  13188. // The type-specialized sym may be single def, but not the original...
  13189. dstSym = dstSym->GetVarEquivSym(this->func);
  13190. }
  13191. return (dstSym->m_isSingleDef);
  13192. }
  13193. void
  13194. GlobOpt::OptHoistUpdateValueType(
  13195. Loop* loop,
  13196. IR::Instr* instr,
  13197. IR::Opnd** srcOpndPtr /* All code paths that change src, should update srcOpndPtr*/,
  13198. Value* opndVal)
  13199. {
  13200. if (opndVal == nullptr || instr->m_opcode == Js::OpCode::FromVar || srcOpndPtr == nullptr || *srcOpndPtr == nullptr)
  13201. {
  13202. return;
  13203. }
  13204. IR::Opnd* srcOpnd = *srcOpndPtr;
  13205. Sym* opndSym = srcOpnd->GetSym();;
  13206. if (opndSym)
  13207. {
  13208. BasicBlock* landingPad = loop->landingPad;
  13209. Value* opndValueInLandingPad = landingPad->globOptData.FindValue(opndSym);
  13210. Assert(opndVal->GetValueNumber() == opndValueInLandingPad->GetValueNumber());
  13211. ValueType opndValueTypeInLandingPad = opndValueInLandingPad->GetValueInfo()->Type();
  13212. if (srcOpnd->GetValueType() != opndValueTypeInLandingPad)
  13213. {
  13214. srcOpnd->SetValueType(opndValueTypeInLandingPad);
  13215. if (instr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE)
  13216. {
  13217. Assert(!opndSym->IsPropertySym());
  13218. Assert(!opndValueTypeInLandingPad.IsString());
  13219. Assert(instr->GetDst());
  13220. IR::RegOpnd* strOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  13221. strOpnd->SetValueType(ValueType::String);
  13222. strOpnd->SetValueTypeFixed();
  13223. IR::Instr* convPrimStrInstr =
  13224. IR::Instr::New(Js::OpCode::Conv_PrimStr, strOpnd, srcOpnd->Use(instr->m_func), instr->m_func);
  13225. instr->ReplaceSrc(srcOpnd, strOpnd);
  13226. // Replace above will free srcOpnd, so reassign it
  13227. *srcOpndPtr = srcOpnd = reinterpret_cast<IR::Opnd *>(strOpnd);
  13228. // We add ConvPrim_Str in the landingpad, and since this instruction doesn't go through the checks in OptInstr, the bailout is never added
  13229. // As we expand hoisting of instructions to new opcode, we need a better framework to handle such cases
  13230. if (IsImplicitCallBailOutCurrentlyNeeded(convPrimStrInstr, opndValueInLandingPad, nullptr, landingPad, landingPad->globOptData.liveFields->IsEmpty(), true, true))
  13231. {
  13232. EnsureBailTarget(loop);
  13233. loop->bailOutInfo->bailOutInstr->InsertBefore(convPrimStrInstr);
  13234. convPrimStrInstr = convPrimStrInstr->ConvertToBailOutInstr(convPrimStrInstr, IR::BailOutOnImplicitCallsPreOp, loop->bailOutInfo->bailOutOffset);
  13235. convPrimStrInstr->ReplaceBailOutInfo(loop->bailOutInfo);
  13236. }
  13237. else
  13238. {
  13239. if (loop->bailOutInfo->bailOutInstr)
  13240. {
  13241. loop->bailOutInfo->bailOutInstr->InsertBefore(convPrimStrInstr);
  13242. }
  13243. else
  13244. {
  13245. landingPad->InsertAfter(convPrimStrInstr);
  13246. }
  13247. }
  13248. // If we came here opndSym can't be PropertySym
  13249. return;
  13250. }
  13251. }
  13252. if (opndSym->IsPropertySym())
  13253. {
  13254. // Also fix valueInfo on objPtr
  13255. StackSym* opndObjPtrSym = opndSym->AsPropertySym()->m_stackSym;
  13256. Value* opndObjPtrSymValInLandingPad = landingPad->globOptData.FindValue(opndObjPtrSym);
  13257. ValueInfo* opndObjPtrSymValueInfoInLandingPad = opndObjPtrSymValInLandingPad->GetValueInfo();
  13258. srcOpnd->AsSymOpnd()->SetPropertyOwnerValueType(opndObjPtrSymValueInfoInLandingPad->Type());
  13259. }
  13260. }
  13261. }
// Hoists an instruction already determined to be loop-invariant out of 'block'
// and into the landing pad of 'loop' (or before the loop's shared bailout
// target, when one exists), then replicates the dst sym's liveness and value
// info into the intervening blocks so downstream glob-opt state stays
// consistent.
//   instr             - the invariant instruction to hoist
//   block             - the block currently containing instr
//   loop              - the loop out of which instr is hoisted
//   dstVal            - value of the dst in 'block' (may be null; one is created)
//   src1Val, src2Val  - values of the source operands in 'block'
//   isNotTypeSpecConv - false when instr is a type-spec conversion (ToVar/FromVar etc.)
//   lossy             - whether a conversion is lossy (lossy int32)
//   bailoutKind       - bailout kind to attach if a hoisted FromVar needs one
void
GlobOpt::OptHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *dstVal,
    Value *const src1Val,
    Value *const src2Val,
    bool isNotTypeSpecConv,
    bool lossy,
    IR::BailOutKind bailoutKind)
{
    BasicBlock *landingPad = loop->landingPad;

    // Reconcile the source operands' value types with the landing pad, and
    // clear temp-last-use info since the instruction moves past other uses.
    IR::Opnd* src1 = instr->GetSrc1();
    if (src1)
    {
        // We are hoisting this instruction possibly past other uses, which might invalidate the last use info. Clear it.
        OptHoistUpdateValueType(loop, instr, &src1, src1Val);

        if (src1->IsRegOpnd())
        {
            src1->AsRegOpnd()->m_isTempLastUse = false;
        }

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            OptHoistUpdateValueType(loop, instr, &src2, src2Val);

            if (src2->IsRegOpnd())
            {
                src2->AsRegOpnd()->m_isTempLastUse = false;
            }
        }
    }

    IR::RegOpnd *dst = instr->GetDst() ? instr->GetDst()->AsRegOpnd() : nullptr;
    if(dst)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnGe_I4:
            // These operations are a special case. They generate a lossy int value, and the var sym is initialized using
            // Conv_Bool. A sym cannot be live only as a lossy int sym, the var needs to be live as well since the lossy int
            // sym cannot be used to convert to var. We don't know however, whether the Conv_Bool will be hoisted. The idea
            // currently is that the sym is only used on the path in which it is initialized inside the loop. So, don't
            // hoist any liveness info for the dst.
            if (!this->GetIsAsmJSFunc())
            {
                lossy = true;
            }
            break;

        case Js::OpCode::FromVar:
        {
            StackSym* src1StackSym = IR::RegOpnd::TryGetStackSym(instr->GetSrc1());

            if (instr->HasBailOutInfo())
            {
                // An existing bailout must be one of the kinds a FromVar can carry.
                IR::BailOutKind instrBailoutKind = instr->GetBailOutKind();
                Assert(instrBailoutKind == IR::BailOutIntOnly ||
                    instrBailoutKind == IR::BailOutExpectingInteger ||
                    instrBailoutKind == IR::BailOutOnNotPrimitive ||
                    instrBailoutKind == IR::BailOutNumberOnly ||
                    instrBailoutKind == IR::BailOutPrimitiveButString);
            }
            else if (src1StackSym && bailoutKind != IR::BailOutInvalid)
            {
                // We may be hoisting FromVar from a region where it didn't need a bailout (src1 had a definite value type) to a region
                // where it would. In such cases, the FromVar needs a bailout based on the value type of src1 in its new position.
                Assert(!src1StackSym->IsTypeSpec());
                Value* landingPadSrc1val = landingPad->globOptData.FindValue(src1StackSym);
                Assert(src1Val->GetValueNumber() == landingPadSrc1val->GetValueNumber());

                ValueInfo *src1ValueInfo = src1Val->GetValueInfo();
                ValueInfo *landingPadSrc1ValueInfo = landingPadSrc1val->GetValueInfo();
                IRType dstType = dst->GetType();

                // Helper: retarget src1's value type to the landing pad's and
                // convert instr to a bailout instruction at the loop's shared
                // bailout point (offset only needed when already in a landing pad).
                const auto AddBailOutToFromVar = [&]()
                {
                    instr->GetSrc1()->SetValueType(landingPadSrc1val->GetValueInfo()->Type());
                    EnsureBailTarget(loop);
                    if (block->IsLandingPad())
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind, loop->bailOutInfo->bailOutOffset);
                    }
                    else
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind);
                    }
                };

                // A definite type in the source position and not a definite type in the destination (landing pad)
                // and no bailout on the instruction; we should put a bailout on the hoisted instruction.
                if (dstType == TyInt32)
                {
                    if (lossy)
                    {
                        if ((src1ValueInfo->IsPrimitive() || block->globOptData.IsTypeSpecialized(src1StackSym)) &&        // didn't need a lossy type spec bailout in the source block
                            (!landingPadSrc1ValueInfo->IsPrimitive() && !landingPad->globOptData.IsTypeSpecialized(src1StackSym))) // needs a lossy type spec bailout in the landing pad
                        {
                            bailoutKind = IR::BailOutOnNotPrimitive;
                            AddBailOutToFromVar();
                        }
                    }
                    else if (src1ValueInfo->IsInt() && !landingPadSrc1ValueInfo->IsInt())
                    {
                        AddBailOutToFromVar();
                    }
                }
                else if ((dstType == TyFloat64 && src1ValueInfo->IsNumber() && !landingPadSrc1ValueInfo->IsNumber()))
                {
                    AddBailOutToFromVar();
                }
            }

            break;
        }
        }

        if (dstVal == NULL)
        {
            dstVal = this->NewGenericValue(ValueType::Uninitialized, dst);
        }

        // ToVar/FromVar don't need a new dst because it has to be invariant if their src is invariant.
        bool dstDoesntNeedLoad = (!isNotTypeSpecConv && instr->m_opcode != Js::OpCode::LdC_A_I4);

        StackSym *varSym = dst->m_sym;

        if (varSym->IsTypeSpec())
        {
            varSym = varSym->GetVarEquivSym(this->func);
        }

        // Compare the dst's value in the loop with its value in the landing
        // pad (or whether it's defined in the loop at all) to decide whether a
        // fresh copy sym is required.
        Value *const landingPadDstVal = loop->landingPad->globOptData.FindValue(varSym);
        if(landingPadDstVal
            ? dstVal->GetValueNumber() != landingPadDstVal->GetValueNumber()
            : loop->symsDefInLoop->Test(varSym->m_id))
        {
            // We need a temp for FromVar/ToVar if dst changes in the loop.
            dstDoesntNeedLoad = false;
        }

        if (!dstDoesntNeedLoad && this->OptDstIsInvariant(dst) == false)
        {
            // Keep dst in place, hoist instr using a new dst.
            instr->UnlinkDst();

            // Set type specialization info correctly for this new sym
            StackSym *copyVarSym;
            IR::RegOpnd *copyReg;
            if (dst->m_sym->IsTypeSpec())
            {
                copyVarSym = StackSym::New(TyVar, instr->m_func);
                StackSym *copySym = copyVarSym;
                if (dst->m_sym->IsInt32())
                {
                    if(lossy)
                    {
                        // The new sym would only be live as a lossy int since we're only hoisting the store to the int version
                        // of the sym, and cannot be converted to var. It is not legal to have a sym only live as a lossy int,
                        // so don't update liveness info for this sym.
                    }
                    else
                    {
                        block->globOptData.liveInt32Syms->Set(copyVarSym->m_id);
                    }
                    copySym = copySym->GetInt32EquivSym(instr->m_func);
                }
                else if (dst->m_sym->IsFloat64())
                {
                    block->globOptData.liveFloat64Syms->Set(copyVarSym->m_id);
                    copySym = copySym->GetFloat64EquivSym(instr->m_func);
                }
                copyReg = IR::RegOpnd::New(copySym, copySym->GetType(), instr->m_func);
            }
            else
            {
                copyReg = IR::RegOpnd::New(dst->GetType(), instr->m_func);
                copyVarSym = copyReg->m_sym;
                block->globOptData.liveVarSyms->Set(copyVarSym->m_id);
            }

            // Leave a Ld_A in place (copyReg -> dst) and give instr the new dst.
            copyReg->SetValueType(dst->GetValueType());
            IR::Instr *copyInstr = IR::Instr::New(Js::OpCode::Ld_A, dst, copyReg, instr->m_func);
            copyInstr->SetByteCodeOffset(instr);
            instr->SetDst(copyReg);
            instr->InsertBefore(copyInstr);

            dst->m_sym->m_mayNotBeTempLastUse = true;
            if (instr->GetSrc1() && instr->GetSrc1()->IsImmediateOpnd())
            {
                // Propagate IsIntConst if appropriate
                switch(instr->m_opcode)
                {
                case Js::OpCode::Ld_A:
                case Js::OpCode::Ld_I4:
                case Js::OpCode::LdC_A_I4:
                    copyReg->m_sym->SetIsConst();
                    break;
                }
            }

            ValueInfo *dstValueInfo = dstVal->GetValueInfo();
            if((!dstValueInfo->GetSymStore() || dstValueInfo->GetSymStore() == varSym) && !lossy)
            {
                // The destination's value may have been transferred from one of the invariant sources, in which case we should
                // keep the sym store intact, as that sym will likely have a better lifetime than this new copy sym. For
                // instance, if we're inside a conditioned block, because we don't make the copy sym live and set its value in
                // all preceding blocks, this sym would not be live after exiting this block, causing this value to not
                // participate in copy-prop after this block.
                this->SetSymStoreDirect(dstValueInfo, copyVarSym);
            }
            block->globOptData.InsertNewValue(dstVal, copyReg);
            dst = copyReg;
        }
    }

    // Move to landing pad
    block->UnlinkInstr(instr);

    // Place before the loop's shared bailout target if one already exists,
    // otherwise at the end of the landing pad.
    if (loop->bailOutInfo->bailOutInstr)
    {
        loop->bailOutInfo->bailOutInstr->InsertBefore(instr);
    }
    else
    {
        landingPad->InsertAfter(instr);
    }

    GlobOpt::MarkNonByteCodeUsed(instr);

    if (instr->HasBailOutInfo() || instr->HasAuxBailOut())
    {
        Assert(loop->bailOutInfo);
        EnsureBailTarget(loop);

        // Copy bailout info of loop top.
        instr->ReplaceBailOutInfo(loop->bailOutInfo);
    }

    if(!dst)
    {
        return;
    }

    // The bailout info's liveness for the dst sym is not updated in loop landing pads because bailout instructions previously
    // hoisted into the loop's landing pad may bail out before the current type of the dst sym became live (perhaps due to this
    // instruction). Since the landing pad will have a shared bailout point, the bailout info cannot assume that the current
    // type of the dst sym was live during every bailout hoisted into the landing pad.
    StackSym *const dstSym = dst->m_sym;
    StackSym *const dstVarSym = dstSym->IsTypeSpec() ? dstSym->GetVarEquivSym(nullptr) : dstSym;
    Assert(dstVarSym);
    if(isNotTypeSpecConv || !loop->landingPad->globOptData.IsLive(dstVarSym))
    {
        // A new dst is being hoisted, or the same single-def dst that would not be live before this block. So, make it live and
        // update the value info with the same value info in this block.

        if(lossy)
        {
            // This is a lossy conversion to int. The instruction was given a new dst specifically for hoisting, so this new dst
            // will not be live as a var before this block. A sym cannot be live only as a lossy int sym, the var needs to be
            // live as well since the lossy int sym cannot be used to convert to var. Since the var version of the sym is not
            // going to be initialized, don't hoist any liveness info for the dst. The sym is only going to be used on the path
            // in which it is initialized inside the loop.
            Assert(dstSym->IsTypeSpec());
            Assert(dstSym->IsInt32());
            return;
        }

        // Check if the dst value was transferred from the src. If so, the value transfer needs to be replicated.
        bool isTransfer = dstVal == src1Val;

        StackSym *transferValueOfSym = nullptr;
        if(isTransfer)
        {
            Assert(instr->GetSrc1());
            if(instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *src1Sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if(src1Sym->IsTypeSpec())
                {
                    src1Sym = src1Sym->GetVarEquivSym(nullptr);
                    Assert(src1Sym);
                }
                if(dstVal == block->globOptData.FindValue(src1Sym))
                {
                    transferValueOfSym = src1Sym;
                }
            }
        }

        // SIMD_JS
        if (instr->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // Check if we should have CSE'ed this EA
            Assert(instr->GetSrc1());

            // If the dstVal symstore is not the dst itself, then we copied the Value from another expression.
            if (dstVal->GetValueInfo()->GetSymStore() != instr->GetDst()->GetStackSym())
            {
                isTransfer = true;
                transferValueOfSym = dstVal->GetValueInfo()->GetSymStore()->AsStackSym();
            }
        }

        // Walk backward from 'block' to the landing pad, making the dst live
        // and installing an appropriate value in each intervening block.
        const ValueNumber dstValueNumber = dstVal->GetValueNumber();
        ValueNumber dstNewValueNumber = InvalidValueNumber;
        for(InvariantBlockBackwardIterator it(this, block, loop->landingPad, nullptr); it.IsValid(); it.MoveNext())
        {
            BasicBlock *const hoistBlock = it.Block();
            GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

            Assert(!hoistBlockData.IsLive(dstVarSym));
            hoistBlockData.MakeLive(dstSym, lossy);

            Value *newDstValue;
            do
            {
                if(isTransfer)
                {
                    if(transferValueOfSym)
                    {
                        newDstValue = hoistBlockData.FindValue(transferValueOfSym);
                        if(newDstValue && newDstValue->GetValueNumber() == dstValueNumber)
                        {
                            break;
                        }
                    }

                    // It's a transfer, but we don't have a sym whose value number matches in the target block. Use a new value
                    // number since we don't know if there is already a value with the current number for the target block.
                    if(dstNewValueNumber == InvalidValueNumber)
                    {
                        dstNewValueNumber = NewValueNumber();
                    }
                    newDstValue = CopyValue(dstVal, dstNewValueNumber);
                    break;
                }

                newDstValue = CopyValue(dstVal, dstValueNumber);
            } while(false);

            hoistBlockData.SetValue(newDstValue, dstVarSym);
        }
        return;
    }

#if DBG
    if(instr->GetSrc1()->IsRegOpnd()) // Type spec conversion may load a constant into a dst sym
    {
        StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        Assert(srcSym != dstSym); // Type spec conversion must be changing the type, so the syms must be different
        StackSym *const srcVarSym = srcSym->IsTypeSpec() ? srcSym->GetVarEquivSym(nullptr) : srcSym;
        Assert(srcVarSym == dstVarSym); // Type spec conversion must be between variants of the same var sym
    }
#endif

    // Hoisting a type-spec conversion of a sym already live in the landing
    // pad: decide whether its value type can be narrowed along the way.
    bool changeValueType = false, changeValueTypeToInt = false;
    if(dstSym->IsTypeSpec())
    {
        if(dst->IsInt32())
        {
            if(!lossy)
            {
                Assert(
                    !instr->HasBailOutInfo() ||
                    instr->GetBailOutKind() == IR::BailOutIntOnly ||
                    instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                changeValueType = changeValueTypeToInt = true;
            }
        }
        else if (dst->IsFloat64())
        {
            if(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutNumberOnly)
            {
                changeValueType = true;
            }
        }
    }

    ValueInfo *previousValueInfoBeforeUpdate = nullptr, *previousValueInfoAfterUpdate = nullptr;
    for(InvariantBlockBackwardIterator it(
            this,
            block,
            loop->landingPad,
            dstVarSym,
            dstVal->GetValueNumber());
        it.IsValid();
        it.MoveNext())
    {
        BasicBlock *const hoistBlock = it.Block();
        GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

#if DBG
        // TODO: There are some odd cases with field hoisting where the sym is invariant in only part of the loop and the info
        // does not flow through all blocks. Un-comment the verification below after PRE replaces field hoisting.

        //// Verify that the src sym is live as the required type, and that the conversion is valid
        //Assert(IsLive(dstVarSym, &hoistBlockData));
        //if(instr->GetSrc1()->IsRegOpnd())
        //{
        //    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
        //    StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        //    if(srcSym->IsTypeSpec())
        //    {
        //        if(src->IsInt32())
        //        {
        //            Assert(hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //            Assert(!hoistBlockData.liveLossyInt32Syms->Test(dstVarSym->m_id)); // shouldn't try to convert a lossy int32 to anything
        //        }
        //        else
        //        {
        //            Assert(src->IsFloat64());
        //            Assert(hoistBlockData.liveFloat64Syms->Test(dstVarSym->m_id));
        //            if(dstSym->IsTypeSpec() && dst->IsInt32())
        //            {
        //                Assert(lossy); // shouldn't try to do a lossless conversion from float64 to int32
        //            }
        //        }
        //    }
        //    else
        //    {
        //        Assert(hoistBlockData.liveVarSyms->Test(dstVarSym->m_id));
        //    }
        //}
        //if(dstSym->IsTypeSpec() && dst->IsInt32())
        //{
        //    // If the sym is already specialized as required in the block to which we are attempting to hoist the conversion,
        //    // that info should have flowed into this block
        //    if(lossy)
        //    {
        //        Assert(!hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //    }
        //    else
        //    {
        //        Assert(!IsInt32TypeSpecialized(dstVarSym, hoistBlock));
        //    }
        //}
#endif

        hoistBlockData.MakeLive(dstSym, lossy);

        if(!changeValueType)
        {
            continue;
        }

        // Specialize the value info in this block, reusing the previous
        // block's specialized info when the blocks share the same ValueInfo.
        Value *const hoistBlockValue = it.InvariantSymValue();
        ValueInfo *const hoistBlockValueInfo = hoistBlockValue->GetValueInfo();
        if(hoistBlockValueInfo == previousValueInfoBeforeUpdate)
        {
            if(hoistBlockValueInfo != previousValueInfoAfterUpdate)
            {
                HoistInvariantValueInfo(previousValueInfoAfterUpdate, hoistBlockValue, hoistBlock);
            }
        }
        else
        {
            previousValueInfoBeforeUpdate = hoistBlockValueInfo;
            ValueInfo *const newValueInfo =
                changeValueTypeToInt
                    ? hoistBlockValueInfo->SpecializeToInt32(alloc)
                    : hoistBlockValueInfo->SpecializeToFloat64(alloc);
            previousValueInfoAfterUpdate = newValueInfo;
            ChangeValueInfo(changeValueTypeToInt ? nullptr : hoistBlock, hoistBlockValue, newValueInfo);
        }
    }
}
// Attempts to hoist an instruction out of the loop containing 'block'. If the
// instruction is invariant in that loop, it is hoisted to the outermost
// enclosing loop in which it remains invariant, byte-code uses are recorded,
// and OptHoistInvariant performs the move. Returns true if hoisting happened.
bool
GlobOpt::TryHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    bool isNotTypeSpecConv,
    const bool lossy,
    const bool forceInvariantHoisting,
    IR::BailOutKind bailoutKind)
{
    // Hoisting only happens on the real (non-prepass) optimization pass.
    Assert(!this->IsLoopPrePass());

    if (OptIsInvariant(instr, block, block->loop, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
    {
#if DBG
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::InvariantsPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            Output::Print(_u(" **** INVARIANT *** "));
            instr->Dump();
        }
#endif
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::InvariantsPhase))
        {
            Output::Print(_u(" **** INVARIANT *** "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
        Loop *loop = block->loop;

        // Try hoisting from to outer most loop
        while (loop->parent && OptIsInvariant(instr, block, loop->parent, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
        {
            loop = loop->parent;
        }

        // Record the byte code use here since we are going to move this instruction up
        if (isNotTypeSpecConv)
        {
            InsertNoImplicitCallUses(instr);
            this->CaptureByteCodeSymUses(instr);
            this->InsertByteCodeUses(instr, true);
        }
#if DBG
        else
        {
            // Type-spec conversions should carry no byte-code sym uses; verify.
            PropertySym *propertySymUse = NULL;
            NoRecoverMemoryJitArenaAllocator tempAllocator(_u("BE-GlobOpt-Temp"), this->alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
            BVSparse<JitArenaAllocator> * tempByteCodeUse = JitAnew(&tempAllocator, BVSparse<JitArenaAllocator>, &tempAllocator);
            GlobOpt::TrackByteCodeSymUsed(instr, tempByteCodeUse, &propertySymUse);
            Assert(tempByteCodeUse->Count() == 0 && propertySymUse == NULL);
        }
#endif
        OptHoistInvariant(instr, block, loop, dstVal, src1Val, src2Val, isNotTypeSpecConv, lossy, bailoutKind);
        return true;
    }

    return false;
}
// Iterates backward over the lexical block list from just below
// exclusiveBeginBlock down to (and including) inclusiveEndBlock, optionally
// restricted to flow predecessors (followFlow) and, when invariantSym is
// given, to blocks where that sym still holds invariantSymValueNumber.
InvariantBlockBackwardIterator::InvariantBlockBackwardIterator(
    GlobOpt *const globOpt,
    BasicBlock *const exclusiveBeginBlock,
    BasicBlock *const inclusiveEndBlock,
    StackSym *const invariantSym,
    const ValueNumber invariantSymValueNumber,
    bool followFlow)
    : globOpt(globOpt),
    exclusiveEndBlock(inclusiveEndBlock->prev), // the block just past the last one visited
    invariantSym(invariantSym),
    invariantSymValueNumber(invariantSymValueNumber),
    block(exclusiveBeginBlock),
    blockBV(globOpt->tempAlloc),
    followFlow(followFlow)
#if DBG
    ,
    inclusiveEndBlock(inclusiveEndBlock)
#endif
{
    Assert(exclusiveBeginBlock);
    Assert(inclusiveEndBlock);
    Assert(!inclusiveEndBlock->isDeleted);
    Assert(exclusiveBeginBlock != inclusiveEndBlock);
    // Either both an invariant sym and a valid value number are supplied, or neither is.
    Assert(!invariantSym == (invariantSymValueNumber == InvalidValueNumber));

    // Position the iterator on the first valid block below exclusiveBeginBlock.
    MoveNext();
}
  13778. bool
  13779. InvariantBlockBackwardIterator::IsValid() const
  13780. {
  13781. return block != exclusiveEndBlock;
  13782. }
  13783. void
  13784. InvariantBlockBackwardIterator::MoveNext()
  13785. {
  13786. Assert(IsValid());
  13787. while(true)
  13788. {
  13789. #if DBG
  13790. BasicBlock *const previouslyIteratedBlock = block;
  13791. #endif
  13792. block = block->prev;
  13793. if(!IsValid())
  13794. {
  13795. Assert(previouslyIteratedBlock == inclusiveEndBlock);
  13796. break;
  13797. }
  13798. if (!this->UpdatePredBlockBV())
  13799. {
  13800. continue;
  13801. }
  13802. if (!this->UpdatePredBlockBV())
  13803. {
  13804. continue;
  13805. }
  13806. if(block->isDeleted)
  13807. {
  13808. continue;
  13809. }
  13810. if(!block->globOptData.HasData())
  13811. {
  13812. // This block's info has already been merged with all of its successors
  13813. continue;
  13814. }
  13815. if(!invariantSym)
  13816. {
  13817. break;
  13818. }
  13819. invariantSymValue = block->globOptData.FindValue(invariantSym);
  13820. if(!invariantSymValue || invariantSymValue->GetValueNumber() != invariantSymValueNumber)
  13821. {
  13822. // BailOnNoProfile and throw blocks are not moved outside loops. A sym table cleanup on these paths may delete the
  13823. // values. Field hoisting also has some odd cases where the hoisted stack sym is invariant in only part of the loop.
  13824. continue;
  13825. }
  13826. break;
  13827. }
  13828. }
// When following flow, restricts the backward walk to blocks that are
// transitive predecessors of the starting block. Returns false when the
// current block should be skipped; otherwise records the current block's
// predecessors (so they qualify when reached later) and returns true.
bool
InvariantBlockBackwardIterator::UpdatePredBlockBV()
{
    if (!this->followFlow)
    {
        return true;
    }

    // Track blocks we've visited to ensure that we only iterate over predecessor blocks.
    // An empty bit vector means this is the first block visited, which is trivially allowed.
    if (!this->blockBV.IsEmpty() && !this->blockBV.Test(this->block->GetBlockNum()))
    {
        return false;
    }

    // Mark this block's predecessors as eligible for later iterations.
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, this->block->GetPredList())
    {
        this->blockBV.Set(edge->GetPred()->GetBlockNum());
    } NEXT_SLISTBASECOUNTED_ENTRY;

    return true;
}
  13847. BasicBlock *
  13848. InvariantBlockBackwardIterator::Block() const
  13849. {
  13850. Assert(IsValid());
  13851. return block;
  13852. }
  13853. Value *
  13854. InvariantBlockBackwardIterator::InvariantSymValue() const
  13855. {
  13856. Assert(IsValid());
  13857. Assert(invariantSym);
  13858. return invariantSymValue;
  13859. }
  13860. void
  13861. GlobOpt::HoistInvariantValueInfo(
  13862. ValueInfo *const invariantValueInfoToHoist,
  13863. Value *const valueToUpdate,
  13864. BasicBlock *const targetBlock)
  13865. {
  13866. Assert(invariantValueInfoToHoist);
  13867. Assert(valueToUpdate);
  13868. Assert(targetBlock);
  13869. // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
  13870. Assert(!invariantValueInfoToHoist->IsJsType());
  13871. Sym *const symStore = valueToUpdate->GetValueInfo()->GetSymStore();
  13872. ValueInfo *newValueInfo;
  13873. if(invariantValueInfoToHoist->GetSymStore() == symStore)
  13874. {
  13875. newValueInfo = invariantValueInfoToHoist;
  13876. }
  13877. else
  13878. {
  13879. newValueInfo = invariantValueInfoToHoist->Copy(alloc);
  13880. this->SetSymStoreDirect(newValueInfo, symStore);
  13881. }
  13882. ChangeValueInfo(targetBlock, valueToUpdate, newValueInfo, true);
  13883. }
  13884. // static
  13885. bool
  13886. GlobOpt::DoInlineArgsOpt(Func const * func)
  13887. {
  13888. Func const * topFunc = func->GetTopFunc();
  13889. Assert(topFunc != func);
  13890. bool doInlineArgsOpt =
  13891. !PHASE_OFF(Js::InlineArgsOptPhase, topFunc) &&
  13892. !func->GetHasCalls() &&
  13893. !func->GetHasUnoptimizedArgumentsAccess() &&
  13894. func->m_canDoInlineArgsOpt;
  13895. return doInlineArgsOpt;
  13896. }
  13897. bool
  13898. GlobOpt::IsSwitchOptEnabled(Func const * func)
  13899. {
  13900. Assert(func->IsTopFunc());
  13901. return !PHASE_OFF(Js::SwitchOptPhase, func) && !func->IsSwitchOptDisabled() && func->DoGlobOpt();
  13902. }
  13903. bool
  13904. GlobOpt::IsSwitchOptEnabledForIntTypeSpec(Func const * func)
  13905. {
  13906. return IsSwitchOptEnabled(func) && !IsTypeSpecPhaseOff(func) && DoAggressiveIntTypeSpec(func);
  13907. }
  13908. bool
  13909. GlobOpt::DoConstFold() const
  13910. {
  13911. return !PHASE_OFF(Js::ConstFoldPhase, func);
  13912. }
  13913. bool
  13914. GlobOpt::IsTypeSpecPhaseOff(Func const *func)
  13915. {
  13916. return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode() || !func->DoGlobOptsForGeneratorFunc();
  13917. }
  13918. bool
  13919. GlobOpt::DoTypeSpec() const
  13920. {
  13921. return doTypeSpec;
  13922. }
  13923. bool
  13924. GlobOpt::DoAggressiveIntTypeSpec(Func const * func)
  13925. {
  13926. return
  13927. !PHASE_OFF(Js::AggressiveIntTypeSpecPhase, func) &&
  13928. !IsTypeSpecPhaseOff(func) &&
  13929. !func->IsAggressiveIntTypeSpecDisabled();
  13930. }
  13931. bool
  13932. GlobOpt::DoAggressiveIntTypeSpec() const
  13933. {
  13934. return doAggressiveIntTypeSpec;
  13935. }
  13936. bool
  13937. GlobOpt::DoAggressiveMulIntTypeSpec() const
  13938. {
  13939. return doAggressiveMulIntTypeSpec;
  13940. }
  13941. bool
  13942. GlobOpt::DoDivIntTypeSpec() const
  13943. {
  13944. return doDivIntTypeSpec;
  13945. }
  13946. // static
  13947. bool
  13948. GlobOpt::DoLossyIntTypeSpec(Func const * func)
  13949. {
  13950. return
  13951. !PHASE_OFF(Js::LossyIntTypeSpecPhase, func) &&
  13952. !IsTypeSpecPhaseOff(func) &&
  13953. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLossyIntTypeSpecDisabled());
  13954. }
  13955. bool
  13956. GlobOpt::DoLossyIntTypeSpec() const
  13957. {
  13958. return doLossyIntTypeSpec;
  13959. }
  13960. // static
  13961. bool
  13962. GlobOpt::DoFloatTypeSpec(Func const * func)
  13963. {
  13964. return
  13965. !PHASE_OFF(Js::FloatTypeSpecPhase, func) &&
  13966. !IsTypeSpecPhaseOff(func) &&
  13967. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsFloatTypeSpecDisabled()) &&
  13968. AutoSystemInfo::Data.SSE2Available();
  13969. }
  13970. bool
  13971. GlobOpt::DoFloatTypeSpec() const
  13972. {
  13973. return doFloatTypeSpec;
  13974. }
  13975. bool
  13976. GlobOpt::DoStringTypeSpec(Func const * func)
  13977. {
  13978. return !PHASE_OFF(Js::StringTypeSpecPhase, func) && !IsTypeSpecPhaseOff(func);
  13979. }
// static
// Typed array type spec: gated by its phase, the general type spec phase, and a
// profiled dynamic disable keyed on whether we're jitting a loop body.
bool
GlobOpt::DoTypedArrayTypeSpec(Func const * func)
{
    return !PHASE_OFF(Js::TypedArrayTypeSpecPhase, func) &&
        !IsTypeSpecPhaseOff(func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTypedArrayTypeSpecDisabled(func->IsLoopBody()))
#if defined(_M_IX86)
        // On x86, typed array element loads/stores rely on SSE2 instructions.
        && AutoSystemInfo::Data.SSE2Available()
#endif
        ;
}
// static
// Native (int/float) array optimization: phase switches only; no dynamic disable here.
bool
GlobOpt::DoNativeArrayTypeSpec(Func const * func)
{
    return !PHASE_OFF(Js::NativeArrayPhase, func) &&
        !IsTypeSpecPhaseOff(func)
#if defined(_M_IX86)
        // Native float arrays need SSE2 on x86.
        && AutoSystemInfo::Data.SSE2Available()
#endif
        ;
}
  14003. bool
  14004. GlobOpt::DoArrayCheckHoist(Func const * const func)
  14005. {
  14006. Assert(func->IsTopFunc());
  14007. return
  14008. !PHASE_OFF(Js::ArrayCheckHoistPhase, func) &&
  14009. !func->IsArrayCheckHoistDisabled() &&
  14010. !func->IsJitInDebugMode() && // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
  14011. func->DoGlobOptsForGeneratorFunc();
  14012. }
  14013. bool
  14014. GlobOpt::DoArrayCheckHoist() const
  14015. {
  14016. return doArrayCheckHoist;
  14017. }
// Array check hoisting for a specific base value type at a specific point.
// In addition to the function-level decision, JS arrays (and objects with
// arrays) additionally require that implicit calls can be disabled for the
// enclosing loop/function, since an implicit call could invalidate the check.
bool
GlobOpt::DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr const * const instr) const
{
    // Stack-args-optimized instructions can't take the hoisted-check bailouts.
    if(!DoArrayCheckHoist() || (instr && !IsLoopPrePass() && instr->DoStackArgsOpt()))
    {
        return false;
    }
    // This includes typed arrays, but not virtual typed arrays, whose vtable can change if the buffer goes away.
    // Note that in the virtual case the vtable check is the only way to catch this, since there's no bound check.
    if(!(baseValueType.IsLikelyArrayOrObjectWithArray() || baseValueType.IsLikelyOptimizedVirtualTypedArray()) ||
        (loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func)))
    {
        return true;
    }
    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant JS array checks
#if DBG_DUMP
    // Trace the case where only external implicit calls blocked the optimization.
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArrayCheckHoist disabled for JS arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return false;
}
  14045. bool
  14046. GlobOpt::DoArrayMissingValueCheckHoist(Func const * const func)
  14047. {
  14048. return
  14049. DoArrayCheckHoist(func) &&
  14050. !PHASE_OFF(Js::ArrayMissingValueCheckHoistPhase, func) &&
  14051. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayMissingValueCheckHoistDisabled(func->IsLoopBody()));
  14052. }
  14053. bool
  14054. GlobOpt::DoArrayMissingValueCheckHoist() const
  14055. {
  14056. return doArrayMissingValueCheckHoist;
  14057. }
  14058. bool
  14059. GlobOpt::DoArraySegmentHoist(const ValueType baseValueType, Func const * const func)
  14060. {
  14061. Assert(baseValueType.IsLikelyAnyOptimizedArray());
  14062. if(!DoArrayCheckHoist(func) || PHASE_OFF(Js::ArraySegmentHoistPhase, func))
  14063. {
  14064. return false;
  14065. }
  14066. if(!baseValueType.IsLikelyArrayOrObjectWithArray())
  14067. {
  14068. return true;
  14069. }
  14070. return
  14071. !PHASE_OFF(Js::JsArraySegmentHoistPhase, func) &&
  14072. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsJsArraySegmentHoistDisabled(func->IsLoopBody()));
  14073. }
  14074. bool
  14075. GlobOpt::DoArraySegmentHoist(const ValueType baseValueType) const
  14076. {
  14077. Assert(baseValueType.IsLikelyAnyOptimizedArray());
  14078. return baseValueType.IsLikelyArrayOrObjectWithArray() ? doJsArraySegmentHoist : doArraySegmentHoist;
  14079. }
// Typed array segment length hoisting: requires typed-array segment hoisting in
// general, plus the ability to disable implicit calls in the enclosing
// loop/function (an implicit call could detach the buffer and change the length).
bool
GlobOpt::DoTypedArraySegmentLengthHoist(Loop *const loop) const
{
    // Int32Array stands in for "any typed array" when querying the segment-hoist decision.
    if(!DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array)))
    {
        return false;
    }
    if(loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func))
    {
        return true;
    }
    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant typed array
    // segment length loads.
#if DBG_DUMP
    // Trace the case where only external implicit calls blocked the optimization.
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArraySegmentLengthHoist disabled for typed arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return false;
}
  14105. bool
  14106. GlobOpt::DoArrayLengthHoist(Func const * const func)
  14107. {
  14108. return
  14109. DoArrayCheckHoist(func) &&
  14110. !PHASE_OFF(Js::Phase::ArrayLengthHoistPhase, func) &&
  14111. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayLengthHoistDisabled(func->IsLoopBody()));
  14112. }
  14113. bool
  14114. GlobOpt::DoArrayLengthHoist() const
  14115. {
  14116. return doArrayLengthHoist;
  14117. }
  14118. bool
  14119. GlobOpt::DoEliminateArrayAccessHelperCall(Func *const func)
  14120. {
  14121. return DoArrayCheckHoist(func);
  14122. }
  14123. bool
  14124. GlobOpt::DoEliminateArrayAccessHelperCall() const
  14125. {
  14126. return doEliminateArrayAccessHelperCall;
  14127. }
// Decide whether an LdLen_A (load of .length) can be int-specialized. Gated by
// the phase switches, a profiled dynamic disable, stack-args opt (which can't
// take the bailout), the profiled result type, and the base's value type.
bool
GlobOpt::DoLdLenIntSpec(IR::Instr * const instr, const ValueType baseValueType)
{
    Assert(!instr || instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(!instr || instr->GetDst());
    Assert(!instr || instr->GetSrc1());
    if(PHASE_OFF(Js::LdLenIntSpecPhase, func) ||
        IsTypeSpecPhaseOff(func) ||
        (func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsLdLenIntSpecDisabled()) ||
        (instr && !IsLoopPrePass() && instr->DoStackArgsOpt()))
    {
        return false;
    }
    // The profile must say the length is likely an int, and the destination sym
    // must not be known to hold non-number values.
    if(instr &&
        instr->IsProfiledInstr() &&
        (
            !instr->AsProfiledInstr()->u.FldInfo().valueType.IsLikelyInt() ||
            instr->GetDst()->AsRegOpnd()->m_sym->m_isNotNumber
        ))
    {
        return false;
    }
    Assert(!instr || baseValueType == instr->GetSrc1()->GetValueType());
    // Strings always have an int length; optimized arrays do too, except
    // objects-with-array whose "length" property isn't the array length.
    return
        baseValueType.HasBeenString() ||
        (baseValueType.IsLikelyAnyOptimizedArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray);
}
  14155. bool
  14156. GlobOpt::DoPathDependentValues() const
  14157. {
  14158. return !PHASE_OFF(Js::Phase::PathDependentValuesPhase, func);
  14159. }
  14160. bool
  14161. GlobOpt::DoTrackRelativeIntBounds() const
  14162. {
  14163. return doTrackRelativeIntBounds;
  14164. }
  14165. bool
  14166. GlobOpt::DoBoundCheckElimination() const
  14167. {
  14168. return doBoundCheckElimination;
  14169. }
  14170. bool
  14171. GlobOpt::DoBoundCheckHoist() const
  14172. {
  14173. return doBoundCheckHoist;
  14174. }
  14175. bool
  14176. GlobOpt::DoLoopCountBasedBoundCheckHoist() const
  14177. {
  14178. return doLoopCountBasedBoundCheckHoist;
  14179. }
  14180. bool
  14181. GlobOpt::DoPowIntIntTypeSpec() const
  14182. {
  14183. return doPowIntIntTypeSpec;
  14184. }
  14185. bool
  14186. GlobOpt::DoTagChecks() const
  14187. {
  14188. return doTagChecks;
  14189. }
  14190. bool
  14191. GlobOpt::TrackArgumentsObject()
  14192. {
  14193. if (PHASE_OFF(Js::StackArgOptPhase, this->func))
  14194. {
  14195. this->CannotAllocateArgumentsObjectOnStack(nullptr);
  14196. return false;
  14197. }
  14198. return func->GetHasStackArgs();
  14199. }
// Record that the arguments object cannot be stack-allocated. If the current
// (possibly inlined) function already committed to the arg-length-and-const
// optimization, that commitment is invalid: persist the disable in the JIT
// output data and throw to force a rejit without it.
void
GlobOpt::CannotAllocateArgumentsObjectOnStack(Func * curFunc)
{
    if (curFunc != nullptr && curFunc->hasArgLenAndConstOpt)
    {
        Assert(!curFunc->GetJITOutput()->GetOutputData()->disableStackArgOpt);
        curFunc->GetJITOutput()->GetOutputData()->disableStackArgOpt = true;
        throw Js::RejitException(RejitReason::DisableStackArgLenAndConstOpt);
    }
    // Otherwise just turn off stack args for the top function.
    func->SetHasStackArgs(false);
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (PHASE_TESTTRACE(Js::StackArgOptPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(_u("Stack args disabled for function %s(%s)\n"), func->GetJITFunctionBody()->GetDisplayName(), func->GetDebugNumberSet(debugStringBuffer));
        Output::Flush();
    }
#endif
}
// Peephole pass run before the main optimization of an instruction. For
// instructions with dead fall-through (the code after them can never run),
// converts BailOnNoProfile to a bailout instruction and removes the
// unreachable code that follows. Returns the (possibly replaced) instruction.
IR::Instr *
GlobOpt::PreOptPeep(IR::Instr *instr)
{
    if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::BailOnNoProfile:
        {
            // Handle BailOnNoProfile
            if (instr->HasBailOutInfo())
            {
                if (!this->prePassLoop)
                {
                    FillBailOutInfo(this->currentBlock, instr);
                }
                // Already processed.
                return instr;
            }
            // Convert to bailout instr
            // Find the next real instruction that carries a bytecode offset so
            // the bailout can resume interpretation at a valid point.
            IR::Instr *nextBytecodeOffsetInstr = instr->GetNextRealInstrOrLabel();
            while(nextBytecodeOffsetInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
            {
                nextBytecodeOffsetInstr = nextBytecodeOffsetInstr->GetNextRealInstrOrLabel();
                Assert(!nextBytecodeOffsetInstr->IsLabelInstr());
            }
            instr = instr->ConvertToBailOutInstr(nextBytecodeOffsetInstr, IR::BailOutOnNoProfile);
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(nextBytecodeOffsetInstr);
            if (!this->currentBlock->loop)
            {
                FillBailOutInfo(this->currentBlock, instr);
            }
            else
            {
                // Inside a loop the bailout info is filled on the prepass.
                Assert(this->prePassLoop);
            }
            break;
        }
        case Js::OpCode::BailOnException:
        {
            // BailOnException only appears at the head of an optimized catch or
            // finally region; verify the expected surrounding shape.
            Assert(
                (
                    this->func->HasTry() && this->func->DoOptimizeTry() &&
                    instr->m_prev->m_opcode == Js::OpCode::Catch &&
                    instr->m_prev->m_prev->IsLabelInstr() &&
                    instr->m_prev->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeCatch
                )
                ||
                (
                    this->func->HasFinally() && this->func->DoOptimizeTry() &&
                    instr->m_prev->AsLabelInstr() &&
                    instr->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeFinally
                )
            );
            break;
        }
        case Js::OpCode::BailOnEarlyExit:
        {
            // Only emitted for optimized try-finally.
            Assert(this->func->HasFinally() && this->func->DoOptimizeTry());
            break;
        }
        default:
        {
            // Other dead-fall-through instructions inside a loop are only
            // cleaned up during the prepass.
            if(this->currentBlock->loop && !this->IsLoopPrePass())
            {
                return instr;
            }
            break;
        }
        }
        RemoveCodeAfterNoFallthroughInstr(instr);
    }
    return instr;
}
// Delete the unreachable instructions that follow a no-fall-through
// instruction in the current block, then detach the block's now-dead
// successor edges.
void
GlobOpt::RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr)
{
    if (instr != this->currentBlock->GetLastInstr())
    {
        // Remove dead code after bailout
        IR::Instr *instrDead = instr->m_next;
        IR::Instr *instrNext;
        for (; instrDead != this->currentBlock->GetLastInstr(); instrDead = instrNext)
        {
            instrNext = instrDead->m_next;
            // Never delete past FunctionExit.
            if (instrNext->m_opcode == Js::OpCode::FunctionExit)
            {
                break;
            }
            this->func->m_fg->RemoveInstr(instrDead, this);
        }
        // Remove the (former) last instruction too, and repoint the block's
        // last-instr to whatever now precedes the next block's first instr.
        IR::Instr *instrNextBlock = instrDead->m_next;
        this->func->m_fg->RemoveInstr(instrDead, this);
        this->currentBlock->SetLastInstr(instrNextBlock->m_prev);
    }
    // Cleanup dead successors
    FOREACH_SUCCESSOR_BLOCK_EDITING(deadBlock, this->currentBlock, iter)
    {
        this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);
        if (this->currentBlock->GetDataUseCount() > 0)
        {
            this->currentBlock->DecrementDataUseCount();
        }
    } NEXT_SUCCESSOR_BLOCK_EDITING;
}
  14325. void
  14326. GlobOpt::ProcessTryHandler(IR::Instr* instr)
  14327. {
  14328. Assert(instr->m_next->IsLabelInstr() && instr->m_next->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeTry);
  14329. Region* tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
  14330. BVSparse<JitArenaAllocator> * writeThroughSymbolsSet = tryRegion->writeThroughSymbolsSet;
  14331. ToVar(writeThroughSymbolsSet, this->currentBlock);
  14332. }
// Handle BrOnException / BrOnNoException branches that model exception flow.
// Removes the now-unneeded flow edges into catch/finally blocks; returns true
// if the instruction itself was removed (so the caller must not use it further).
bool
GlobOpt::ProcessExceptionHandlingEdges(IR::Instr* instr)
{
    Assert(instr->m_opcode == Js::OpCode::BrOnException || instr->m_opcode == Js::OpCode::BrOnNoException);
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        if (instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionType::RegionTypeCatch)
        {
            // BrOnException was added to model flow from try region to the catch region to assist
            // the backward pass in propagating bytecode upward exposed info from the catch block
            // to the try, and to handle break blocks. Removing it here as it has served its purpose
            // and keeping it around might also have unintended effects while merging block data for
            // the catch block's predecessors.
            // Note that the Deadstore pass will still be able to propagate bytecode upward exposed info
            // because it doesn't skip dead blocks for that.
            this->RemoveFlowEdgeToCatchBlock(instr);
            this->currentBlock->RemoveInstr(instr);
            return true;
        }
        else
        {
            // We add BrOnException from a finally region to early exit, remove that since it has served its purpose
            return this->RemoveFlowEdgeToFinallyOnExceptionBlock(instr);
        }
    }
    else if (instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        // The branch itself stays; only the exception edge is removed.
        if (instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionType::RegionTypeCatch)
        {
            this->RemoveFlowEdgeToCatchBlock(instr);
        }
        else
        {
            this->RemoveFlowEdgeToFinallyOnExceptionBlock(instr);
        }
    }
    return false;
}
// After a def inside a try/finally region, insert a ToVar of the defined sym's
// var equivalent when the sym is write-through for the region, so the handler
// sees an up-to-date var value if an exception occurs later in the region.
void
GlobOpt::InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd)
{
    if ((this->currentRegion->GetType() == RegionTypeTry || this->currentRegion->GetType() == RegionTypeFinally) &&
        dstOpnd->IsRegOpnd() && dstOpnd->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
    {
        StackSym * sym = dstOpnd->AsRegOpnd()->m_sym;
        if (sym->IsVar())
        {
            // Already a var def; nothing to sync.
            return;
        }
        StackSym * varSym = sym->GetVarEquivSym(nullptr);
        // For a finally region, the write-through set lives on its matching try region.
        if ((this->currentRegion->GetType() == RegionTypeTry && this->currentRegion->writeThroughSymbolsSet->Test(varSym->m_id)) ||
            ((this->currentRegion->GetType() == RegionTypeFinally && this->currentRegion->GetMatchingTryRegion()->writeThroughSymbolsSet->Test(varSym->m_id))))
        {
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(varSym, IRType::TyVar, instr->m_func);
            // Insert after the def (instr->m_next); no bailout needed here.
            this->ToVar(instr->m_next, regOpnd, this->currentBlock, NULL, false);
        }
    }
}
// Remove the flow edge from the appropriate predecessor block to the catch
// block reached via this BrOnException/BrOnNoException, if it still exists.
void
GlobOpt::RemoveFlowEdgeToCatchBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    BasicBlock * catchBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // Branch target is the catch block itself.
        catchBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else
    {
        Assert(instr->m_opcode == Js::OpCode::BrOnNoException);
        // The catch is reached via fall-through: either the next label is the
        // catch label, or the next (compensation) block unconditionally
        // branches to the catch label.
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();
        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeCatch)
        {
            catchBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            Assert(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional());
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchToCatchBlock = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * catchBlockLabel = branchToCatchBlock->GetTarget();
            Assert(catchBlockLabel->GetRegion()->GetType() == RegionTypeCatch);
            catchBlock = catchBlockLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }
    Assert(catchBlock);
    Assert(predBlock);
    if (this->func->m_fg->FindEdge(predBlock, catchBlock))
    {
        predBlock->RemoveDeadSucc(catchBlock, this->func->m_fg);
        // Only the current block's data use count was incremented for this edge.
        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }
}
// Remove the exception-modeling flow edge into a finally block, analogous to
// RemoveFlowEdgeToCatchBlock. Returns true unless the edge must be kept (the
// finally-to-early-exit case). For BrOnException the instruction itself is
// also removed once the edge is gone.
bool
GlobOpt::RemoveFlowEdgeToFinallyOnExceptionBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    if (instr->m_opcode == Js::OpCode::BrOnNoException && instr->AsBranchInstr()->m_brFinallyToEarlyExit)
    {
        // We add edge from finally to early exit block
        // We should not remove this edge
        // If a loop has continue, and we add edge in finally to continue
        // Break block removal can move all continues inside the loop to branch to the continue added within finally
        // If we get rid of this edge, then loop may loose all backedges
        // Ideally, doing tail duplication before globopt would enable us to remove these edges, but since we do it after globopt, keep it this way for now
        // See test1() in core/test/tryfinallytests.js
        return false;
    }
    BasicBlock * finallyBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // Branch target is the finally block itself.
        finallyBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else
    {
        Assert(instr->m_opcode == Js::OpCode::BrOnNoException);
        // The finally is reached via fall-through: either the next label is the
        // finally label, or a compensation block unconditionally branches there.
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();
        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeFinally)
        {
            finallyBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            if (!(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional()))
            {
                // Unexpected shape; leave the edge alone.
                return false;
            }
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchTofinallyBlockOrEarlyExit = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * finallyBlockLabelOrEarlyExitLabel = branchTofinallyBlockOrEarlyExit->GetTarget();
            finallyBlock = finallyBlockLabelOrEarlyExitLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }
    Assert(finallyBlock && predBlock);
    if (this->func->m_fg->FindEdge(predBlock, finallyBlock))
    {
        predBlock->RemoveDeadSucc(finallyBlock, this->func->m_fg);
        if (instr->m_opcode == Js::OpCode::BrOnException)
        {
            this->currentBlock->RemoveInstr(instr);
        }
        if (finallyBlock->GetFirstInstr()->AsLabelInstr()->IsUnreferenced())
        {
            // Traverse predBlocks of finallyBlock, if any of the preds have a different region, set m_hasNonBranchRef to true
            // If not, this label can get eliminated and an incorrect region from the predecessor can get propagated in lowered code
            // See test3() in tryfinallytests.js
            Region * finallyRegion = finallyBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
            FOREACH_PREDECESSOR_BLOCK(pred, finallyBlock)
            {
                Region * predRegion = pred->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != finallyRegion)
                {
                    finallyBlock->GetFirstInstr()->AsLabelInstr()->m_hasNonBranchRef = true;
                }
            } NEXT_PREDECESSOR_BLOCK;
        }
        // Only the current block's data use count was incremented for this edge.
        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }
    return true;
}
// Post-optimization peephole: turn dead branches and self-assignments into
// Nops when it is safe to do so (no observable side effects remain). Skipped
// entirely during the loop prepass.
IR::Instr *
GlobOpt::OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    IR::Opnd *dst, *src1, *src2;
    if (this->IsLoopPrePass())
    {
        return instr;
    }
    switch (instr->m_opcode)
    {
    case Js::OpCode::DeadBrEqual:
    case Js::OpCode::DeadBrRelational:
    case Js::OpCode::DeadBrSrEqual:
        src1 = instr->GetSrc1();
        src2 = instr->GetSrc2();
        // These branches were turned into dead branches because they were unnecessary (branch to next, ...).
        // The DeadBr are necessary in case the evaluation of the sources have side-effects.
        // If we know for sure the srcs are primitive or have been type specialized, we don't need these instructions
        if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src1->AsRegOpnd()->m_sym))) &&
            ((src2Val && src2Val->GetValueInfo()->IsPrimitive()) || (src2->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src2->AsRegOpnd()->m_sym))))
        {
            // Keep byte code uses alive for bailout before nopping.
            this->CaptureByteCodeSymUses(instr);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    case Js::OpCode::DeadBrOnHasProperty:
        src1 = instr->GetSrc1();
        if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src1->AsRegOpnd()->m_sym))))
        {
            this->CaptureByteCodeSymUses(instr);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
        src1 = instr->GetSrc1();
        dst = instr->GetDst();
        // Self-assignment (dst == src1) is a no-op; nop it, preserving the
        // bytecode-visible def via a ByteCodeUses instruction when needed.
        if (dst->IsRegOpnd() && dst->IsEqual(src1))
        {
            dst = instr->UnlinkDst();
            if (!dst->GetIsJITOptimizedReg())
            {
                IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(instr);
                bytecodeUse->SetDst(dst);
                instr->InsertAfter(bytecodeUse);
            }
            instr->FreeSrc1();
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    }
    return instr;
}
// Run OptSrc on the base and (if present) index of an indirect operand,
// updating value types and copy-propagating. Optionally returns the index's
// value through indirIndexValRef. *pInstr may be replaced by OptSrc.
void
GlobOpt::OptimizeIndirUses(IR::IndirOpnd *indirOpnd, IR::Instr * *pInstr, Value **indirIndexValRef)
{
    IR::Instr * &instr = *pInstr;
    Assert(!indirIndexValRef || !*indirIndexValRef);
    // Update value types and copy-prop the base
    OptSrc(indirOpnd->GetBaseOpnd(), &instr, nullptr, indirOpnd);
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (!indexOpnd)
    {
        // Constant-offset indir; nothing more to do.
        return;
    }
    // Update value types and copy-prop the index
    Value *indexVal = OptSrc(indexOpnd, &instr, nullptr, indirOpnd);
    if(indirIndexValRef)
    {
        *indirIndexValRef = indexVal;
    }
}
  14583. bool
  14584. GlobOpt::IsPREInstrCandidateLoad(Js::OpCode opcode)
  14585. {
  14586. switch (opcode)
  14587. {
  14588. case Js::OpCode::LdFld:
  14589. case Js::OpCode::LdFldForTypeOf:
  14590. case Js::OpCode::LdRootFld:
  14591. case Js::OpCode::LdRootFldForTypeOf:
  14592. case Js::OpCode::LdMethodFld:
  14593. case Js::OpCode::LdRootMethodFld:
  14594. case Js::OpCode::LdSlot:
  14595. case Js::OpCode::LdSlotArr:
  14596. return true;
  14597. }
  14598. return false;
  14599. }
  14600. bool
  14601. GlobOpt::IsPREInstrSequenceCandidateLoad(Js::OpCode opcode)
  14602. {
  14603. switch (opcode)
  14604. {
  14605. default:
  14606. return IsPREInstrCandidateLoad(opcode);
  14607. case Js::OpCode::Ld_A:
  14608. case Js::OpCode::BytecodeArgOutCapture:
  14609. return true;
  14610. }
  14611. }
  14612. bool
  14613. GlobOpt::IsPREInstrCandidateStore(Js::OpCode opcode)
  14614. {
  14615. switch (opcode)
  14616. {
  14617. case Js::OpCode::StFld:
  14618. case Js::OpCode::StRootFld:
  14619. case Js::OpCode::StSlot:
  14620. return true;
  14621. }
  14622. return false;
  14623. }
  14624. bool
  14625. GlobOpt::ImplicitCallFlagsAllowOpts(Loop *loop)
  14626. {
  14627. return loop->GetImplicitCallFlags() != Js::ImplicitCall_HasNoInfo &&
  14628. (((loop->GetImplicitCallFlags() & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
  14629. }
  14630. bool
  14631. GlobOpt::ImplicitCallFlagsAllowOpts(Func const *func)
  14632. {
  14633. return func->m_fg->implicitCallFlags != Js::ImplicitCall_HasNoInfo &&
  14634. (((func->m_fg->implicitCallFlags & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
  14635. }
#if DBG_DUMP
// Debug dump of the current block's sym-to-value map.
void
GlobOpt::Dump() const
{
    this->DumpSymToValueMap();
}
// Dump the sym-to-value map of the given block.
void
GlobOpt::DumpSymToValueMap(BasicBlock const * block) const
{
    Output::Print(_u("\n*** SymToValueMap ***\n"));
    block->globOptData.DumpSymToValueMap();
}
// Dump the sym-to-value map of the block currently being optimized.
void
GlobOpt::DumpSymToValueMap() const
{
    DumpSymToValueMap(this->currentBlock);
}
  14653. void
  14654. GlobOpt::DumpSymVal(int index)
  14655. {
  14656. SymID id = index;
  14657. extern Func *CurrentFunc;
  14658. Sym *sym = this->func->m_symTable->Find(id);
  14659. AssertMsg(sym, "Sym not found!!!");
  14660. Output::Print(_u("Sym: "));
  14661. sym->Dump();
  14662. Output::Print(_u("\t\tValueNumber: "));
  14663. Value * pValue = CurrentBlockData()->FindValueFromMapDirect(sym->m_id);
  14664. pValue->Dump();
  14665. Output::Print(_u("\n"));
  14666. }
  14667. void
  14668. GlobOpt::Trace(BasicBlock * block, bool before) const
  14669. {
  14670. bool globOptTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  14671. bool typeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::TypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  14672. bool floatTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FloatTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  14673. bool fieldCopyPropTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  14674. bool objTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ObjTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  14675. bool valueTableTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ValueTablePhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  14676. bool fieldPRETrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  14677. bool anyTrace = globOptTrace || typeSpecTrace || floatTypeSpecTrace || fieldCopyPropTrace || objTypeSpecTrace || valueTableTrace || fieldPRETrace;
  14678. if (!anyTrace)
  14679. {
  14680. return;
  14681. }
  14682. if (fieldPRETrace && this->IsLoopPrePass())
  14683. {
  14684. if (block->isLoopHeader && before)
  14685. {
  14686. Output::Print(_u("==== Loop Prepass block header #%-3d, Visiting Loop block head #%-3d\n"),
  14687. this->prePassLoop->GetHeadBlock()->GetBlockNum(), block->GetBlockNum());
  14688. }
  14689. }
  14690. if (!typeSpecTrace && !floatTypeSpecTrace && !valueTableTrace && !Js::Configuration::Global.flags.Verbose)
  14691. {
  14692. return;
  14693. }
  14694. if (before)
  14695. {
  14696. Output::Print(_u("========================================================================\n"));
  14697. Output::Print(_u("Begin OptBlock: Block #%-3d"), block->GetBlockNum());
  14698. if (block->loop)
  14699. {
  14700. Output::Print(_u(" Loop block header:%-3d currentLoop block head:%-3d %s"),
  14701. block->loop->GetHeadBlock()->GetBlockNum(),
  14702. this->prePassLoop ? this->prePassLoop->GetHeadBlock()->GetBlockNum() : 0,
  14703. this->IsLoopPrePass() ? _u("PrePass") : _u(""));
  14704. }
  14705. Output::Print(_u("\n"));
  14706. }
  14707. else
  14708. {
  14709. Output::Print(_u("-----------------------------------------------------------------------\n"));
  14710. Output::Print(_u("After OptBlock: Block #%-3d\n"), block->GetBlockNum());
  14711. }
  14712. if ((typeSpecTrace || floatTypeSpecTrace) && !block->globOptData.liveVarSyms->IsEmpty())
  14713. {
  14714. Output::Print(_u(" Live var syms: "));
  14715. block->globOptData.liveVarSyms->Dump();
  14716. }
  14717. if (typeSpecTrace && !block->globOptData.liveInt32Syms->IsEmpty())
  14718. {
  14719. Assert(this->tempBv->IsEmpty());
  14720. this->tempBv->Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
  14721. if(!this->tempBv->IsEmpty())
  14722. {
  14723. Output::Print(_u(" Int32 type specialized (lossless) syms: "));
  14724. this->tempBv->Dump();
  14725. }
  14726. this->tempBv->ClearAll();
  14727. if(!block->globOptData.liveLossyInt32Syms->IsEmpty())
  14728. {
  14729. Output::Print(_u(" Int32 converted (lossy) syms: "));
  14730. block->globOptData.liveLossyInt32Syms->Dump();
  14731. }
  14732. }
  14733. if (floatTypeSpecTrace && !block->globOptData.liveFloat64Syms->IsEmpty())
  14734. {
  14735. Output::Print(_u(" Float64 type specialized syms: "));
  14736. block->globOptData.liveFloat64Syms->Dump();
  14737. }
  14738. if ((fieldCopyPropTrace || objTypeSpecTrace) && this->DoFieldCopyProp(block->loop) && !block->globOptData.liveFields->IsEmpty())
  14739. {
  14740. Output::Print(_u(" Live field syms: "));
  14741. block->globOptData.liveFields->Dump();
  14742. }
  14743. if (objTypeSpecTrace || valueTableTrace)
  14744. {
  14745. Output::Print(_u(" Value table:\n"));
  14746. block->globOptData.DumpSymToValueMap();
  14747. }
  14748. if (before)
  14749. {
  14750. Output::Print(_u("-----------------------------------------------------------------------\n")); \
  14751. }
  14752. Output::Flush();
  14753. }
// Dump the effective GlobOpt settings (type-spec decisions and profiled
// implicit call flags for the function and each loop).
void
GlobOpt::TraceSettings() const
{
    Output::Print(_u("GlobOpt Settings:\r\n"));
    Output::Print(_u("    FloatTypeSpec: %s\r\n"), this->DoFloatTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u("    AggressiveIntTypeSpec: %s\r\n"), this->DoAggressiveIntTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u("    LossyIntTypeSpec: %s\r\n"), this->DoLossyIntTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u("    ArrayCheckHoist: %s\r\n"),  this->func->IsArrayCheckHoistDisabled() ? _u("disabled") : _u("enabled"));
    Output::Print(_u("    ImplicitCallFlags: %s\r\n"), Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->func->m_fg->implicitCallFlags));
    for (Loop * loop = this->func->m_fg->loopList; loop != NULL; loop = loop->next)
    {
        Output::Print(_u("        loop: %d, ImplicitCallFlags: %s\r\n"), loop->GetLoopNumber(),
            Js::DynamicProfileInfo::GetImplicitCallFlagsString(loop->GetImplicitCallFlags()));
    }
    Output::Flush();
}
#endif  // DBG_DUMP
  14771. IR::Instr *
  14772. GlobOpt::TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrLast)
  14773. {
  14774. if (!this->func->GetHasMarkTempObjects())
  14775. {
  14776. return instrLast;
  14777. }
  14778. IR::Instr * instr = instrStart;
  14779. IR::Instr * instrEnd = instrLast->m_next;
  14780. IR::Instr * lastInstr = nullptr;
  14781. GlobOptBlockData& globOptData = *CurrentBlockData();
  14782. do
  14783. {
  14784. bool mayNeedBailOnImplicitCallsPreOp = !this->IsLoopPrePass()
  14785. && instr->HasAnyImplicitCalls()
  14786. && globOptData.maybeTempObjectSyms != nullptr;
  14787. if (mayNeedBailOnImplicitCallsPreOp)
  14788. {
  14789. IR::Opnd * src1 = instr->GetSrc1();
  14790. if (src1)
  14791. {
  14792. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src1, false);
  14793. IR::Opnd * src2 = instr->GetSrc2();
  14794. if (src2)
  14795. {
  14796. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src2, false);
  14797. }
  14798. }
  14799. }
  14800. IR::Opnd *dst = instr->GetDst();
  14801. if (dst)
  14802. {
  14803. if (dst->IsRegOpnd())
  14804. {
  14805. TrackTempObjectSyms(instr, dst->AsRegOpnd());
  14806. }
  14807. else if (mayNeedBailOnImplicitCallsPreOp)
  14808. {
  14809. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, dst, true);
  14810. }
  14811. }
  14812. lastInstr = instr;
  14813. instr = instr->m_next;
  14814. }
  14815. while (instr != instrEnd);
  14816. return lastInstr;
  14817. }
// Update the per-block tracking of syms that may refer to mark-temp (stack-allocated)
// objects, based on the definition of 'opnd' by 'instr'. Maintains two bit vectors in
// the block data: maybeTempObjectSyms (sym may be a temp object) and
// canStoreTempObjectSyms (other temps may be stored into the object), plus the map of
// in-progress stack object-literal field initialization.
void
GlobOpt::TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd)
{
    // If it is marked as dstIsTempObject, we should have mark temped it, or type specialized it to Ld_I4.
    Assert(!instr->dstIsTempObject || ObjectTempVerify::CanMarkTemp(instr, nullptr));
    GlobOptBlockData& globOptData = *CurrentBlockData();
    bool canStoreTemp = false;
    bool maybeTemp = false;
    if (OpCodeAttr::TempObjectProducing(instr->m_opcode))
    {
        // This instruction allocates a new object; it is a temp only if marked so.
        maybeTemp = instr->dstIsTempObject;

        // We have to make sure that lower will always generate code to do stack allocation
        // before we can store any other stack instance onto it. Otherwise, we would not
        // walk object to box the stack property.
        canStoreTemp = instr->dstIsTempObject && ObjectTemp::CanStoreTemp(instr);
    }
    else if (OpCodeAttr::TempObjectTransfer(instr->m_opcode))
    {
        // Need to check both sources, GetNewScObject has two srcs for transfer.
        // No need to get var equiv sym here as transfer of type spec value does not transfer a mark temp object.
        // maybeTemp propagates if EITHER source may be temp; canStoreTemp only if ALL sources allow it.
        maybeTemp = globOptData.maybeTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            || (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id)));

        canStoreTemp = globOptData.canStoreTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            && (!instr->GetSrc2() || (instr->GetSrc2()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))));

        AssertOrFailFast(!canStoreTemp || instr->dstIsTempObject);
        AssertOrFailFast(!maybeTemp || instr->dstIsTempObject);
    }

    // Need to get the var equiv sym as assignment of type specialized sym kill the var sym value anyway.
    StackSym * sym = opnd->m_sym;
    if (!sym->IsVar())
    {
        sym = sym->GetVarEquivSym(nullptr);
        if (sym == nullptr)
        {
            // No var-equivalent sym: nothing to track.
            return;
        }
    }

    SymID symId = sym->m_id;
    if (maybeTemp)
    {
        // Only var sym should be temp objects
        Assert(opnd->m_sym == sym);

        if (globOptData.maybeTempObjectSyms == nullptr)
        {
            // Lazily allocate the tracking bit vector on first use.
            globOptData.maybeTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        }
        globOptData.maybeTempObjectSyms->Set(symId);

        if (canStoreTemp)
        {
            if (instr->m_opcode == Js::OpCode::NewScObjectLiteral && !this->IsLoopPrePass())
            {
                // For object literal, we install the final type up front.
                // If there are bailout before we finish initializing all the fields, we need to
                // zero out the rest if we stack allocate the literal, so that the boxing would not
                // try to box trash pointer in the properties.
                // Although object literal initialization can be done lexically, BailOnNoProfile may cause some path
                // to disappear. Doing it flow-based makes it easier to stop propagating those entries.
                IR::IntConstOpnd * propertyArrayIdOpnd = instr->GetSrc1()->AsIntConstOpnd();
                const Js::PropertyIdArray * propIds = instr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());

                // Duplicates are removed by parser
                Assert(!propIds->hadDuplicates);

                if (globOptData.stackLiteralInitFldDataMap == nullptr)
                {
                    globOptData.stackLiteralInitFldDataMap = JitAnew(alloc, StackLiteralInitFldDataMap, alloc);
                }
                else
                {
                    Assert(!globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
                }
                // Start tracking this literal's field initialization from the first property.
                StackLiteralInitFldData data = { propIds, 0};
                globOptData.stackLiteralInitFldDataMap->AddNew(sym, data);
            }

            if (globOptData.canStoreTempObjectSyms == nullptr)
            {
                globOptData.canStoreTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
            }
            globOptData.canStoreTempObjectSyms->Set(symId);
        }
        else if (globOptData.canStoreTempObjectSyms)
        {
            // Still maybe-temp, but stores of other temps are no longer allowed.
            globOptData.canStoreTempObjectSyms->Clear(symId);
        }
    }
    else
    {
        // The sym is redefined to something that is definitely not a temp object:
        // clear it from both tracking sets.
        Assert(!canStoreTemp);
        if (globOptData.maybeTempObjectSyms)
        {
            if (globOptData.canStoreTempObjectSyms)
            {
                globOptData.canStoreTempObjectSyms->Clear(symId);
            }
            globOptData.maybeTempObjectSyms->Clear(symId);
        }
        else
        {
            // canStoreTempObjectSyms is always a subset of maybeTempObjectSyms.
            Assert(!globOptData.canStoreTempObjectSyms);
        }

        // The symbol is being assigned to, the sym shouldn't still be in the stackLiteralInitFldDataMap
        Assert(this->IsLoopPrePass() ||
            globOptData.stackLiteralInitFldDataMap == nullptr
            || globOptData.stackLiteralInitFldDataMap->Count() == 0
            || !globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
    }
}
// If 'opnd' (a src, or a non-register dst when isDst is true) may reference a
// mark-temp object, attach or insert a BailOutMarkTempObject bailout on 'instr'
// and mark the operand as can-store-temp where appropriate. Also advances the
// per-literal InitFld tracking for stack object literals. Returns the (possibly
// replaced) instruction.
IR::Instr *
GlobOpt::GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst)
{
    Assert(opnd);
    Assert(isDst == (opnd == instr->GetDst()));
    Assert(opnd != instr->GetDst() || !opnd->IsRegOpnd());
    Assert(!this->IsLoopPrePass());
    Assert(instr->HasAnyImplicitCalls());

    // Only dst reg opnd opcode or ArgOut_A should have dstIsTempObject marked
    Assert(!isDst || !instr->dstIsTempObject || instr->m_opcode == Js::OpCode::ArgOut_A);

    // Post-op implicit call bailout shouldn't have been installed yet
    Assert(!instr->HasBailOutInfo() || (instr->GetBailOutKind() & IR::BailOutKindBits) != IR::BailOutOnImplicitCalls);

    GlobOptBlockData& globOptData = *CurrentBlockData();
    Assert(globOptData.maybeTempObjectSyms != nullptr);

    IR::PropertySymOpnd * propertySymOpnd = nullptr;
    StackSym * stackSym = ObjectTemp::GetStackSym(opnd, &propertySymOpnd);

    // It is okay to not get the var equiv sym here, as use of a type specialized sym is not use of the temp object
    // so no need to add mark temp bailout.
    // maybeTempObjectSyms doesn't contain any type spec sym, so we will get false here for all type spec syms.
    if (stackSym && globOptData.maybeTempObjectSyms->Test(stackSym->m_id))
    {
        if (instr->HasBailOutInfo())
        {
            // Piggy-back on the existing bailout rather than inserting a new one.
            instr->SetBailOutKind(instr->GetBailOutKind() | IR::BailOutMarkTempObject);
        }
        else
        {
            // Insert the pre-op bailout unless this is a type-check-protected direct
            // field access (SetTypeCheckBailout will clear it for direct field access).
            // Don't check the dst yet.
            if (isDst
                || (instr->m_opcode == Js::OpCode::FromVar && !opnd->GetValueType().IsPrimitive())
                || propertySymOpnd == nullptr
                || !propertySymOpnd->IsTypeCheckProtected())
            {
                this->GenerateBailAtOperation(&instr, IR::BailOutMarkTempObject);
            }
        }

        if (!opnd->IsRegOpnd() && (!isDst || (globOptData.canStoreTempObjectSyms && globOptData.canStoreTempObjectSyms->Test(stackSym->m_id))))
        {
            // If this opnd is a dst, that means that the object pointer is a stack object,
            // and we can store temp object/number on it.
            // If the opnd is a src, that means that the object pointer may be a stack object
            // so the load may be a temp object/number and we need to track its use.

            // Don't mark start of indir as can store temp, because we don't actually know
            // what it is assigning to.
            if (!isDst || !opnd->IsIndirOpnd())
            {
                opnd->SetCanStoreTemp();
            }

            if (propertySymOpnd)
            {
                // Track initfld of stack literals
                if (isDst && instr->m_opcode == Js::OpCode::InitFld)
                {
                    const Js::PropertyId propertyId = propertySymOpnd->m_sym->AsPropertySym()->m_propertyId;

                    // We don't need to track numeric properties init
                    if (!this->func->GetThreadContextInfo()->IsNumericProperty(propertyId))
                    {
                        DebugOnly(bool found = false);
                        // Advance this literal's InitFld progress; remove the entry once every
                        // property has been initialized (RemoveIf removes when the lambda returns true).
                        globOptData.stackLiteralInitFldDataMap->RemoveIf(stackSym,
                            [&](StackSym * key, StackLiteralInitFldData & data)
                        {
                            DebugOnly(found = true);
                            Assert(key == stackSym);
                            Assert(data.currentInitFldCount < data.propIds->count);

                            if (data.propIds->elements[data.currentInitFldCount] != propertyId)
                            {
#if DBG
                                // Out-of-order property can only mean a duplicate initialization
                                // of an earlier property; verify that in debug builds.
                                bool duplicate = false;
                                for (uint i = 0; i < data.currentInitFldCount; i++)
                                {
                                    if (data.propIds->elements[i] == propertyId)
                                    {
                                        duplicate = true;
                                        break;
                                    }
                                }
                                Assert(duplicate);
#endif
                                // duplicate initialization
                                return false;
                            }
                            bool finished = (++data.currentInitFldCount == data.propIds->count);
#if DBG
                            if (finished)
                            {
                                // We can still track the finished stack literal InitFld lexically.
                                this->finishedStackLiteralInitFld->Set(stackSym->m_id);
                            }
#endif
                            return finished;
                        });
                        // We might still see InitFld even when we have finished with all the property Ids
                        // because of duplicate entries at the end
                        Assert(found || finishedStackLiteralInitFld->Test(stackSym->m_id));
                    }
                }
            }
        }
    }
    return instr;
}
  15027. LoopCount *
  15028. GlobOpt::GetOrGenerateLoopCountForMemOp(Loop *loop)
  15029. {
  15030. LoopCount *loopCount = loop->loopCount;
  15031. if (loopCount && !loopCount->HasGeneratedLoopCountSym())
  15032. {
  15033. Assert(loop->bailOutInfo);
  15034. EnsureBailTarget(loop);
  15035. GenerateLoopCountPlusOne(loop, loopCount);
  15036. }
  15037. return loopCount;
  15038. }
  15039. IR::Opnd *
  15040. GlobOpt::GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr)
  15041. {
  15042. LoopCount *loopCount = loop->loopCount;
  15043. IR::Opnd *sizeOpnd = nullptr;
  15044. Assert(loopCount);
  15045. Assert(loop->memOpInfo->inductionVariableOpndPerUnrollMap);
  15046. if (loop->memOpInfo->inductionVariableOpndPerUnrollMap->TryGetValue(unroll, &sizeOpnd))
  15047. {
  15048. return sizeOpnd;
  15049. }
  15050. Func *localFunc = loop->GetFunc();
  15051. const auto InsertInstr = [&](IR::Instr *instr)
  15052. {
  15053. if (insertBeforeInstr == nullptr)
  15054. {
  15055. loop->landingPad->InsertAfter(instr);
  15056. }
  15057. else
  15058. {
  15059. insertBeforeInstr->InsertBefore(instr);
  15060. }
  15061. };
  15062. if (loopCount->LoopCountMinusOneSym())
  15063. {
  15064. IRType type = loopCount->LoopCountSym()->GetType();
  15065. // Loop count is off by one, so add one
  15066. IR::RegOpnd *loopCountOpnd = IR::RegOpnd::New(loopCount->LoopCountSym(), type, localFunc);
  15067. sizeOpnd = loopCountOpnd;
  15068. if (unroll != 1)
  15069. {
  15070. sizeOpnd = IR::RegOpnd::New(TyUint32, this->func);
  15071. IR::Opnd *unrollOpnd = IR::IntConstOpnd::New(unroll, type, localFunc);
  15072. InsertInstr(IR::Instr::New(Js::OpCode::Mul_I4,
  15073. sizeOpnd,
  15074. loopCountOpnd,
  15075. unrollOpnd,
  15076. localFunc));
  15077. }
  15078. }
  15079. else
  15080. {
  15081. uint size = (loopCount->LoopCountMinusOneConstantValue() + 1) * unroll;
  15082. sizeOpnd = IR::IntConstOpnd::New(size, IRType::TyUint32, localFunc);
  15083. }
  15084. loop->memOpInfo->inductionVariableOpndPerUnrollMap->Add(unroll, sizeOpnd);
  15085. return sizeOpnd;
  15086. }
  15087. IR::RegOpnd*
  15088. GlobOpt::GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr)
  15089. {
  15090. IR::RegOpnd *startIndexOpnd = nullptr;
  15091. Func *localFunc = loop->GetFunc();
  15092. IRType type = indexOpnd->GetType();
  15093. const int cacheIndex = ((int)isInductionVariableChangeIncremental << 1) | (int)bIndexAlreadyChanged;
  15094. if (loop->memOpInfo->startIndexOpndCache[cacheIndex])
  15095. {
  15096. return loop->memOpInfo->startIndexOpndCache[cacheIndex];
  15097. }
  15098. const auto InsertInstr = [&](IR::Instr *instr)
  15099. {
  15100. if (insertBeforeInstr == nullptr)
  15101. {
  15102. loop->landingPad->InsertAfter(instr);
  15103. }
  15104. else
  15105. {
  15106. insertBeforeInstr->InsertBefore(instr);
  15107. }
  15108. };
  15109. startIndexOpnd = IR::RegOpnd::New(type, localFunc);
  15110. // If the 2 are different we can simply use indexOpnd
  15111. if (isInductionVariableChangeIncremental != bIndexAlreadyChanged)
  15112. {
  15113. InsertInstr(IR::Instr::New(Js::OpCode::Ld_A,
  15114. startIndexOpnd,
  15115. indexOpnd,
  15116. localFunc));
  15117. }
  15118. else
  15119. {
  15120. // Otherwise add 1 to it
  15121. InsertInstr(IR::Instr::New(Js::OpCode::Add_I4,
  15122. startIndexOpnd,
  15123. indexOpnd,
  15124. IR::IntConstOpnd::New(1, type, localFunc, true),
  15125. localFunc));
  15126. }
  15127. if (!isInductionVariableChangeIncremental)
  15128. {
  15129. InsertInstr(IR::Instr::New(Js::OpCode::Sub_I4,
  15130. startIndexOpnd,
  15131. startIndexOpnd,
  15132. sizeOpnd,
  15133. localFunc));
  15134. }
  15135. loop->memOpInfo->startIndexOpndCache[cacheIndex] = startIndexOpnd;
  15136. return startIndexOpnd;
  15137. }
  15138. IR::Instr*
  15139. GlobOpt::FindUpperBoundsCheckInstr(IR::Instr* fromInstr)
  15140. {
  15141. IR::Instr *upperBoundCheck = fromInstr;
  15142. do
  15143. {
  15144. upperBoundCheck = upperBoundCheck->m_prev;
  15145. Assert(upperBoundCheck);
  15146. Assert(!upperBoundCheck->IsLabelInstr());
  15147. } while (upperBoundCheck->m_opcode != Js::OpCode::BoundCheck);
  15148. return upperBoundCheck;
  15149. }
  15150. IR::Instr*
  15151. GlobOpt::FindArraySegmentLoadInstr(IR::Instr* fromInstr)
  15152. {
  15153. IR::Instr *headSegmentLengthLoad = fromInstr;
  15154. do
  15155. {
  15156. headSegmentLengthLoad = headSegmentLengthLoad->m_prev;
  15157. Assert(headSegmentLengthLoad);
  15158. Assert(!headSegmentLengthLoad->IsLabelInstr());
  15159. } while (headSegmentLengthLoad->m_opcode != Js::OpCode::LdIndir);
  15160. return headSegmentLengthLoad;
  15161. }
// After a Memset/Memcopy has been emitted, remove the original element access
// instruction 'srcInstr' (StElemI_A* or LdElemI_A) and the helper instructions
// that were generated for it (bound checks, segment loads, array bailouts),
// converting the access itself into ByteCodeUses so bailout info stays correct.
void
GlobOpt::RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block)
{
    Assert(srcInstr && (srcInstr->m_opcode == Js::OpCode::LdElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict));
    Assert(memopInstr && (memopInstr->m_opcode == Js::OpCode::Memcopy || memopInstr->m_opcode == Js::OpCode::Memset));
    Assert(block);

    // A store corresponds to the memop's dst side; a load to its src side.
    const bool isDst = srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict;
    IR::RegOpnd* opnd = (isDst ? memopInstr->GetDst() : memopInstr->GetSrc1())->AsIndirOpnd()->GetBaseOpnd();
    IR::ArrayRegOpnd* arrayOpnd = opnd->IsArrayRegOpnd() ? opnd->AsArrayRegOpnd() : nullptr;

    // 'topInstr' is pushed upwards to the first helper instruction belonging to
    // this access; everything in (topInstr, srcInstr) is then removed below.
    IR::Instr* topInstr = srcInstr;
    if (srcInstr->extractedUpperBoundCheckWithoutHoisting)
    {
        IR::Instr *upperBoundCheck = FindUpperBoundsCheckInstr(srcInstr);
        Assert(upperBoundCheck && upperBoundCheck != srcInstr);
        topInstr = upperBoundCheck;
    }

    if (srcInstr->loadedArrayHeadSegmentLength && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        IR::Instr *arrayLoadSegmentHeadLength = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHeadLength);
        topInstr = arrayLoadSegmentHeadLength;
        arrayOpnd->RemoveHeadSegmentLengthSym();
    }

    if (srcInstr->loadedArrayHeadSegment && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        IR::Instr *arrayLoadSegmentHead = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHead);
        topInstr = arrayLoadSegmentHead;
        arrayOpnd->RemoveHeadSegmentSym();
    }

    // If no bounds check are present, simply look up for instruction added for instrumentation
    if(topInstr == srcInstr)
    {
        bool checkPrev = true;
        while (checkPrev)
        {
            switch (topInstr->m_prev->m_opcode)
            {
            case Js::OpCode::BailOnNotArray:
            case Js::OpCode::NoImplicitCallUses:
            case Js::OpCode::ByteCodeUses:
                topInstr = topInstr->m_prev;
                checkPrev = !!topInstr->m_prev;
                break;
            default:
                checkPrev = false;
                break;
            }
        }
    }

    // Remove everything between topInstr and srcInstr, keeping ByteCodeUses
    // (those still describe byte-code-level uses needed for bailout).
    while (topInstr != srcInstr)
    {
        IR::Instr* removeInstr = topInstr;
        topInstr = topInstr->m_next;

        Assert(
            removeInstr->m_opcode == Js::OpCode::BailOnNotArray ||
            removeInstr->m_opcode == Js::OpCode::NoImplicitCallUses ||
            removeInstr->m_opcode == Js::OpCode::ByteCodeUses ||
            removeInstr->m_opcode == Js::OpCode::LdIndir ||
            removeInstr->m_opcode == Js::OpCode::BoundCheck
        );
        if (removeInstr->m_opcode != Js::OpCode::ByteCodeUses)
        {
            block->RemoveInstr(removeInstr);
        }
    }

    // Finally, replace the element access itself with its byte-code uses.
    this->ConvertToByteCodeUses(srcInstr);
}
  15230. void
  15231. GlobOpt::GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType)
  15232. {
  15233. Assert(instr && (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict));
  15234. IR::Opnd* arrayOpnd = instr->m_opcode == Js::OpCode::LdElemI_A ? instr->GetSrc1() : instr->GetDst();
  15235. Assert(arrayOpnd->IsIndirOpnd());
  15236. IR::IndirOpnd* indirArrayOpnd = arrayOpnd->AsIndirOpnd();
  15237. IR::RegOpnd* baseOpnd = (IR::RegOpnd*)indirArrayOpnd->GetBaseOpnd();
  15238. IR::RegOpnd* indexOpnd = (IR::RegOpnd*)indirArrayOpnd->GetIndexOpnd();
  15239. Assert(baseOpnd);
  15240. Assert(indexOpnd);
  15241. // Process Out Params
  15242. base = baseOpnd;
  15243. index = indexOpnd;
  15244. arrayType = indirArrayOpnd->GetType();
  15245. }
// Emit a Memset/Memcopy instruction replacing the loop's element-wise store
// (and load, for memcopy), using the validated emit data. Sets up the bailout,
// computes the size and start-index operands, builds the memop, traces it, and
// removes the original element access instructions.
void
GlobOpt::EmitMemop(Loop * loop, LoopCount *loopCount, const MemOpEmitData* emitData)
{
    Assert(emitData);
    Assert(emitData->candidate);
    Assert(emitData->stElemInstr);
    Assert(emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A || emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A_Strict);
    IR::BailOutKind bailOutKind = emitData->bailOutKind;

    const byte unroll = emitData->inductionVar.unroll;
    Assert(unroll == 1);
    const bool isInductionVariableChangeIncremental = emitData->inductionVar.isIncremental;
    const bool bIndexAlreadyChanged = emitData->candidate->bIndexAlreadyChanged;

    // Destination array info comes from the store instruction.
    IR::RegOpnd *baseOpnd = nullptr;
    IR::RegOpnd *indexOpnd = nullptr;
    IRType dstType;
    GetMemOpSrcInfo(loop, emitData->stElemInstr, baseOpnd, indexOpnd, dstType);

    Func *localFunc = loop->GetFunc();

    // Handle bailout info
    EnsureBailTarget(loop);
    Assert(bailOutKind != IR::BailOutInvalid);

    // Keep only Array bits bailOuts. Consider handling these bailouts instead of simply ignoring them
    bailOutKind &= IR::BailOutForArrayBits;

    // Add our custom bailout to handle Op_MemCopy return value.
    bailOutKind |= IR::BailOutOnMemOpError;
    BailOutInfo *const bailOutInfo = loop->bailOutInfo;
    Assert(bailOutInfo);

    // All generated instructions go just before the loop's bailout instruction.
    IR::Instr *insertBeforeInstr = bailOutInfo->bailOutInstr;
    Assert(insertBeforeInstr);
    IR::Opnd *sizeOpnd = GenerateInductionVariableChangeForMemOp(loop, unroll, insertBeforeInstr);
    IR::RegOpnd *startIndexOpnd = GenerateStartIndexOpndForMemop(loop, indexOpnd, sizeOpnd, isInductionVariableChangeIncremental, bIndexAlreadyChanged, insertBeforeInstr);
    IR::IndirOpnd* dstOpnd = IR::IndirOpnd::New(baseOpnd, startIndexOpnd, dstType, localFunc);

    IR::Opnd *src1;
    const bool isMemset = emitData->candidate->IsMemSet();

    // Get the source according to the memop type
    if (isMemset)
    {
        // Memset source: either a loop-invariant sym or a JIT-time constant.
        MemSetEmitData* data = (MemSetEmitData*)emitData;
        const Loop::MemSetCandidate* candidate = data->candidate->AsMemSet();
        if (candidate->srcSym)
        {
            IR::RegOpnd* regSrc = IR::RegOpnd::New(candidate->srcSym, candidate->srcSym->GetType(), func);
            regSrc->SetIsJITOptimizedReg(true);
            src1 = regSrc;
        }
        else
        {
            src1 = IR::AddrOpnd::New(candidate->constant.ToVar(localFunc), IR::AddrOpndKindConstantAddress, localFunc);
        }
    }
    else
    {
        // Memcopy source: an indir over the load's base array with the same start index.
        Assert(emitData->candidate->IsMemCopy());

        MemCopyEmitData* data = (MemCopyEmitData*)emitData;
        Assert(data->ldElemInstr);
        Assert(data->ldElemInstr->m_opcode == Js::OpCode::LdElemI_A);

        IR::RegOpnd *srcBaseOpnd = nullptr;
        IR::RegOpnd *srcIndexOpnd = nullptr;
        IRType srcType;
        GetMemOpSrcInfo(loop, data->ldElemInstr, srcBaseOpnd, srcIndexOpnd, srcType);
        // Load and store must be driven by the same induction variable.
        Assert(GetVarSymID(srcIndexOpnd->GetStackSym()) == GetVarSymID(indexOpnd->GetStackSym()));

        src1 = IR::IndirOpnd::New(srcBaseOpnd, startIndexOpnd, srcType, localFunc);
    }

    // Generate the memop instruction with its bailout attached.
    IR::Instr* memopInstr = IR::BailOutInstr::New(isMemset ? Js::OpCode::Memset : Js::OpCode::Memcopy, bailOutKind, bailOutInfo, localFunc);
    memopInstr->SetDst(dstOpnd);
    memopInstr->SetSrc1(src1);
    memopInstr->SetSrc2(sizeOpnd);
    insertBeforeInstr->InsertBefore(memopInstr);

#if DBG_DUMP
    if (DO_MEMOP_TRACE())
    {
        // Trace-only: describe the emitted memop (value type, operands, loop count).
        char valueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseOpnd->GetValueType().ToString(valueTypeStr);
        const int loopCountBufSize = 16;
        char16 loopCountBuf[loopCountBufSize];
        if (loopCount->LoopCountMinusOneSym())
        {
            swprintf_s(loopCountBuf, _u("s%u"), loopCount->LoopCountMinusOneSym()->m_id);
        }
        else
        {
            swprintf_s(loopCountBuf, _u("%u"), loopCount->LoopCountMinusOneConstantValue() + 1);
        }
        if (isMemset)
        {
            const Loop::MemSetCandidate* candidate = emitData->candidate->AsMemSet();
            const int constBufSize = 32;
            char16 constBuf[constBufSize];
            if (candidate->srcSym)
            {
                swprintf_s(constBuf, _u("s%u"), candidate->srcSym->m_id);
            }
            else
            {
                // Format the constant according to its IR type.
                switch (candidate->constant.type)
                {
                case TyInt8:
                case TyInt16:
                case TyInt32:
                case TyInt64:
                    swprintf_s(constBuf, sizeof(IntConstType) == 8 ? _u("%lld") : _u("%d"), candidate->constant.u.intConst.value);
                    break;
                case TyFloat32:
                case TyFloat64:
                    swprintf_s(constBuf, _u("%.4f"), candidate->constant.u.floatConst.value);
                    break;
                case TyVar:
                    swprintf_s(constBuf, sizeof(Js::Var) == 8 ? _u("0x%.16llX") : _u("0x%.8X"), candidate->constant.u.varConst.value);
                    break;
                default:
                    AssertMsg(false, "Unsupported constant type");
                    swprintf_s(constBuf, _u("Unknown"));
                    break;
                }
            }
            TRACE_MEMOP_PHASE(MemSet, loop, emitData->stElemInstr,
                              _u("ValueType: %S, Base: s%u, Index: s%u, Constant: %s, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              constBuf,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
        else
        {
            const Loop::MemCopyCandidate* candidate = emitData->candidate->AsMemCopy();
            TRACE_MEMOP_PHASE(MemCopy, loop, emitData->stElemInstr,
                              _u("ValueType: %S, StBase: s%u, Index: s%u, LdBase: s%u, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              candidate->ldBase,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
    }
#endif

    // The memop replaces the original element accesses: remove them.
    RemoveMemOpSrcInstr(memopInstr, emitData->stElemInstr, emitData->block);
    if (!isMemset)
    {
        RemoveMemOpSrcInstr(memopInstr, ((MemCopyEmitData*)emitData)->ldElemInstr, emitData->block);
    }
}
  15390. bool
  15391. GlobOpt::InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, MemSetEmitData* emitData, bool& errorInInstr)
  15392. {
  15393. Assert(emitData && emitData->candidate && emitData->candidate->IsMemSet());
  15394. Loop::MemSetCandidate* candidate = (Loop::MemSetCandidate*)emitData->candidate;
  15395. if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
  15396. {
  15397. if (instr->GetDst()->IsIndirOpnd()
  15398. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base)
  15399. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  15400. )
  15401. {
  15402. Assert(instr->IsProfiledInstr());
  15403. emitData->stElemInstr = instr;
  15404. emitData->bailOutKind = instr->GetBailOutKind();
  15405. return true;
  15406. }
  15407. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan StElemI_A detected"));
  15408. errorInInstr = true;
  15409. }
  15410. else if (instr->m_opcode == Js::OpCode::LdElemI_A)
  15411. {
  15412. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan LdElemI_A detected"));
  15413. errorInInstr = true;
  15414. }
  15415. return false;
  15416. }
// Check whether 'instr' is the store or the load belonging to the memcopy
// candidate. The store must be found first (iteration is backward through the
// block); the function returns true only once both have been matched. Sets
// errorInInstr when an unrelated element access or a value-type mismatch
// invalidates the candidate.
bool
GlobOpt::InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, MemCopyEmitData* emitData, bool& errorInInstr)
{
    Assert(emitData && emitData->candidate && emitData->candidate->IsMemCopy());
    Loop::MemCopyCandidate* candidate = (Loop::MemCopyCandidate*)emitData->candidate;
    if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
    {
        if (
            instr->GetDst()->IsIndirOpnd() &&
            (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base) &&
            (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
        )
        {
            // Matched the candidate's store; remember it and its bailout kind.
            Assert(instr->IsProfiledInstr());
            emitData->stElemInstr = instr;
            emitData->bailOutKind = instr->GetBailOutKind();
            // Still need to find the LdElem
            return false;
        }
        // A store to some other array/index breaks the memcopy pattern.
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan StElemI_A detected"));
        errorInInstr = true;
    }
    else if (instr->m_opcode == Js::OpCode::LdElemI_A)
    {
        if (
            emitData->stElemInstr &&
            instr->GetSrc1()->IsIndirOpnd() &&
            (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->ldBase) &&
            (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
        )
        {
            // Matched the candidate's load (store was already found).
            Assert(instr->IsProfiledInstr());
            emitData->ldElemInstr = instr;
            // Source and destination arrays must agree on value type, otherwise
            // the element-wise conversion semantics cannot be folded into a memcopy.
            ValueType stValueType = emitData->stElemInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
            ValueType ldValueType = emitData->ldElemInstr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
            if (stValueType != ldValueType)
            {
#if DBG_DUMP
                char16 stValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                stValueType.ToString(stValueTypeStr);
                char16 ldValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                ldValueType.ToString(ldValueTypeStr);
                TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("for mismatch in Load(%s) and Store(%s) value type"), ldValueTypeStr, stValueTypeStr);
#endif
                errorInInstr = true;
                return false;
            }
            // We found both instructions for this candidate
            return true;
        }
        // A load from some other array/index (or a load seen before the store)
        // breaks the memcopy pattern.
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan LdElemI_A detected"));
        errorInInstr = true;
    }
    return false;
}
// The caller is responsible to free the memory allocated between inOrderEmitData[iEmitData -> end]
//
// Validates every collected memop candidate of 'loop' against the actual
// instructions of the loop body, and builds the emit data for each one.
//
// Parameters:
//   loop           - loop whose memOpInfo->candidates list is validated.
//   inOrderEmitData - output array of size iEmitData; filled from the END
//                     toward the front, because the candidate list is in
//                     reverse program order.
//   iEmitData      - in: candidate count; out: index of the first filled slot
//                     (0 on full success).
// Returns true only if every candidate was matched to its instructions.
bool
GlobOpt::ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) MemOpEmitData** inOrderEmitData, int& iEmitData)
{
    AnalysisAssert(iEmitData == (int)loop->memOpInfo->candidates->Count());
    // We iterate over the second block of the loop only. MemOp Works only if the loop has exactly 2 blocks
    Assert(loop->blockList.HasTwo());

    Loop::MemOpList::Iterator iter(loop->memOpInfo->candidates);

    // Second block of the loop: the loop body that contains the ld/st instructions.
    BasicBlock* bblock = loop->blockList.Head()->next;
    Loop::MemOpCandidate* candidate = nullptr;
    MemOpEmitData* emitData = nullptr;

    // Iterate backward because the list of candidate is reversed
    FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, bblock)
    {
        if (!candidate)
        {
            // Time to check next candidate
            if (!iter.Next())
            {
                // We have been through the whole list of candidates, finish
                break;
            }
            candidate = iter.Data();
            if (!candidate)
            {
                continue;
            }

            // Common check for memset and memcopy
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };

            // Get the inductionVariable changeInfo
            if (!loop->memOpInfo->inductionVariableChangeInfoMap->TryGetValue(candidate->index, &inductionVariableChangeInfo))
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): no induction variable"), candidate->base);
                return false;
            }

            // The candidate's unroll count must match the induction variable's.
            if (inductionVariableChangeInfo.unroll != candidate->count)
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): not matching unroll count"), candidate->base);
                return false;
            }

            if (candidate->IsMemSet())
            {
                Assert(!PHASE_OFF(Js::MemSetPhase, this->func));
                emitData = JitAnew(this->alloc, MemSetEmitData);
            }
            else
            {
                Assert(!PHASE_OFF(Js::MemCopyPhase, this->func));
                // Specific check for memcopy
                Assert(candidate->IsMemCopy());
                Loop::MemCopyCandidate* memcopyCandidate = candidate->AsMemCopy();

                // Memcopy needs a valid store base, a valid load base, and
                // matching load/store counts.
                if (memcopyCandidate->base == Js::Constants::InvalidSymID
                    || memcopyCandidate->ldBase == Js::Constants::InvalidSymID
                    || (memcopyCandidate->ldCount != memcopyCandidate->count))
                {
                    TRACE_MEMOP_PHASE(MemCopy, loop, nullptr, _u("(s%d): not matching ldElem and stElem"), candidate->base);
                    return false;
                }
                emitData = JitAnew(this->alloc, MemCopyEmitData);
            }
            Assert(emitData);
            emitData->block = bblock;
            emitData->inductionVar = inductionVariableChangeInfo;
            emitData->candidate = candidate;
        }
        // Scan instructions for the current candidate until it is fully matched
        // (candidateFound) or a conflicting instruction is seen (errorInInstr).
        bool errorInInstr = false;
        bool candidateFound = candidate->IsMemSet() ?
            InspectInstrForMemSetCandidate(loop, instr, (MemSetEmitData*)emitData, errorInInstr)
            : InspectInstrForMemCopyCandidate(loop, instr, (MemCopyEmitData*)emitData, errorInInstr);
        if (errorInInstr)
        {
            JitAdelete(this->alloc, emitData);
            return false;
        }
        if (candidateFound)
        {
            AnalysisAssert(iEmitData > 0);
            if (iEmitData == 0)
            {
                // Explicit for OACR
                break;
            }
            // Fill from the back so the array ends up in program order.
            inOrderEmitData[--iEmitData] = emitData;
            candidate = nullptr;
            emitData = nullptr;
        }
    } NEXT_INSTR_BACKWARD_IN_BLOCK;

    // If the iterator is still valid, some candidates were never matched.
    if (iter.IsValid())
    {
        TRACE_MEMOP(loop, nullptr, _u("Candidates not found in loop while validating"));
        return false;
    }
    return true;
}
  15566. void
  15567. GlobOpt::ProcessMemOp()
  15568. {
  15569. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  15570. {
  15571. if (HasMemOp(loop))
  15572. {
  15573. const int candidateCount = loop->memOpInfo->candidates->Count();
  15574. Assert(candidateCount > 0);
  15575. LoopCount * loopCount = GetOrGenerateLoopCountForMemOp(loop);
  15576. // If loopCount is not available we can not continue with memop
  15577. if (!loopCount || !(loopCount->LoopCountMinusOneSym() || loopCount->LoopCountMinusOneConstantValue()))
  15578. {
  15579. TRACE_MEMOP(loop, nullptr, _u("MemOp skipped for no loop count"));
  15580. loop->doMemOp = false;
  15581. loop->memOpInfo->candidates->Clear();
  15582. continue;
  15583. }
  15584. // The list is reversed, check them and place them in order in the following array
  15585. MemOpEmitData** inOrderCandidates = JitAnewArray(this->alloc, MemOpEmitData*, candidateCount);
  15586. int i = candidateCount;
  15587. if (ValidateMemOpCandidates(loop, inOrderCandidates, i))
  15588. {
  15589. Assert(i == 0);
  15590. // Process the valid MemOp candidate in order.
  15591. for (; i < candidateCount; ++i)
  15592. {
  15593. // Emit
  15594. EmitMemop(loop, loopCount, inOrderCandidates[i]);
  15595. JitAdelete(this->alloc, inOrderCandidates[i]);
  15596. }
  15597. }
  15598. else
  15599. {
  15600. Assert(i != 0);
  15601. for (; i < candidateCount; ++i)
  15602. {
  15603. JitAdelete(this->alloc, inOrderCandidates[i]);
  15604. }
  15605. // One of the memop candidates did not validate. Do not emit for this loop.
  15606. loop->doMemOp = false;
  15607. loop->memOpInfo->candidates->Clear();
  15608. }
  15609. // Free memory
  15610. JitAdeleteArray(this->alloc, candidateCount, inOrderCandidates);
  15611. }
  15612. } NEXT_LOOP_EDITING;
  15613. }
  15614. void GlobOpt::PRE::FieldPRE(Loop *loop)
  15615. {
  15616. JitArenaAllocator *alloc = this->globOpt->tempAlloc;
  15617. this->FindPossiblePRECandidates(loop, alloc);
  15618. this->PreloadPRECandidates(loop);
  15619. this->RemoveOverlyOptimisticInitialValues(loop);
  15620. }
// Tries to make 'sym' (a single-def stack sym used as an object pointer for a
// PRE candidate) live in the loop landing pad by inserting a copy of its
// defining instruction there. May recurse through chains of transfers
// (T2 = T1) and property loads (T1 = o.x).
//
// Parameters:
//   sym               - single-def stack sym to define in the landing pad.
//   loop              - loop whose landing pad receives the definition.
//   objPtrCopyPropSym - out: the sym to use as the object pointer copy-prop
//                       sym for the preloaded field.
// Returns true if the definition was successfully inserted.
bool
GlobOpt::PRE::InsertSymDefinitionInLandingPad(StackSym * sym, Loop * loop, Sym ** objPtrCopyPropSym)
{
    Assert(sym->IsSingleDef());
    IR::Instr * symDefInstr = sym->GetInstrDef();
    // Only instruction sequences recognized as PRE candidate loads can be copied.
    if (!GlobOpt::IsPREInstrSequenceCandidateLoad(symDefInstr->m_opcode))
    {
        return false;
    }

    IR::Opnd * symDefInstrSrc1 = symDefInstr->GetSrc1();
    if (symDefInstrSrc1->IsSymOpnd())
    {
        Assert(symDefInstrSrc1->AsSymOpnd()->m_sym->IsPropertySym());

        // $L1
        //      T1 = o.x (v1|T3)
        //      T2 = T1.y (v2|T4) <-- T1 is not live in the loop landing pad
        //      jmp $L1
        // Trying to make T1 live in the landing pad

        // o.x
        PropertySym* propSym = symDefInstrSrc1->AsSymOpnd()->m_sym->AsPropertySym();

        if (candidates->candidatesBv->Test(propSym->m_id))
        {
            // If propsym is a PRE candidate, then it must have had the same value on all back edges.
            // So, just look up the value on one of the back edges.
            BasicBlock* loopTail = loop->GetAnyTailBlock();
            Value * valueOnBackEdge = loopTail->globOptData.FindValue(propSym);

            // If o.x is not invariant in the loop, we can't use the preloaded value of o.x.y in the landing pad
            Value * valueInLandingPad = loop->landingPad->globOptData.FindValue(propSym);
            if (valueOnBackEdge->GetValueNumber() != valueInLandingPad->GetValueNumber())
            {
                return false;
            }

            *objPtrCopyPropSym = valueOnBackEdge->GetValueInfo()->GetSymStore();

            if (candidates->candidatesToProcess->Test(propSym->m_id))
            {
                // o.x has not been preloaded yet: preload it now through the
                // normal candidate-processing path.
                GlobHashBucket bucket;
                bucket.element = valueOnBackEdge;
                bucket.value = propSym;
                if (!PreloadPRECandidate(loop, &bucket))
                {
                    return false;
                }
                Assert(!candidates->candidatesToProcess->Test(propSym->m_id));
                Assert(loop->landingPad->globOptData.IsLive(valueOnBackEdge->GetValueInfo()->GetSymStore()));

                // Inserted T3 = o.x
                // Now, we want to
                // 1. Insert T1 = o.x
                // 2. Insert T4 = T1.y
                // 3. Indentify T3 as the objptr copy prop sym for T1, and make T3.y live on the back-edges
                // #1 is done next. #2 and #3 are done as part of preloading T1.y

                // Insert T1 = o.x
                if (!InsertPropertySymPreloadInLandingPad(symDefInstr->Copy(), loop, propSym))
                {
                    return false;
                }
                return true;
            }
            else
            {
                // o.x was already processed as a PRE candidate. If we were successful in preloading o.x,
                // we can now insert T1 = o.x
                if (loop->landingPad->globOptData.IsLive(*objPtrCopyPropSym))
                {
                    // insert T1 = o.x
                    if (!InsertPropertySymPreloadInLandingPad(symDefInstr->Copy(), loop, propSym))
                    {
                        return false;
                    }
                    return true;
                }
                else
                {
                    return false;
                }
            }
        }
        else
        {
            // Source property sym is not a PRE candidate; nothing safe to insert.
            return false;
        }
    }
    else if (symDefInstrSrc1->IsRegOpnd())
    {
        // T2 = T1
        // T3 = T2.y
        // trying to insert def of T2

        // T1
        StackSym * symDefInstrSrc1Sym = symDefInstrSrc1->AsRegOpnd()->GetStackSym();
        if (!loop->landingPad->globOptData.IsLive(symDefInstrSrc1Sym))
        {
            if (symDefInstrSrc1Sym->IsSingleDef())
            {
                // Recurse to make the source sym live first.
                if (!InsertSymDefinitionInLandingPad(symDefInstrSrc1Sym, loop, objPtrCopyPropSym))
                {
                    return false;
                }
            }
            // NOTE(review): if the source sym is neither live nor single-def,
            // control falls through and the copy is still inserted below —
            // presumably intentional, but worth confirming.
        }
        else
        {
            *objPtrCopyPropSym = symDefInstrSrc1Sym;
        }

        // If the opcode doesn't transfer both temp-number and temp-object-ness,
        // the result sym itself must serve as the copy-prop sym.
        if (!(OpCodeAttr::TempNumberTransfer(symDefInstr->m_opcode) && OpCodeAttr::TempObjectTransfer(symDefInstr->m_opcode)))
        {
            *objPtrCopyPropSym = sym;
        }

        IR::Instr * instr = symDefInstr->Copy();
        // BytecodeArgOutCapture has bytecode-tracking semantics that don't
        // belong in the landing pad; a plain load is equivalent here.
        if (instr->m_opcode == Js::OpCode::BytecodeArgOutCapture)
        {
            instr->m_opcode = Js::OpCode::Ld_A;
        }
        InsertInstrInLandingPad(instr, loop);
        return true;
    }
    else
    {
        return false;
    }
}
  15740. void
  15741. GlobOpt::PRE::InsertInstrInLandingPad(IR::Instr * instr, Loop * loop)
  15742. {
  15743. instr->GetSrc1()->SetIsJITOptimizedReg(true);
  15744. if (instr->GetDst())
  15745. {
  15746. instr->GetDst()->SetIsJITOptimizedReg(true);
  15747. loop->landingPad->globOptData.liveVarSyms->Set(instr->GetDst()->GetStackSym()->m_id);
  15748. }
  15749. if (instr->HasAnyImplicitCalls())
  15750. {
  15751. IR::Instr * bailInstr = globOpt->EnsureDisableImplicitCallRegion(loop);
  15752. bailInstr->InsertBefore(instr);
  15753. }
  15754. else if (loop->endDisableImplicitCall)
  15755. {
  15756. loop->endDisableImplicitCall->InsertBefore(instr);
  15757. }
  15758. else
  15759. {
  15760. loop->landingPad->InsertAfter(instr);
  15761. }
  15762. instr->ClearByteCodeOffset();
  15763. instr->SetByteCodeOffset(loop->landingPad->GetFirstInstr());
  15764. }
  15765. IR::Instr *
  15766. GlobOpt::PRE::InsertPropertySymPreloadInLandingPad(IR::Instr * ldInstr, Loop * loop, PropertySym * propertySym)
  15767. {
  15768. IR::SymOpnd *ldSrc = ldInstr->GetSrc1()->AsSymOpnd();
  15769. if (ldSrc->m_sym != propertySym)
  15770. {
  15771. // It's possible that the property syms are different but have equivalent objPtrs. Verify their values.
  15772. Value *val1 = globOpt->CurrentBlockData()->FindValue(ldSrc->m_sym->AsPropertySym()->m_stackSym);
  15773. Value *val2 = globOpt->CurrentBlockData()->FindValue(propertySym->m_stackSym);
  15774. if (!val1 || !val2 || val1->GetValueNumber() != val2->GetValueNumber())
  15775. {
  15776. return nullptr;
  15777. }
  15778. }
  15779. // Consider: Shouldn't be necessary once we have copy-prop in prepass...
  15780. ldInstr->GetSrc1()->AsSymOpnd()->m_sym = propertySym;
  15781. ldSrc = ldInstr->GetSrc1()->AsSymOpnd();
  15782. if (ldSrc->IsPropertySymOpnd())
  15783. {
  15784. IR::PropertySymOpnd *propSymOpnd = ldSrc->AsPropertySymOpnd();
  15785. IR::PropertySymOpnd *newPropSymOpnd;
  15786. newPropSymOpnd = propSymOpnd->AsPropertySymOpnd()->CopyWithoutFlowSensitiveInfo(this->globOpt->func);
  15787. ldInstr->ReplaceSrc1(newPropSymOpnd);
  15788. }
  15789. if (ldInstr->GetDst())
  15790. {
  15791. loop->landingPad->globOptData.liveVarSyms->Set(ldInstr->GetDst()->GetStackSym()->m_id);
  15792. }
  15793. InsertInstrInLandingPad(ldInstr, loop);
  15794. return ldInstr;
  15795. }
  15796. void
  15797. GlobOpt::PRE::MakePropertySymLiveOnBackEdges(PropertySym * propertySym, Loop * loop, Value * valueToAdd)
  15798. {
  15799. BasicBlock * loopHeader = loop->GetHeadBlock();
  15800. FOREACH_PREDECESSOR_BLOCK(blockPred, loopHeader)
  15801. {
  15802. if (!loop->IsDescendentOrSelf(blockPred->loop))
  15803. {
  15804. // Not a loop back-edge
  15805. continue;
  15806. }
  15807. // Insert it in the value table
  15808. blockPred->globOptData.SetValue(valueToAdd, propertySym);
  15809. // Make it a live field
  15810. blockPred->globOptData.liveFields->Set(propertySym->m_id);
  15811. } NEXT_PREDECESSOR_BLOCK;
  15812. }
  15813. void GlobOpt::PRE::RemoveOverlyOptimisticInitialValues(Loop * loop)
  15814. {
  15815. BasicBlock * landingPad = loop->landingPad;
  15816. // For a property sym whose obj ptr sym wasn't live in the landing pad, we can optimistically (if the obj ptr sym was
  15817. // single def) insert an initial value in the landing pad, with the hope that PRE could make the obj ptr sym live.
  15818. // But, if PRE couldn't make the obj ptr sym live, we need to clear the value for the property sym from the landing pad
  15819. for (auto it = loop->initialValueFieldMap.GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
  15820. {
  15821. PropertySym * propertySym = it.CurrentKey();
  15822. StackSym * objPtrSym = propertySym->m_stackSym;
  15823. if (!landingPad->globOptData.IsLive(objPtrSym))
  15824. {
  15825. Value * landingPadPropSymValue = landingPad->globOptData.FindValue(propertySym);
  15826. Assert(landingPadPropSymValue);
  15827. Assert(landingPadPropSymValue->GetValueNumber() == it.CurrentValue()->GetValueNumber());
  15828. Assert(landingPadPropSymValue->GetValueInfo()->GetSymStore() == propertySym);
  15829. landingPad->globOptData.ClearSymValue(propertySym);
  15830. it.RemoveCurrent();
  15831. }
  15832. }
  15833. }
  15834. #if DBG_DUMP
  15835. void GlobOpt::PRE::TraceFailedPreloadInLandingPad(const Loop *const loop, PropertySym * propertySym, const char16* reason) const
  15836. {
  15837. if (PHASE_TRACE(Js::FieldPREPhase, this->globOpt->func))
  15838. {
  15839. int32 propertyId = propertySym->m_propertyId;
  15840. SymID objectSymId = propertySym->m_stackSym->m_id;
  15841. char16 propSymStr[32];
  15842. switch (propertySym->m_fieldKind)
  15843. {
  15844. case PropertyKindData:
  15845. if (JITManager::GetJITManager()->IsOOPJITEnabled())
  15846. {
  15847. swprintf_s(propSymStr, _u("s%d->#%d"), objectSymId, propertyId);
  15848. }
  15849. else
  15850. {
  15851. Js::PropertyRecord const* fieldName = propertySym->m_func->GetInProcThreadContext()->GetPropertyRecord(propertyId);
  15852. swprintf_s(propSymStr, _u("s%d->%s"), objectSymId, fieldName->GetBuffer());
  15853. }
  15854. break;
  15855. case PropertyKindSlots:
  15856. case PropertyKindSlotArray:
  15857. swprintf_s(propSymStr, _u("s%d[%d]"), objectSymId, propertyId);
  15858. break;
  15859. case PropertyKindLocalSlots:
  15860. swprintf_s(propSymStr, _u("s%dl[%d]"), objectSymId, propertyId);
  15861. break;
  15862. default:
  15863. AssertMsg(0, "Unknown field kind");
  15864. break;
  15865. }
  15866. Output::Print(_u("** TRACE: Field PRE: "));
  15867. this->globOpt->func->DumpFullFunctionName();
  15868. Output::Print(_u(": Failed to pre-load (%s) in landing pad of loop #%d. Reason: %s "), propSymStr, loop->GetLoopNumber(), reason);
  15869. Output::Print(_u("\n"));
  15870. }
  15871. }
  15872. #endif