GlobOpt.cpp 827 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
182221823218242182521826218272182821829218302183121832218332183421835218362183721838218392184021841218422184321844218452184621847218482184921850218512185221853218542185521856218572185821859218602186121862218632186421865218662186721868218692187021871218722187321874218752187621877218782187921880218812188221883218842188521886218872188821889218902189121892218932189421895218962189721898218992190021901219022190321904219052190621907219082190921910219112191221913219142191521916219172191821919219202192121922219232192421925219262192721928219292193021931219322193321934219352193621937219382193921940219412194221943219442194521946219472194821949219502195121952219532195421955219562195721958219592196021961219622196321964219652196621967219682196921970219712197221973219742197521976219772197821979219802198121982219832198421985219862198721988219892199021991219922199321994219952199621997219982199922000220012200222003220042200522006220072200822009220102201122012220132201422015220162201722018220192202022021220222202322024220252202622027220282202922030220312203222033220342203522036220372203822039220402204122042220432204422045
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft Corporation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#include "Backend.h"

// ---------------------------------------------------------------------------------------------------
// Tracing helpers for the global optimizer. Each macro compiles away to nothing
// in builds without the corresponding debug/dump support, so call sites may use
// them unconditionally. NOTE: comments are kept outside the macro bodies — a
// "//" comment on a backslash-continued line would swallow the continuation.
// ---------------------------------------------------------------------------------------------------
#if ENABLE_DEBUG_CONFIG_OPTIONS

// Prints a one-line test trace for `instr` when -testtrace is enabled for `phase`.
#define TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    if(PHASE_TESTTRACE(phase, this->func)) \
    { \
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
        Output::Print( \
            _u("Testtrace: %s function %s (%s): "), \
            Js::PhaseNames[phase], \
            instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
            instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }
#else // ENABLE_DEBUG_CONFIG_OPTIONS
#define TESTTRACE_PHASE_INSTR(phase, instr, ...)
#endif // ENABLE_DEBUG_CONFIG_OPTIONS

#if ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP

// GlobOpt-phase tracing. All GOPT_TRACE* variants are suppressed during loop
// pre-passes (IsLoopPrePass()) to avoid duplicate output for re-visited blocks.

// Dumps an operand followed by a formatted message.
#define GOPT_TRACE_OPND(opnd, ...) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        Output::Print(_u("TRACE: ")); \
        opnd->Dump(); \
        Output::Print(_u(" : ")); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }

// Formatted trace message only.
#define GOPT_TRACE(...) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        Output::Print(_u("TRACE: ")); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }

// Dumps an instruction with no message.
#define GOPT_TRACE_INSTRTRACE(instr) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        instr->Dump(); \
        Output::Flush(); \
    }

// Formatted message followed by an instruction dump.
#define GOPT_TRACE_INSTR(instr, ...) \
    if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
    { \
        Output::Print(_u("TRACE: ")); \
        Output::Print(__VA_ARGS__); \
        instr->Dump(); \
        Output::Flush(); \
    }

// Dumps a whole basic block; `before` selects the pre- or post-optimization dump.
#define GOPT_TRACE_BLOCK(block, before) \
    this->Trace(block, before); \
    Output::Flush();

// TODO: OOP JIT, add back line number

// Full-context phase trace: function, loop number, inlinee (when the instr
// belongs to an inlined func), phase name, instruction dump, then the message.
#define TRACE_PHASE_INSTR(phase, instr, ...) \
    if(PHASE_TRACE(phase, this->func)) \
    { \
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
        Output::Print( \
            _u("Function %s (%s)"), \
            this->func->GetJITFunctionBody()->GetDisplayName(), \
            this->func->GetDebugNumberSet(debugStringBuffer)); \
        if(this->func->IsLoopBody()) \
        { \
            Output::Print(_u(", loop %u"), this->func->GetWorkItem()->GetLoopNumber()); \
        } \
        if(instr->m_func != this->func) \
        { \
            Output::Print( \
                _u(", Inlinee %s (%s)"), \
                instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
                instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
        } \
        Output::Print(_u(" - %s\n "), Js::PhaseNames[phase]); \
        instr->Dump(); \
        Output::Print(_u(" ")); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }

// Same as TRACE_PHASE_INSTR but gated on the -verbose flag.
#define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...) \
    if(CONFIG_FLAG(Verbose)) \
    { \
        TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
    }

// Emits both the full trace and the test trace for the same event.
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
    TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);

#else // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP

// No-op fallbacks for non-dump builds.
#define GOPT_TRACE(...)
#define GOPT_TRACE_OPND(opnd, ...)
#define GOPT_TRACE_INSTRTRACE(instr)
#define GOPT_TRACE_INSTR(instr, ...)
#define GOPT_TRACE_BLOCK(block, before)
#define TRACE_PHASE_INSTR(phase, instr, ...)
#define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...)
#define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
#endif // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP

#if DBG_DUMP

// MemOp (memset/memcopy loop idiom) tracing. DO_MEMOP_TRACE answers "is any
// memop tracing on"; DO_MEMOP_TRACE_PHASE narrows to one sub-phase.
#define DO_MEMOP_TRACE() (PHASE_TRACE(Js::MemOpPhase, this->func) ||\
    PHASE_TRACE(Js::MemSetPhase, this->func) ||\
    PHASE_TRACE(Js::MemCopyPhase, this->func))
#define DO_MEMOP_TRACE_PHASE(phase) (PHASE_TRACE(Js::MemOpPhase, this->func) || PHASE_TRACE(Js::phase ## Phase, this->func))

// Shared body: function/loop context, the caller's message, then the
// instruction's byte-code offset and opcode name (when an instr is supplied).
#define OUTPUT_MEMOP_TRACE(loop, instr, ...) {\
    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];\
    Output::Print(15, _u("Function: %s%s, Loop: %u: "), this->func->GetJITFunctionBody()->GetDisplayName(), this->func->GetDebugNumberSet(debugStringBuffer), loop->GetLoopNumber());\
    Output::Print(__VA_ARGS__);\
    IR::Instr* __instr__ = instr;\
    if(__instr__) __instr__->DumpByteCodeOffset();\
    if(__instr__) Output::Print(_u(" (%s)"), Js::OpCodeUtil::GetOpCodeName(__instr__->m_opcode));\
    Output::Print(_u("\n"));\
    Output::Flush(); \
    }

// Trace under the umbrella MemOp switch.
#define TRACE_MEMOP(loop, instr, ...) \
    if (DO_MEMOP_TRACE()) {\
        Output::Print(_u("TRACE MemOp:"));\
        OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
    }
#define TRACE_MEMOP_VERBOSE(loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP(loop, instr, __VA_ARGS__)}

// Trace under a specific memop sub-phase (MemSet / MemCopy).
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...) \
    if (DO_MEMOP_TRACE_PHASE(phase))\
    {\
        Output::Print(_u("TRACE ") _u(#phase) _u(":"));\
        OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
    }
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP_PHASE(phase, loop, instr, __VA_ARGS__)}

#else

// No-op fallbacks for non-dump builds.
#define DO_MEMOP_TRACE()
#define DO_MEMOP_TRACE_PHASE(phase)
#define OUTPUT_MEMOP_TRACE(loop, instr, ...)
#define TRACE_MEMOP(loop, instr, ...)
#define TRACE_MEMOP_VERBOSE(loop, instr, ...)
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...)
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...)
#endif
  137. class AutoRestoreVal
  138. {
  139. private:
  140. Value *const originalValue;
  141. Value *const tempValue;
  142. Value * *const valueRef;
  143. public:
  144. AutoRestoreVal(Value *const originalValue, Value * *const tempValueRef)
  145. : originalValue(originalValue), tempValue(*tempValueRef), valueRef(tempValueRef)
  146. {
  147. }
  148. ~AutoRestoreVal()
  149. {
  150. if(*valueRef == tempValue)
  151. {
  152. *valueRef = originalValue;
  153. }
  154. }
  155. PREVENT_COPY(AutoRestoreVal);
  156. };
// GlobOpt constructor: computes, once per function, which optimizations this
// pass will perform. Each "do*" flag is derived from phase on/off switches and
// (when the function has profile data) profile-driven disable bits, and many
// flags are chained — e.g. doAggressiveIntTypeSpec requires doTypeSpec, and
// the bound-check flags require the array-check/int-spec flags — so later code
// can test a single boolean instead of re-deriving the whole condition.
GlobOpt::GlobOpt(Func * func)
    : func(func),
    intConstantToStackSymMap(nullptr),
    intConstantToValueMap(nullptr),
    currentValue(FirstNewValueNumber),   // value numbering starts past the reserved range
    prePassLoop(nullptr),
    alloc(nullptr),
    isCallHelper(false),
    inInlinedBuiltIn(false),
    rootLoopPrePass(nullptr),
    noImplicitCallUsesToInsert(nullptr),
    valuesCreatedForClone(nullptr),
    valuesCreatedForMerge(nullptr),
    blockData(func),
    instrCountSinceLastCleanUp(0),
    isRecursiveCallOnLandingPad(false),
    updateInductionVariableValueNumber(false),
    isPerformingLoopBackEdgeCompensation(false),
    currentRegion(nullptr),
    changedSymsAfterIncBailoutCandidate(nullptr),
    // Type specialization master switch; everything below that specializes
    // types is gated on it.
    doTypeSpec(
        !IsTypeSpecPhaseOff(func)),
    doAggressiveIntTypeSpec(
        doTypeSpec &&
        DoAggressiveIntTypeSpec(func)),
    doAggressiveMulIntTypeSpec(
        doTypeSpec &&
        !PHASE_OFF(Js::AggressiveMulIntTypeSpecPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsAggressiveMulIntTypeSpecDisabled(func->IsLoopBody()))),
    doDivIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsDivIntTypeSpecDisabled(func->IsLoopBody()))),
    doLossyIntTypeSpec(
        doTypeSpec &&
        DoLossyIntTypeSpec(func)),
    doFloatTypeSpec(
        doTypeSpec &&
        DoFloatTypeSpec(func)),
    // Array optimizations; the check-hoist flag gates the more specific ones.
    doArrayCheckHoist(
        DoArrayCheckHoist(func)),
    doArrayMissingValueCheckHoist(
        doArrayCheckHoist &&
        DoArrayMissingValueCheckHoist(func)),
    doArraySegmentHoist(
        doArrayCheckHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array), func)),
    doJsArraySegmentHoist(
        doArraySegmentHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array), func)),
    doArrayLengthHoist(
        doArrayCheckHoist &&
        DoArrayLengthHoist(func)),
    doEliminateArrayAccessHelperCall(
        doArrayCheckHoist &&
        !PHASE_OFF(Js::EliminateArrayAccessHelperCallPhase, func)),
    // Integer bound tracking / bound-check elimination and hoisting.
    doTrackRelativeIntBounds(
        doAggressiveIntTypeSpec &&
        DoPathDependentValues() &&
        !PHASE_OFF(Js::Phase::TrackRelativeIntBoundsPhase, func)),
    doBoundCheckElimination(
        doTrackRelativeIntBounds &&
        !PHASE_OFF(Js::Phase::BoundCheckEliminationPhase, func)),
    doBoundCheckHoist(
        doEliminateArrayAccessHelperCall &&
        doBoundCheckElimination &&
        DoConstFold() &&
        !PHASE_OFF(Js::Phase::BoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsBoundCheckHoistDisabled(func->IsLoopBody()))),
    doLoopCountBasedBoundCheckHoist(
        doBoundCheckHoist &&
        !PHASE_OFF(Js::Phase::LoopCountBasedBoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLoopCountBasedBoundCheckHoistDisabled(func->IsLoopBody()))),
    doPowIntIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsPowIntIntTypeSpecDisabled())),
    doTagChecks(
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTagCheckDisabled())),
    isAsmJSFunc(func->GetJITFunctionBody()->IsAsmJsMode())
{
}
  237. void
  238. GlobOpt::BackwardPass(Js::Phase tag)
  239. {
  240. BEGIN_CODEGEN_PHASE(this->func, tag);
  241. ::BackwardPass backwardPass(this->func, this, tag);
  242. backwardPass.Optimize();
  243. END_CODEGEN_PHASE(this->func, tag);
  244. }
// Main entry point of the global optimizer. Runs, in order: backward pass,
// forward pass, dead-store pass, and tail duplication. When glob opt is
// disabled for this function, only the dead-store pass runs — it is still
// required to compute live registers on back edges.
void
GlobOpt::Optimize()
{
    this->objectTypeSyms = nullptr;
    this->func->argInsCount = this->func->GetInParamsCount() - 1; //Don't include "this" pointer in the count.

    if (!func->DoGlobOpt())
    {
        this->lengthEquivBv = nullptr;
        this->argumentsEquivBv = nullptr;
        this->callerEquivBv = nullptr;

        // Still need to run the dead store phase to calculate the live reg on back edge
        this->BackwardPass(Js::DeadStorePhase);
        CannotAllocateArgumentsObjectOnStack();
        return;
    }

    {
        this->lengthEquivBv = this->func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::length, nullptr); // Used to kill live "length" properties
        this->argumentsEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::arguments, nullptr); // Used to kill live "arguments" properties
        this->callerEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::caller, nullptr); // Used to kill live "caller" properties

        // The backward phase needs the glob opt's allocator to allocate the propertyTypeValueMap
        // in GlobOpt::EnsurePropertyTypeValue and ranges of instructions where int overflow may be ignored.
        // (see BackwardPass::TrackIntUsage)
        PageAllocator * pageAllocator = this->func->m_alloc->GetPageAllocator();
        NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-GlobOpt"), pageAllocator, Js::Throw::OutOfMemory);
        this->alloc = &localAlloc;
        NoRecoverMemoryJitArenaAllocator localTempAlloc(_u("BE-GlobOpt temp"), pageAllocator, Js::Throw::OutOfMemory);
        this->tempAlloc = &localTempAlloc;

        // The forward passes use info (upwardExposedUses) from the backward pass. This info
        // isn't available for some of the symbols created during the backward pass, or the forward pass.
        // Keep track of the last symbol for which we're guaranteed to have data.
        this->maxInitialSymID = this->func->m_symTable->GetMaxSymID();
        this->BackwardPass(Js::BackwardPhase);
        this->ForwardPass();
    }
    // The arena allocators above are destroyed at the end of the scope; the
    // remaining passes must not rely on this->alloc / this->tempAlloc.
    this->BackwardPass(Js::DeadStorePhase);
    this->TailDupPass();
}
  282. bool GlobOpt::ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd)
  283. {
  284. Assert(indirOpnd);
  285. if(!indirOpnd->GetIndexOpnd())
  286. {
  287. return indirOpnd->GetOffset() >= 0;
  288. }
  289. IR::RegOpnd *const indexOpnd = indirOpnd->GetIndexOpnd();
  290. if(indexOpnd->m_sym->m_isNotInt)
  291. {
  292. // Typically, single-def or any sym-specific information for type-specialized syms should not be used because all of
  293. // their defs will not have been accounted for until after the forward pass. But m_isNotInt is only ever changed from
  294. // false to true, so it's okay in this case.
  295. return false;
  296. }
  297. StackSym *indexVarSym = indexOpnd->m_sym;
  298. if(indexVarSym->IsTypeSpec())
  299. {
  300. indexVarSym = indexVarSym->GetVarEquivSym(nullptr);
  301. Assert(indexVarSym);
  302. }
  303. else if(!IsLoopPrePass())
  304. {
  305. // Don't use single-def info or const flags for type-specialized syms, as all of their defs will not have been accounted
  306. // for until after the forward pass. Also, don't use the const flags in a loop prepass because the const flags may not
  307. // be up-to-date.
  308. StackSym *const indexSym = indexOpnd->m_sym;
  309. if(indexSym->IsIntConst())
  310. {
  311. return indexSym->GetIntConstValue() >= 0;
  312. }
  313. }
  314. Value *const indexValue = FindValue(indexVarSym);
  315. if(!indexValue)
  316. {
  317. // Treat it as Uninitialized, assume it's going to be valid
  318. return true;
  319. }
  320. ValueInfo *const indexValueInfo = indexValue->GetValueInfo();
  321. int32 indexConstantValue;
  322. if(indexValueInfo->TryGetIntConstantValue(&indexConstantValue))
  323. {
  324. return indexConstantValue >= 0;
  325. }
  326. if(indexValueInfo->IsUninitialized())
  327. {
  328. // Assume it's going to be valid
  329. return true;
  330. }
  331. return indexValueInfo->HasBeenNumber() && !indexValueInfo->HasBeenFloat();
  332. }
  333. //
  334. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  335. //
  336. ValueType GlobOpt::GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize)
  337. {
  338. ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
  339. ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
  340. if (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo())
  341. {
  342. ValueType resultType = instr->m_func->GetReadOnlyProfileInfo()->GetDivProfileInfo(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));
  343. if (resultType.IsLikelyInt())
  344. {
  345. if (specialize && src1ValueInfo && src2ValueInfo
  346. && ((src1ValueInfo->IsInt() && src2ValueInfo->IsInt()) ||
  347. (this->DoDivIntTypeSpec() && src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())))
  348. {
  349. return ValueType::GetInt(true);
  350. }
  351. return resultType;
  352. }
  353. // Consider: Checking that the sources are numbers.
  354. if (resultType.IsLikelyFloat())
  355. {
  356. return ValueType::Float;
  357. }
  358. return resultType;
  359. }
  360. int32 src1IntConstantValue;
  361. if(!src1ValueInfo || !src1ValueInfo->TryGetIntConstantValue(&src1IntConstantValue))
  362. {
  363. return ValueType::Number;
  364. }
  365. if (src1IntConstantValue == 1)
  366. {
  367. return ValueType::Float;
  368. }
  369. int32 src2IntConstantValue;
  370. if(!src2Val || !src2ValueInfo->TryGetIntConstantValue(&src2IntConstantValue))
  371. {
  372. return ValueType::Number;
  373. }
  374. if (src2IntConstantValue // Avoid divide by zero
  375. && !(src1IntConstantValue == 0x80000000 && src2IntConstantValue == -1) // Avoid integer overflow
  376. && (src1IntConstantValue % src2IntConstantValue) != 0)
  377. {
  378. return ValueType::Float;
  379. }
  380. return ValueType::Number;
  381. }
// Forward pass of the global optimizer: sets up the per-pass data structures
// (constant/value maps live on this function's stack and are published through
// member pointers), optimizes every basic block in order, runs MemOp idiom
// recognition, then tears the pointers back down. All arena memory is owned by
// this->alloc, which Optimize() destroys after this returns.
void
GlobOpt::ForwardPass()
{
    BEGIN_CODEGEN_PHASE(this->func, Js::ForwardPhase);

#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
    {
        this->func->DumpHeader();
    }

    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::GlobOptPhase))
    {
        this->TraceSettings();
    }
#endif

    // GetConstantCount() gives us the right size to pick for the SparseArray, but we may need more if we've inlined
    // functions with constants. There will be a gap in the symbol numbering between the main constants and
    // the inlined ones, so we'll most likely need a new array chunk. Make the min size of the array chunks be 64
    // in case we have a main function with very few constants and a bunch of constants from inlined functions.
    this->byteCodeConstantValueArray = SparseArray<Value>::New(this->alloc, max(this->func->GetJITFunctionBody()->GetConstCount(), 64U));
    this->byteCodeConstantValueNumbersBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->tempBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->prePassCopyPropSym = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->byteCodeUses = nullptr;
    this->propertySymUse = nullptr;

    // changedSymsAfterIncBailoutCandidate helps track building incremental bailout in ForwardPass
    this->changedSymsAfterIncBailoutCandidate = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);

#if DBG
    this->byteCodeUsesBeforeOpt = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) && this->DoFunctionFieldCopyProp())
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Func: "));
        this->func->DumpFullFunctionName();
        Output::Print(_u("\n"));
    }
#endif

    // These maps are stack-allocated for the duration of the pass and exposed
    // to the rest of GlobOpt via the member pointers; they are nulled out
    // below before the locals go out of scope.
    OpndList localNoImplicitCallUsesToInsert(alloc);
    this->noImplicitCallUsesToInsert = &localNoImplicitCallUsesToInsert;
    IntConstantToStackSymMap localIntConstantToStackSymMap(alloc);
    this->intConstantToStackSymMap = &localIntConstantToStackSymMap;
    IntConstantToValueMap localIntConstantToValueMap(alloc);
    this->intConstantToValueMap = &localIntConstantToValueMap;
    AddrConstantToValueMap localAddrConstantToValueMap(alloc);
    this->addrConstantToValueMap = &localAddrConstantToValueMap;
    StringConstantToValueMap localStringConstantToValueMap(alloc);
    this->stringConstantToValueMap = &localStringConstantToValueMap;
    SymIdToInstrMap localPrePassInstrMap(alloc);
    this->prePassInstrMap = &localPrePassInstrMap;
    ValueSetByValueNumber localValuesCreatedForClone(alloc, 64);
    this->valuesCreatedForClone = &localValuesCreatedForClone;
    ValueNumberPairToValueMap localValuesCreatedForMerge(alloc, 64);
    this->valuesCreatedForMerge = &localValuesCreatedForMerge;

#if DBG
    BVSparse<JitArenaAllocator> localFinishedStackLiteralInitFld(alloc);
    this->finishedStackLiteralInitFld = &localFinishedStackLiteralInitFld;
#endif

    // Optimize every block (editing iteration: OptBlock may remove blocks).
    FOREACH_BLOCK_IN_FUNC_EDITING(block, this->func)
    {
        this->OptBlock(block);
    } NEXT_BLOCK_IN_FUNC_EDITING;

    if (!PHASE_OFF(Js::MemOpPhase, this->func))
    {
        ProcessMemOp();
    }

    // Tear down the published pointers now that the locals are about to die.
    this->noImplicitCallUsesToInsert = nullptr;
    this->intConstantToStackSymMap = nullptr;
    this->intConstantToValueMap = nullptr;
    this->addrConstantToValueMap = nullptr;
    this->stringConstantToValueMap = nullptr;
#if DBG
    this->finishedStackLiteralInitFld = nullptr;
    uint freedCount = 0;
    uint spilledCount = 0;
#endif
    // Release per-block glob-opt data; count (DBG only) how many blocks had
    // already dropped their data versus how many still held it.
    FOREACH_BLOCK_IN_FUNC(block, this->func)
    {
#if DBG
        if (block->GetDataUseCount() == 0)
        {
            freedCount++;
        }
        else
        {
            spilledCount++;
        }
#endif
        block->SetDataUseCount(0);
        if (block->cloneStrCandidates)
        {
            JitAdelete(this->alloc, block->cloneStrCandidates);
            block->cloneStrCandidates = nullptr;
        }
    } NEXT_BLOCK_IN_FUNC;

    // Make sure we free most of them.
    Assert(freedCount >= spilledCount);

    // this->alloc will be freed right after return, no need to free it here
    this->changedSymsAfterIncBailoutCandidate = nullptr;

    END_CODEGEN_PHASE(this->func, Js::ForwardPhase);
}
// Optimizes one basic block: removes it if unreachable, runs the loop
// pre-pass machinery when the block is a loop header, merges predecessor
// value maps, optimizes each instruction, and — on blocks ending a loop
// back-edge — inserts compensation code so type-specialized symbols match the
// state expected at the loop header and in the landing pad.
void
GlobOpt::OptBlock(BasicBlock *block)
{
    if (this->func->m_fg->RemoveUnreachableBlock(block, this))
    {
        GOPT_TRACE(_u("Removing unreachable block #%d\n"), block->GetBlockNum());
        return;
    }

    Loop * loop = block->loop;
    if (loop && block->isLoopHeader)
    {
        // First visit of a loop header (outside its own pre-pass): run the
        // loop pre-pass, then field PRE, and re-optimize the landing pad.
        if (loop != this->prePassLoop)
        {
            OptLoops(loop);
            if (!this->IsLoopPrePass() && DoFieldPRE(loop))
            {
                // Note: !IsLoopPrePass means this was a root loop pre-pass. FieldPre() is called once per loop.
                this->FieldPRE(loop);

                // Re-optimize the landing pad
                BasicBlock *landingPad = loop->landingPad;
                this->isRecursiveCallOnLandingPad = true;

                this->OptBlock(landingPad);

                this->isRecursiveCallOnLandingPad = false;
                this->currentBlock = block;
            }
        }
    }

    this->currentBlock = block;
    PrepareLoopArrayCheckHoist();

    this->MergePredBlocksValueMaps(block);

    this->intOverflowCurrentlyMattersInRange = true;
    this->intOverflowDoesNotMatterRange = this->currentBlock->intOverflowDoesNotMatterRange;

    if (loop && DoFieldHoisting(loop))
    {
        if (block->isLoopHeader)
        {
            if (!this->IsLoopPrePass())
            {
                this->PrepareFieldHoisting(loop);
            }
            else if (loop == this->rootLoopPrePass)
            {
                this->PreparePrepassFieldHoisting(loop);
            }
        }
    }
    else
    {
        Assert(!TrackHoistableFields() || !HasHoistableFields(&this->blockData));
        // Without field copy-prop or field ref opts there is no point keeping
        // field values alive across the block.
        if (!DoFieldCopyProp() && !DoFieldRefOpts())
        {
            this->KillAllFields(blockData.liveFields);
        }
    }

    this->tempAlloc->Reset();

    if(loop && block->isLoopHeader)
    {
        loop->firstValueNumberInLoop = this->currentValue;
    }

    GOPT_TRACE_BLOCK(block, true);

    // Optimize each instruction; the editing iterator lets OptInstr replace or
    // remove the current instruction.
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        GOPT_TRACE_INSTRTRACE(instr);
        BailOutInfo* oldBailOutInfo = nullptr;
        // In debug-mode JIT, remember the aux-only bailout info so we can tell
        // after OptInstr whether it is still the same object.
        bool isCheckAuxBailoutNeeded = this->func->IsJitInDebugMode() && !this->IsLoopPrePass();
        if (isCheckAuxBailoutNeeded && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            oldBailOutInfo = instr->GetBailOutInfo();
            Assert(oldBailOutInfo);
        }
        bool isInstrRemoved = false;
        instrNext = this->OptInstr(instr, &isInstrRemoved);

        // If we still have instrs with only aux bail out, convert aux bail out back to regular bail out and fill it.
        // During OptInstr some instr can be moved out to a different block, in this case bailout info is going to be replaced
        // with e.g. loop bailout info which is filled as part of processing that block, thus we don't need to fill it here.
        if (isCheckAuxBailoutNeeded && !isInstrRemoved && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            if (instr->GetBailOutInfo() == oldBailOutInfo)
            {
                instr->PromoteAuxBailOut();
                FillBailOutInfo(block, instr->GetBailOutInfo());
            }
            else
            {
                AssertMsg(instr->GetBailOutInfo(), "With aux bailout, the bailout info should not be removed by OptInstr.");
            }
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;

    GOPT_TRACE_BLOCK(block, false);
    if (block->loop)
    {
        if (IsLoopPrePass())
        {
            if (DoBoundCheckHoist())
            {
                DetectUnknownChangesToInductionVariables(&block->globOptData);
            }
        }
        else
        {
            isPerformingLoopBackEdgeCompensation = true;

            Assert(this->tempBv->IsEmpty());
            BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);

            // On loop back-edges, we need to restore the state of the type specialized
            // symbols to that of the loop header.
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                if (succ->isLoopHeader && succ->loop->IsDescendentOrSelf(block->loop))
                {
                    BVSparse<JitArenaAllocator> *liveOnBackEdge = block->loop->regAlloc.liveOnBackEdgeSyms;

                    // Var in the loop header, not var on the back-edge - convert back to var.
                    this->tempBv->Minus(block->loop->varSymsOnEntry, block->globOptData.liveVarSyms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToVar(this->tempBv, block);

                    // Lossy int in the loop header, and no int on the back-edge - need a lossy conversion to int
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int on the back-edge - need a lossless conversion to int
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, false /* lossy */);

                    // Float in the loop header, not float on the back-edge - convert to float.
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block);

                    // SIMD_JS
                    // Compensate on backedge if sym is live on loop entry but not on backedge
                    this->tempBv->Minus(block->loop->simd128F4SymsOnEntry, block->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block, TySimd128F4, IR::BailOutSimd128F4Only);

                    this->tempBv->Minus(block->loop->simd128I4SymsOnEntry, block->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block, TySimd128I4, IR::BailOutSimd128I4Only);

                    // For ints and floats, go aggressive and type specialize in the landing pad any symbol which was specialized on
                    // entry to the loop body (in the loop header), and is still specialized on this tail, but wasn't specialized in
                    // the landing pad.

                    // Lossy int in the loop header and no int in the landing pad - need a lossy conversion to int
                    // (entry.lossyInt32 - landingPad.int32)
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->loop->landingPad->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int in the landing pad - need a lossless conversion to int
                    // ((entry.int32 - entry.lossyInt32) - (landingPad.int32 - landingPad.lossyInt32))
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(
                        block->loop->landingPad->globOptData.liveInt32Syms,
                        block->loop->landingPad->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, false /* lossy */);

                    // ((entry.float64 - landingPad.float64) & block.float64)
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->loop->landingPad->globOptData.liveFloat64Syms);
                    this->tempBv->And(block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block->loop->landingPad);

                    // SIMD_JS
                    // compensate on landingpad if live on loopEntry and Backedge.
                    this->tempBv->Minus(block->loop->simd128F4SymsOnEntry, block->loop->landingPad->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(block->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block->loop->landingPad, TySimd128F4, IR::BailOutSimd128F4Only);

                    this->tempBv->Minus(block->loop->simd128I4SymsOnEntry, block->loop->landingPad->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(block->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block->loop->landingPad, TySimd128I4, IR::BailOutSimd128I4Only);

                    // Now that we're done with the liveFields within this loop, trim the set to those syms
                    // that the backward pass told us were live out of the loop.
                    // This assumes we have no further need of the liveFields within the loop.
                    if (block->loop->liveOutFields)
                    {
                        block->globOptData.liveFields->And(block->loop->liveOutFields);
                    }
                }
            } NEXT_SUCCESSOR_BLOCK;

            this->tempBv->ClearAll();

            isPerformingLoopBackEdgeCompensation = false;
        }
    }

    block->globOptData.hasCSECandidates = this->blockData.hasCSECandidates;

#if DBG
    // The set of live lossy int32 syms should be a subset of all live int32 syms
    this->tempBv->And(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    // The set of live lossy int32 syms should be a subset of live var or float syms (var or float sym containing the lossless
    // value of the sym should be live)
    this->tempBv->Or(block->globOptData.liveVarSyms, block->globOptData.liveFloat64Syms);
    this->tempBv->And(block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    this->tempBv->ClearAll();
#endif
}
// Run the optimizer's loop prepass over 'loop': saves/restores this->prePassLoop around the
// walk, lazily allocates (or resets) the loop's tracking bit-vectors, then optimizes every
// block in the loop via OptBlock. The outermost invocation of a nest also owns
// this->rootLoopPrePass and clears the per-root prepass caches.
void
GlobOpt::OptLoops(Loop *loop)
{
    Assert(loop != nullptr);
#if DBG
    // Debug-only trace: report loops where field copy prop is enabled per-loop even though
    // it is not enabled for the whole function.
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) &&
        !DoFunctionFieldCopyProp() && DoFieldCopyProp(loop))
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Loop: "));
        this->func->DumpFullFunctionName();
        uint loopNumber = loop->GetLoopNumber();
        Assert(loopNumber != Js::LoopHeader::NoLoop);
        Output::Print(_u(" Loop: %d\n"), loopNumber);
    }
#endif

    // Remember the enclosing loop (nullptr when this is the root of the prepass nest) so it
    // can be restored on the way out; nested calls stack through this member.
    Loop *previousLoop = this->prePassLoop;
    this->prePassLoop = loop;

    if (previousLoop == nullptr)
    {
        // Entering the outermost loop of this prepass nest: it becomes the root and the
        // per-root instruction map is reset.
        Assert(this->rootLoopPrePass == nullptr);
        this->rootLoopPrePass = loop;
        this->prePassInstrMap->Clear();
        if (loop->parent == nullptr)
        {
            // Outer most loop...
            this->prePassCopyPropSym->ClearAll();
        }
    }

    if (loop->symsUsedBeforeDefined == nullptr)
    {
        // First visit of this loop: allocate all of its per-loop tracking sets.
        loop->symsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyIntSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyNumberSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelySimd128F4SymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelySimd128I4SymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceFloat64SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceSimd128F4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceSimd128I4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->symsDefInLoop = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldKilled = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldPRESymStore = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->allFieldsKilled = false;
    }
    else
    {
        // Revisit: clear the previously-allocated sets so the prepass starts fresh.
        // NOTE(review): fieldPRESymStore is intentionally not cleared here — presumably its
        // contents persist across passes; confirm before changing.
        loop->symsUsedBeforeDefined->ClearAll();
        loop->likelyIntSymsUsedBeforeDefined->ClearAll();
        loop->likelyNumberSymsUsedBeforeDefined->ClearAll();
        loop->likelySimd128F4SymsUsedBeforeDefined->ClearAll();
        loop->likelySimd128I4SymsUsedBeforeDefined->ClearAll();
        loop->forceFloat64SymsOnEntry->ClearAll();
        loop->forceSimd128F4SymsOnEntry->ClearAll();
        loop->forceSimd128I4SymsOnEntry->ClearAll();
        loop->symsDefInLoop->ClearAll();
        loop->fieldKilled->ClearAll();
        loop->allFieldsKilled = false;
        loop->initialValueFieldMap.Reset();
    }

    // Optimize every block in the loop. Each block's data-use count is primed with its
    // successor count so block data can be released once all successors have consumed it.
    FOREACH_BLOCK_IN_LOOP(block, loop)
    {
        block->SetDataUseCount(block->GetSuccList()->Count());
        OptBlock(block);
    } NEXT_BLOCK_IN_LOOP;

    if (previousLoop == nullptr)
    {
        // Leaving the outermost loop of the nest: release root ownership.
        Assert(this->rootLoopPrePass == loop);
        this->rootLoopPrePass = nullptr;
    }

    this->prePassLoop = previousLoop;
}
  742. void
  743. GlobOpt::TailDupPass()
  744. {
  745. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  746. {
  747. BasicBlock* header = loop->GetHeadBlock();
  748. BasicBlock* loopTail = nullptr;
  749. FOREACH_PREDECESSOR_BLOCK(pred, header)
  750. {
  751. if (loop->IsDescendentOrSelf(pred->loop))
  752. {
  753. loopTail = pred;
  754. break;
  755. }
  756. } NEXT_PREDECESSOR_BLOCK;
  757. if (loopTail)
  758. {
  759. AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
  760. if (!loopTail->GetPredList()->HasOne())
  761. {
  762. TryTailDup(loopTail->GetLastInstr()->AsBranchInstr());
  763. }
  764. }
  765. } NEXT_LOOP_IN_FUNC_EDITING;
  766. }
  767. bool
  768. GlobOpt::TryTailDup(IR::BranchInstr *tailBranch)
  769. {
  770. if (PHASE_OFF(Js::TailDupPhase, tailBranch->m_func->GetTopFunc()))
  771. {
  772. return false;
  773. }
  774. if (tailBranch->IsConditional())
  775. {
  776. return false;
  777. }
  778. IR::Instr *instr;
  779. uint instrCount = 0;
  780. for (instr = tailBranch->GetPrevRealInstrOrLabel(); !instr->IsLabelInstr(); instr = instr->GetPrevRealInstrOrLabel())
  781. {
  782. if (instr->HasBailOutInfo())
  783. {
  784. break;
  785. }
  786. if (!OpCodeAttr::CanCSE(instr->m_opcode))
  787. {
  788. // Consider: We could be more aggressive here
  789. break;
  790. }
  791. instrCount++;
  792. if (instrCount > 1)
  793. {
  794. // Consider: If copy handled single-def tmps renaming, we could do more instrs
  795. break;
  796. }
  797. }
  798. if (!instr->IsLabelInstr())
  799. {
  800. return false;
  801. }
  802. IR::LabelInstr *mergeLabel = instr->AsLabelInstr();
  803. IR::Instr *mergeLabelPrev = mergeLabel->m_prev;
  804. // Skip unreferenced labels
  805. while (mergeLabelPrev->IsLabelInstr() && mergeLabelPrev->AsLabelInstr()->labelRefs.Empty())
  806. {
  807. mergeLabelPrev = mergeLabelPrev->m_prev;
  808. }
  809. BasicBlock* labelBlock = mergeLabel->GetBasicBlock();
  810. uint origPredCount = labelBlock->GetPredList()->Count();
  811. uint dupCount = 0;
  812. // We are good to go. Let's do the tail duplication.
  813. FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr*, branchEntry, &mergeLabel->labelRefs, iter)
  814. {
  815. if (branchEntry->IsUnconditional() && !branchEntry->IsMultiBranch() && branchEntry != mergeLabelPrev && branchEntry != tailBranch)
  816. {
  817. for (instr = mergeLabel->m_next; instr != tailBranch; instr = instr->m_next)
  818. {
  819. branchEntry->InsertBefore(instr->Copy());
  820. }
  821. instr = branchEntry;
  822. branchEntry->ReplaceTarget(mergeLabel, tailBranch->GetTarget());
  823. while(!instr->IsLabelInstr())
  824. {
  825. instr = instr->m_prev;
  826. }
  827. BasicBlock* branchBlock = instr->AsLabelInstr()->GetBasicBlock();
  828. labelBlock->RemovePred(branchBlock, func->m_fg);
  829. func->m_fg->AddEdge(branchBlock, tailBranch->GetTarget()->GetBasicBlock());
  830. dupCount++;
  831. }
  832. } NEXT_SLISTCOUNTED_ENTRY_EDITING;
  833. // If we've duplicated everywhere, tail block is dead and should be removed.
  834. if (dupCount == origPredCount)
  835. {
  836. AssertMsg(mergeLabel->IsUnreferenced(), "Should not remove block with referenced label.");
  837. func->m_fg->RemoveBlock(labelBlock, nullptr, true);
  838. }
  839. return true;
  840. }
// Merge the GlobOptBlockData (value maps, liveness and type-specialization bit-vectors,
// call-sequence state, etc.) of all already-processed predecessors of 'block' into
// this->blockData, then insert whatever compensation code is needed where predecessors
// disagree on a sym's representation (var/int32/lossy-int32/float64/SIMD). When a
// predecessor has already had a successor processed, an airlock block is inserted on the
// edge to hold the conversions. On loop headers (non-prepass) this also snapshots the
// entry-state bit-vectors on the Loop so back-edges can be reconciled later.
void
GlobOpt::MergePredBlocksValueMaps(BasicBlock *block)
{
    Assert(!this->isCallHelper);
    if (!this->isRecursiveCallOnLandingPad)
    {
        this->NulloutBlockData(&this->blockData);
    }
    else
    {
        // If we are going over the landing pad again after field PRE, just start again
        // with the value table where we left off.
        this->CopyBlockData(&this->blockData, &block->globOptData);
        return;
    }

    // Syms whose merged value info may differ from a predecessor's and thus need
    // per-predecessor compensation (filled in by MergeBlockData).
    BVSparse<JitArenaAllocator> symsRequiringCompensation(tempAlloc);
    {
        BVSparse<JitArenaAllocator> symsCreatedForMerge(tempAlloc);
        bool forceTypeSpecOnLoopHeader = true;
        FOREACH_PREDECESSOR_BLOCK(pred, block)
        {
            // Release an empty call sequence eagerly to reduce arena pressure.
            if (pred->globOptData.callSequence && pred->globOptData.callSequence->Empty())
            {
                JitAdelete(this->alloc, pred->globOptData.callSequence);
                pred->globOptData.callSequence = nullptr;
            }
            if (block->isLoopHeader && this->IsLoopPrePass() && this->prePassLoop == block->loop && block->loop->IsDescendentOrSelf(pred->loop))
            {
                // Loop back-edge.
                // First pass on loop runs optimistically, without doing transforms.
                // Skip this edge for now.
                continue;
            }

            // Apply any path-dependent (branch-condition) info recorded on this edge for
            // the duration of the merge, and remember how to undo it.
            PathDependentInfo *const pathDependentInfo = __edge->GetPathDependentInfo();
            PathDependentInfoToRestore pathDependentInfoToRestore;
            if (pathDependentInfo)
            {
                pathDependentInfoToRestore = UpdatePathDependentInfo(pathDependentInfo);
            }

            Assert(pred->GetDataUseCount());

            // First pred?
            if (this->blockData.symToValueMap == nullptr)
            {
                // Only one edge?
                if (pred->GetSuccList()->HasOne() && block->GetPredList()->HasOne() && block->loop == nullptr)
                {
                    // Sole straight-line predecessor: steal its data instead of cloning.
                    this->ReuseBlockData(&this->blockData, &pred->globOptData);

                    // Don't need to restore the old value info
                    pathDependentInfoToRestore.Clear();
                }
                else
                {
                    this->CloneBlockData(currentBlock, &this->blockData, pred);
                }
            }
            else
            {
                // Subsequent predecessors get merged into the accumulated data.
                const bool isLoopPrePass = IsLoopPrePass();
                this->MergeBlockData(
                    &this->blockData,
                    block,
                    pred,
                    isLoopPrePass ? nullptr : &symsRequiringCompensation,
                    isLoopPrePass ? nullptr : &symsCreatedForMerge,
                    forceTypeSpecOnLoopHeader);
                forceTypeSpecOnLoopHeader = false; // can force type-spec on the loop header only for the first back edge.
            }

            // Restore the value for the next edge
            if (pathDependentInfo)
            {
                RestorePathDependentInfo(pathDependentInfo, pathDependentInfoToRestore);
                __edge->ClearPathDependentInfo(this->alloc);
            }
        } NEXT_PREDECESSOR_BLOCK;
    }

    // Consider: We can recreate values for hoisted field so it can copy prop out of the loop
    if (this->blockData.symToValueMap == nullptr)
    {
        // No predecessor contributed data (e.g. all edges were skipped back-edges):
        // start with fresh, empty block data.
        Assert(this->blockData.hoistableFields == nullptr);
        this->InitBlockData();
    }
    else if (this->blockData.hoistableFields)
    {
        Assert(TrackHoistableFields());
        this->blockData.hoistableFields->And(this->blockData.liveFields);
    }

    if (!this->DoObjTypeSpec())
    {
        // Object type specialization is off, but if copy prop is on (e.g., /force:fieldhoist) we're not clearing liveFields,
        // so we may be letting type syms slip through this block.
        this->KillAllObjectTypes();
    }

    this->CopyBlockData(&block->globOptData, &this->blockData);

    if (this->IsLoopPrePass())
    {
        Assert(block->loop);

        if(DoBoundCheckHoist())
        {
            SetInductionVariableValueNumbers(&blockData);
        }

        if (block->isLoopHeader && this->rootLoopPrePass == block->loop)
        {
            // Capture bail out info in case we have optimization that needs it
            Assert(block->loop->bailOutInfo == nullptr);
            IR::Instr * firstInstr = block->GetFirstInstr();
            block->loop->bailOutInfo = JitAnew(this->func->m_alloc, BailOutInfo,
                firstInstr->GetByteCodeOffset(), firstInstr->m_func);
            this->FillBailOutInfo(block, block->loop->bailOutInfo);
#if ENABLE_DEBUG_CONFIG_OPTIONS
            block->loop->bailOutInfo->bailOutOpcode = Js::OpCode::LoopBodyStart;
#endif
        }

        // If loop pre-pass, don't insert convert from type-spec to var
        return;
    }

    this->CleanUpValueMaps();
    Sym *symIV = nullptr;

    // Clean up the syms requiring compensation by checking the final value in the merged block to see if the sym still requires
    // compensation. All the while, create a mapping from sym to value info in the merged block. This dictionary helps avoid a
    // value lookup in the merged block per predecessor.
    SymToValueInfoMap symsRequiringCompensationToMergedValueInfoMap(tempAlloc);
    if(!symsRequiringCompensation.IsEmpty())
    {
        const SymTable *const symTable = func->m_symTable;
        GlobHashTable *const symToValueMap = blockData.symToValueMap;
        FOREACH_BITSET_IN_SPARSEBV(id, &symsRequiringCompensation)
        {
            Sym *const sym = symTable->Find(id);
            Assert(sym);

            Value *const value = FindValue(symToValueMap, sym);
            if(!value)
            {
                continue;
            }

            // Only array value infos carry the merged-in headSegment/length syms that
            // need compensating in predecessors.
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsArrayValueInfo())
            {
                continue;
            }

            // At least one new sym was created while merging and associated with the merged value info, so those syms will
            // require compensation in predecessors. For now, the dead store phase is relied upon to remove compensation that is
            // dead due to no further uses of the new sym.
            symsRequiringCompensationToMergedValueInfoMap.Add(sym, valueInfo);
        } NEXT_BITSET_IN_SPARSEBV;
        symsRequiringCompensation.ClearAll();
    }

    if (block->isLoopHeader)
    {
        // Values on the back-edge in the prepass may be conservative for syms defined in the loop, and type specialization in
        // the prepass is not reflective of the value, but rather, is used to determine whether the sym should be specialized
        // around the loop. Additionally, some syms that are used before defined in the loop may be specialized in the loop
        // header despite not being specialized in the landing pad. Now that the type specialization bit-vectors are merged,
        // specialize the corresponding value infos in the loop header too.

        Assert(tempBv->IsEmpty());
        Loop *const loop = block->loop;
        SymTable *const symTable = func->m_symTable;
        GlobHashTable *const symToValueMap = blockData.symToValueMap;
        JitArenaAllocator *const alloc = this->alloc;

        // Int-specialized syms
        tempBv->Or(loop->likelyIntSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->And(blockData.liveInt32Syms);
        tempBv->Minus(blockData.liveLossyInt32Syms);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);
            Value *const value = FindValue(symToValueMap, varSym);
            Assert(value);
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsInt())
            {
                ChangeValueInfo(nullptr, value, valueInfo->SpecializeToInt32(alloc));
            }
        } NEXT_BITSET_IN_SPARSEBV;

        // Float-specialized syms
        tempBv->Or(loop->likelyNumberSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->forceFloat64SymsOnEntry);
        tempBv->And(blockData.liveFloat64Syms);
        GlobOptBlockData &landingPadBlockData = loop->landingPad->globOptData;
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);

            // If the type-spec sym is null or if the sym is not float-specialized in the loop landing pad, the sym may have
            // been merged to float on a loop back-edge when it was live as float on the back-edge, and live as int in the loop
            // header. In this case, compensation inserted in the loop landing pad will use BailOutNumberOnly, and so it is
            // guaranteed that the value will be float. Otherwise, if the type-spec sym exists, its field can be checked to see
            // if it's prevented from being anything but a number.
            StackSym *const typeSpecSym = varSym->GetFloat64EquivSym(nullptr);
            if(!typeSpecSym ||
                typeSpecSym->m_requiresBailOnNotNumber ||
                !IsFloat64TypeSpecialized(varSym, &landingPadBlockData))
            {
                Value *const value = FindValue(symToValueMap, varSym);
                if(value)
                {
                    ValueInfo *const valueInfo = value->GetValueInfo();
                    if(!valueInfo->IsNumber())
                    {
                        ChangeValueInfo(block, value, valueInfo->SpecializeToFloat64(alloc));
                    }
                }
                else
                {
                    SetValue(&block->globOptData, NewGenericValue(ValueType::Float), varSym);
                }
            }
        } NEXT_BITSET_IN_SPARSEBV;

        // SIMD_JS
        // Simd128 type-spec syms
        BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);

        // For syms we made alive in loop header because of hoisting, use-before-def, or def in Loop body, set their valueInfo to definite.
        // Make live on header AND in one of forceSimd128* or likelySimd128* vectors.
        tempBv->Or(loop->likelySimd128F4SymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->likelySimd128I4SymsUsedBeforeDefined);
        tempBv->Or(loop->forceSimd128F4SymsOnEntry);
        tempBv->Or(loop->forceSimd128I4SymsOnEntry);
        tempBv2.Or(blockData.liveSimd128F4Syms, blockData.liveSimd128I4Syms);
        tempBv->And(&tempBv2);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym * typeSpecSym = nullptr;
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);

            if (blockData.liveSimd128F4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128F4EquivSym(nullptr);
                if (!typeSpecSym || !IsSimd128F4TypeSpecialized(varSym, &landingPadBlockData))
                {
                    Value *const value = FindValue(symToValueMap, varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Float32x4())
                        {
                            ChangeValueInfo(block, value, valueInfo->SpecializeToSimd128F4(alloc));
                        }
                    }
                    else
                    {
                        SetValue(&block->globOptData, NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Float32x4), varSym), varSym);
                    }
                }
            }
            else if (blockData.liveSimd128I4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128I4EquivSym(nullptr);
                if (!typeSpecSym || !IsSimd128I4TypeSpecialized(varSym, &landingPadBlockData))
                {
                    Value *const value = FindValue(symToValueMap, varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Int32x4())
                        {
                            ChangeValueInfo(block, value, valueInfo->SpecializeToSimd128I4(alloc));
                        }
                    }
                    else
                    {
                        SetValue(&block->globOptData, NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Int32x4), varSym), varSym);
                    }
                }
            }
            else
            {
                Assert(UNREACHED);
            }
        } NEXT_BITSET_IN_SPARSEBV;
        tempBv->ClearAll();
    }

    // We need to handle the case where a symbol is type-spec'd coming from some predecessors,
    // but not from others.
    //
    // We can do this by inserting the right conversion in the predecessor block, but we
    // can only do this if we are the first successor of that block, since the previous successors
    // would have already been processed. Instead, we'll need to break the edge and insert a block
    // (airlock block) to put in the conversion code.
    Assert(this->tempBv->IsEmpty());

    BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv3(this->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv4(this->tempAlloc);

    // SIMD_JS
    BVSparse<JitArenaAllocator> simd128F4SymsToUnbox(this->tempAlloc);
    BVSparse<JitArenaAllocator> simd128I4SymsToUnbox(this->tempAlloc);

    FOREACH_PREDECESSOR_EDGE_EDITING(edge, block, iter)
    {
        BasicBlock *pred = edge->GetPred();

        if (pred->loop && pred->loop->GetHeadBlock() == block)
        {
            pred->DecrementDataUseCount();
            // Skip loop back-edges. We will handle these when we get to the exit blocks.
            continue;
        }

        // If the predecessor is an existing airlock compensation block, conversions go
        // into it; compute the diffs against the real predecessor behind it.
        BasicBlock *orgPred = nullptr;
        if (pred->isAirLockCompensationBlock)
        {
            Assert(pred->GetPredList()->HasOne());
            orgPred = pred;
            pred = (pred->GetPredList()->Head())->GetPred();
        }

        // Lossy int in the merged block, and no int in the predecessor - need a lossy conversion to int
        tempBv2.Minus(this->blockData.liveLossyInt32Syms, pred->globOptData.liveInt32Syms);

        // Lossless int in the merged block, and no lossless int in the predecessor - need a lossless conversion to int
        tempBv3.Minus(this->blockData.liveInt32Syms, this->blockData.liveLossyInt32Syms);
        this->tempBv->Minus(pred->globOptData.liveInt32Syms, pred->globOptData.liveLossyInt32Syms);
        tempBv3.Minus(this->tempBv);

        // Var in the merged block but not the predecessor, and float64 likewise.
        this->tempBv->Minus(this->blockData.liveVarSyms, pred->globOptData.liveVarSyms);
        tempBv4.Minus(this->blockData.liveFloat64Syms, pred->globOptData.liveFloat64Syms);

        bool symIVNeedsSpecializing = (symIV && !pred->globOptData.liveInt32Syms->Test(symIV->m_id) && !tempBv3.Test(symIV->m_id));

        // SIMD_JS
        simd128F4SymsToUnbox.Minus(this->blockData.liveSimd128F4Syms, pred->globOptData.liveSimd128F4Syms);
        simd128I4SymsToUnbox.Minus(this->blockData.liveSimd128I4Syms, pred->globOptData.liveSimd128I4Syms);

        if (!this->tempBv->IsEmpty() ||
            !tempBv2.IsEmpty() ||
            !tempBv3.IsEmpty() ||
            !tempBv4.IsEmpty() ||
            !simd128F4SymsToUnbox.IsEmpty() ||
            !simd128I4SymsToUnbox.IsEmpty() ||
            symIVNeedsSpecializing ||
            symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
        {
            // We can't un-specialize a symbol in a predecessor if we've already processed
            // a successor of that block. Instead, insert a new block on the flow edge
            // (an airlock block) and do the un-specialization there.
            //
            // Alternatively, the current block could be an exit block out of this loop, and so the predecessor may exit the
            // loop. In that case, if the predecessor may continue into the loop without exiting, then we need an airlock block
            // to do the appropriate conversions only on the exit path (preferring not to do the conversions inside the loop).
            // If, on the other hand, the predecessor always flows into the current block, then it always exits, so we don't need
            // an airlock block and can just do the conversions in the predecessor.
            if (pred->GetSuccList()->Head()->GetSucc() != block ||
                (pred->loop && pred->loop->parent == block->loop && pred->GetSuccList()->Count() > 1))
            {
                BasicBlock *airlockBlock = nullptr;
                if (!orgPred)
                {
                    GOPT_TRACE(_u("Inserting airlock block to convert syms to var between block %d and %d\n"),
                        pred->GetBlockNum(), block->GetBlockNum());
                    airlockBlock = this->func->m_fg->InsertAirlockBlock(edge);
                }
                else
                {
                    // Promote the existing compensation block into a full airlock block.
                    Assert(orgPred->isAirLockCompensationBlock);
                    airlockBlock = orgPred;
                    pred->DecrementDataUseCount();
                    airlockBlock->isAirLockCompensationBlock = false; // This is airlock block now. So remove the attribute.
                }
                this->CloneBlockData(airlockBlock, pred);
                pred = airlockBlock;
            }

            // Emit the conversions computed above into 'pred' (which may now be the airlock).
            if (!this->tempBv->IsEmpty())
            {
                this->ToVar(this->tempBv, pred);
            }
            if (!tempBv2.IsEmpty())
            {
                this->ToInt32(&tempBv2, pred, true /* lossy */);
            }
            if (!tempBv3.IsEmpty())
            {
                this->ToInt32(&tempBv3, pred, false /* lossy */);
            }
            if (!tempBv4.IsEmpty())
            {
                this->ToFloat64(&tempBv4, pred);
            }
            if (symIVNeedsSpecializing)
            {
                this->tempBv->ClearAll();
                this->tempBv->Set(symIV->m_id);
                this->ToInt32(this->tempBv, pred, false /* lossy */);
            }
            if(symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
            {
                InsertValueCompensation(pred, symsRequiringCompensationToMergedValueInfoMap);
            }

            // SIMD_JS
            if (!simd128F4SymsToUnbox.IsEmpty())
            {
                this->ToTypeSpec(&simd128F4SymsToUnbox, pred, TySimd128F4, IR::BailOutSimd128F4Only);
            }
            if (!simd128I4SymsToUnbox.IsEmpty())
            {
                this->ToTypeSpec(&simd128I4SymsToUnbox, pred, TySimd128I4, IR::BailOutSimd128I4Only);
            }
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;

    FOREACH_PREDECESSOR_EDGE(edge, block)
    {
        // Peak Memory optimization:
        // These are in an arena, but putting them on the free list greatly reduces
        // the peak memory used by the global optimizer for complex flow graphs.
        BasicBlock *pred = edge->GetPred();
        if (!block->isLoopHeader || block->loop != pred->loop)
        {
            // Skip airlock compensation block as we are not going to walk this block.
            if (pred->isAirLockCompensationBlock)
            {
                pred->DecrementDataUseCount();
                Assert(pred->GetPredList()->HasOne());
                pred = (pred->GetPredList()->Head())->GetPred();
            }

            if (pred->DecrementDataUseCount() == 0 && (!block->loop || block->loop->landingPad != pred))
            {
                if (!(pred->GetSuccList()->HasOne() && block->GetPredList()->HasOne() && block->loop == nullptr))
                {
                    this->DeleteBlockData(&pred->globOptData);
                }
                else
                {
                    // Data was transferred via ReuseBlockData; just null out the pointers.
                    this->NulloutBlockData(&pred->globOptData);
                }
            }
        }
    } NEXT_PREDECESSOR_EDGE;

    this->tempBv->ClearAll();
    Assert(!this->IsLoopPrePass());   // We already early return if we are in prepass

    if (block->isLoopHeader)
    {
        Loop *const loop = block->loop;

        // Save values live on loop entry, such that we can adjust the state of the
        // values on the back-edge to match.
        loop->varSymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->varSymsOnEntry->Copy(block->globOptData.liveVarSyms);

        loop->int32SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->int32SymsOnEntry->Copy(block->globOptData.liveInt32Syms);

        loop->lossyInt32SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->lossyInt32SymsOnEntry->Copy(block->globOptData.liveLossyInt32Syms);

        loop->float64SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->float64SymsOnEntry->Copy(block->globOptData.liveFloat64Syms);

        // SIMD_JS
        loop->simd128F4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->simd128F4SymsOnEntry->Copy(block->globOptData.liveSimd128F4Syms);

        loop->simd128I4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->simd128I4SymsOnEntry->Copy(block->globOptData.liveSimd128I4Syms);

        loop->liveFieldsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->liveFieldsOnEntry->Copy(block->globOptData.liveFields);

        if(DoBoundCheckHoist() && loop->inductionVariables)
        {
            FinalizeInductionVariables(loop, &blockData);
            if(DoLoopCountBasedBoundCheckHoist())
            {
                DetermineDominatingLoopCountableBlock(loop, block);
            }
        }
    }
    else if (!block->loop)
    {
        block->SetDataUseCount(block->GetSuccList()->Count());
    }
    else if(block == block->loop->dominatingLoopCountableBlock)
    {
        DetermineLoopCount(block->loop);
    }
}
  1297. void
  1298. GlobOpt::NulloutBlockData(GlobOptBlockData *data)
  1299. {
  1300. data->symToValueMap = nullptr;
  1301. data->exprToValueMap = nullptr;
  1302. data->liveFields = nullptr;
  1303. data->maybeWrittenTypeSyms = nullptr;
  1304. data->isTempSrc = nullptr;
  1305. data->liveVarSyms = nullptr;
  1306. data->liveInt32Syms = nullptr;
  1307. data->liveLossyInt32Syms = nullptr;
  1308. data->liveFloat64Syms = nullptr;
  1309. // SIMD_JS
  1310. data->liveSimd128F4Syms = nullptr;
  1311. data->liveSimd128I4Syms = nullptr;
  1312. data->hoistableFields = nullptr;
  1313. data->argObjSyms = nullptr;
  1314. data->maybeTempObjectSyms = nullptr;
  1315. data->canStoreTempObjectSyms = nullptr;
  1316. data->valuesToKillOnCalls = nullptr;
  1317. data->inductionVariables = nullptr;
  1318. data->availableIntBoundChecks = nullptr;
  1319. data->callSequence = nullptr;
  1320. data->startCallCount = 0;
  1321. data->argOutCount = 0;
  1322. data->totalOutParamCount = 0;
  1323. data->inlinedArgOutCount = 0;
  1324. data->hasCSECandidates = false;
  1325. data->curFunc = this->func;
  1326. data->stackLiteralInitFldDataMap = nullptr;
  1327. data->capturedValues = nullptr;
  1328. data->changedSyms = nullptr;
  1329. data->OnDataUnreferenced();
  1330. }
  1331. void
  1332. GlobOpt::InitBlockData()
  1333. {
  1334. GlobOptBlockData *const data = &this->blockData;
  1335. JitArenaAllocator *const alloc = this->alloc;
  1336. data->symToValueMap = GlobHashTable::New(alloc, 64);
  1337. data->exprToValueMap = ExprHashTable::New(alloc, 64);
  1338. data->liveFields = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1339. data->liveArrayValues = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1340. data->isTempSrc = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1341. data->liveVarSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1342. data->liveInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1343. data->liveLossyInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1344. data->liveFloat64Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1345. // SIMD_JS
  1346. data->liveSimd128F4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1347. data->liveSimd128I4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1348. data->hoistableFields = nullptr;
  1349. data->argObjSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1350. data->maybeTempObjectSyms = nullptr;
  1351. data->canStoreTempObjectSyms = nullptr;
  1352. data->valuesToKillOnCalls = JitAnew(alloc, ValueSet, alloc);
  1353. if(DoBoundCheckHoist())
  1354. {
  1355. data->inductionVariables = IsLoopPrePass() ? JitAnew(alloc, InductionVariableSet, alloc) : nullptr;
  1356. data->availableIntBoundChecks = JitAnew(alloc, IntBoundCheckSet, alloc);
  1357. }
  1358. data->maybeWrittenTypeSyms = nullptr;
  1359. data->callSequence = nullptr;
  1360. data->startCallCount = 0;
  1361. data->argOutCount = 0;
  1362. data->totalOutParamCount = 0;
  1363. data->inlinedArgOutCount = 0;
  1364. data->hasCSECandidates = false;
  1365. data->curFunc = this->func;
  1366. data->stackLiteralInitFldDataMap = nullptr;
  1367. data->changedSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1368. data->OnDataInitialized(alloc);
  1369. }
void
GlobOpt::ReuseBlockData(GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    // Transfer ownership of an expired block's GlobOpt data structures to
    // another block pointer-by-pointer, avoiding fresh allocations. No deep
    // copies are made: after this call toData owns the structures fromData
    // pointed at. The assignment list must stay in sync with the fields of
    // GlobOptBlockData (compare CopyBlockData/CloneBlockData/DeleteBlockData).
    // Reuse dead map
    toData->symToValueMap = fromData->symToValueMap;
    toData->exprToValueMap = fromData->exprToValueMap;
    toData->liveFields = fromData->liveFields;
    toData->liveArrayValues = fromData->liveArrayValues;
    toData->maybeWrittenTypeSyms = fromData->maybeWrittenTypeSyms;
    toData->isTempSrc = fromData->isTempSrc;
    toData->liveVarSyms = fromData->liveVarSyms;
    toData->liveInt32Syms = fromData->liveInt32Syms;
    toData->liveLossyInt32Syms = fromData->liveLossyInt32Syms;
    toData->liveFloat64Syms = fromData->liveFloat64Syms;
    // SIMD_JS
    toData->liveSimd128F4Syms = fromData->liveSimd128F4Syms;
    toData->liveSimd128I4Syms = fromData->liveSimd128I4Syms;
    // hoistableFields/argObjSyms are only meaningful when the corresponding
    // tracking mode is on; otherwise the target's existing value is left alone.
    if (TrackHoistableFields())
    {
        toData->hoistableFields = fromData->hoistableFields;
    }
    if (TrackArgumentsObject())
    {
        toData->argObjSyms = fromData->argObjSyms;
    }
    toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms;
    toData->canStoreTempObjectSyms = fromData->canStoreTempObjectSyms;
    toData->curFunc = fromData->curFunc;
    toData->valuesToKillOnCalls = fromData->valuesToKillOnCalls;
    toData->inductionVariables = fromData->inductionVariables;
    toData->availableIntBoundChecks = fromData->availableIntBoundChecks;
    toData->callSequence = fromData->callSequence;
    toData->startCallCount = fromData->startCallCount;
    toData->argOutCount = fromData->argOutCount;
    toData->totalOutParamCount = fromData->totalOutParamCount;
    toData->inlinedArgOutCount = fromData->inlinedArgOutCount;
    toData->hasCSECandidates = fromData->hasCSECandidates;
    toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap;
    // The reused changed-syms vector starts out empty for the new block
    // (unlike CopyBlockData, which preserves its contents).
    toData->changedSyms = fromData->changedSyms;
    toData->changedSyms->ClearAll();
    toData->OnDataReused(fromData);
}
void
GlobOpt::CopyBlockData(GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    // Shallow copy: every pointer field is aliased, so after this call both
    // blocks share the same underlying structures (no deep clone — see
    // CloneBlockData for that). Unlike ReuseBlockData, changedSyms is NOT
    // cleared here; its contents carry over. The assignment list must stay in
    // sync with the fields of GlobOptBlockData.
    toData->symToValueMap = fromData->symToValueMap;
    toData->exprToValueMap = fromData->exprToValueMap;
    toData->liveFields = fromData->liveFields;
    toData->liveArrayValues = fromData->liveArrayValues;
    toData->maybeWrittenTypeSyms = fromData->maybeWrittenTypeSyms;
    toData->isTempSrc = fromData->isTempSrc;
    toData->liveVarSyms = fromData->liveVarSyms;
    toData->liveInt32Syms = fromData->liveInt32Syms;
    toData->liveLossyInt32Syms = fromData->liveLossyInt32Syms;
    toData->liveFloat64Syms = fromData->liveFloat64Syms;
    // SIMD_JS
    toData->liveSimd128F4Syms = fromData->liveSimd128F4Syms;
    toData->liveSimd128I4Syms = fromData->liveSimd128I4Syms;
    // Copied unconditionally here (ReuseBlockData gates these on the
    // TrackHoistableFields/TrackArgumentsObject modes).
    toData->hoistableFields = fromData->hoistableFields;
    toData->argObjSyms = fromData->argObjSyms;
    toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms;
    toData->canStoreTempObjectSyms = fromData->canStoreTempObjectSyms;
    toData->curFunc = fromData->curFunc;
    toData->valuesToKillOnCalls = fromData->valuesToKillOnCalls;
    toData->inductionVariables = fromData->inductionVariables;
    toData->availableIntBoundChecks = fromData->availableIntBoundChecks;
    toData->callSequence = fromData->callSequence;
    toData->startCallCount = fromData->startCallCount;
    toData->argOutCount = fromData->argOutCount;
    toData->totalOutParamCount = fromData->totalOutParamCount;
    toData->inlinedArgOutCount = fromData->inlinedArgOutCount;
    toData->hasCSECandidates = fromData->hasCSECandidates;
    toData->changedSyms = fromData->changedSyms;
    toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap;
    toData->OnDataReused(fromData);
}
  1446. void GlobOpt::CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock)
  1447. {
  1448. CloneBlockData(toBlock, &toBlock->globOptData, fromBlock);
  1449. }
void GlobOpt::CloneBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock)
{
    // Deep-clones fromBlock's GlobOpt data into toData: value maps and bit
    // vectors are copied into fresh allocations so the two blocks can diverge
    // independently (contrast with the shallow CopyBlockData/ReuseBlockData).
    // Optional structures that are null (or empty) on the source side are left
    // null on the target side.
    GlobOptBlockData *const fromData = &fromBlock->globOptData;
    JitArenaAllocator *const alloc = this->alloc;
    toData->symToValueMap = fromData->symToValueMap->Copy();
    toData->exprToValueMap = fromData->exprToValueMap->Copy();
    // Clone the values as well to allow for flow-sensitive ValueInfo
    this->CloneValues(toBlock, toData, fromData);
    if(DoBoundCheckHoist())
    {
        CloneBoundCheckHoistBlockData(toBlock, toData, fromBlock, fromData);
    }
    // Liveness bit vectors: allocate fresh vectors and copy contents.
    toData->liveFields = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveFields->Copy(fromData->liveFields);
    toData->liveArrayValues = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveArrayValues->Copy(fromData->liveArrayValues);
    if (fromData->maybeWrittenTypeSyms)
    {
        toData->maybeWrittenTypeSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
        toData->maybeWrittenTypeSyms->Copy(fromData->maybeWrittenTypeSyms);
    }
    toData->isTempSrc = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->isTempSrc->Copy(fromData->isTempSrc);
    toData->liveVarSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveVarSyms->Copy(fromData->liveVarSyms);
    toData->liveInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveInt32Syms->Copy(fromData->liveInt32Syms);
    toData->liveLossyInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveLossyInt32Syms->Copy(fromData->liveLossyInt32Syms);
    toData->liveFloat64Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveFloat64Syms->Copy(fromData->liveFloat64Syms);
    // SIMD_JS
    toData->liveSimd128F4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveSimd128F4Syms->Copy(fromData->liveSimd128F4Syms);
    toData->liveSimd128I4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveSimd128I4Syms->Copy(fromData->liveSimd128I4Syms);
    // Mode-gated structures: only cloned when the corresponding tracking is on
    // and the source actually has data.
    if (TrackHoistableFields())
    {
        if (fromData->hoistableFields)
        {
            toData->hoistableFields = fromData->hoistableFields->CopyNew(alloc);
        }
    }
    if (TrackArgumentsObject() && fromData->argObjSyms)
    {
        toData->argObjSyms = fromData->argObjSyms->CopyNew(alloc);
    }
    if (fromData->maybeTempObjectSyms && !fromData->maybeTempObjectSyms->IsEmpty())
    {
        toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms->CopyNew(alloc);
        // canStoreTempObjectSyms is only meaningful alongside maybeTempObjectSyms.
        if (fromData->canStoreTempObjectSyms && !fromData->canStoreTempObjectSyms->IsEmpty())
        {
            toData->canStoreTempObjectSyms = fromData->canStoreTempObjectSyms->CopyNew(alloc);
        }
    }
    else
    {
        Assert(fromData->canStoreTempObjectSyms == nullptr || fromData->canStoreTempObjectSyms->IsEmpty());
    }
    toData->curFunc = fromData->curFunc;
    if (fromData->callSequence != nullptr)
    {
        toData->callSequence = JitAnew(alloc, SListBase<IR::Opnd *>);
        fromData->callSequence->CopyTo(alloc, *(toData->callSequence));
    }
    else
    {
        toData->callSequence = nullptr;
    }
    toData->startCallCount = fromData->startCallCount;
    toData->argOutCount = fromData->argOutCount;
    toData->totalOutParamCount = fromData->totalOutParamCount;
    toData->inlinedArgOutCount = fromData->inlinedArgOutCount;
    toData->hasCSECandidates = fromData->hasCSECandidates;
    // Although we don't need the data on loop pre pass, we need to do it for the loop header
    // because we capture the loop header bailout on loop prepass
    if (fromData->stackLiteralInitFldDataMap != nullptr &&
        (!this->IsLoopPrePass() || (toBlock->isLoopHeader && toBlock->loop == this->rootLoopPrePass)))
    {
        toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap->Clone();
    }
    else
    {
        toData->stackLiteralInitFldDataMap = nullptr;
    }
    toData->changedSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->changedSyms->Copy(fromData->changedSyms);
    Assert(fromData->HasData());
    toData->OnDataInitialized(alloc);
}
void
GlobOpt::CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    // Replaces every Value in toData's (already copied) symToValueMap with a
    // clone, so ValueInfo can be refined per-block (flow-sensitive). Values
    // that multiple syms share (same ValueNumber) are cloned exactly once via
    // the valuesCreatedForClone cache.
    ValueSet *const valuesToKillOnCalls = JitAnew(this->alloc, ValueSet, this->alloc);
    toData->valuesToKillOnCalls = valuesToKillOnCalls;
    // Values are shared between symbols with the same ValueNumber.
    // Use a dictionary to share the clone values.
    ValueSetByValueNumber *const valuesCreatedForClone = this->valuesCreatedForClone;
    Assert(valuesCreatedForClone);
    Assert(valuesCreatedForClone->Count() == 0);
    DebugOnly(ValueSetByValueNumber originalValues(tempAlloc, 64));
    // Walk every bucket of the hash table directly, rewriting each bucket's
    // element in place with its clone.
    const uint tableSize = toData->symToValueMap->tableSize;
    SListBase<GlobHashBucket> *const table = toData->symToValueMap->table;
    for (uint i = 0; i < tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &table[i])
        {
            Value *value = bucket.element;
            ValueNumber valueNum = value->GetValueNumber();
#if DBG
            // Ensure that the set of values in fromData contains only one value per value number. Byte-code constant values
            // are reused in multiple blocks without cloning, so exclude those value numbers.
            {
                Value *const previouslyClonedOriginalValue = originalValues.Lookup(valueNum);
                if (previouslyClonedOriginalValue)
                {
                    if (!byteCodeConstantValueNumbersBv->Test(valueNum))
                    {
                        Assert(value == previouslyClonedOriginalValue);
                    }
                }
                else
                {
                    originalValues.Add(value);
                }
            }
#endif
            // Clone on first encounter of a value number; reuse the clone for
            // every subsequent sym that maps to the same value.
            Value *newValue = valuesCreatedForClone->Lookup(valueNum);
            if (!newValue)
            {
                newValue = CopyValue(value, valueNum);
                TrackMergedValueForKills(newValue, toData, nullptr);
                valuesCreatedForClone->Add(newValue);
            }
            bucket.element = newValue;
        } NEXT_SLISTBASE_ENTRY;
    }
    // The cache is a member reused across calls; leave it empty for the next one.
    valuesCreatedForClone->Clear();
    ProcessValueKills(toBlock, toData);
}
  1590. template <typename CapturedList, typename CapturedItemsAreEqual>
  1591. void
  1592. GlobOpt::MergeCapturedValues(
  1593. GlobOptBlockData * toData,
  1594. SListBase<CapturedList> * toList,
  1595. SListBase<CapturedList> * fromList,
  1596. CapturedItemsAreEqual itemsAreEqual)
  1597. {
  1598. typename SListBase<CapturedList>::Iterator iterTo(toList);
  1599. typename SListBase<CapturedList>::Iterator iterFrom(fromList);
  1600. bool hasTo = iterTo.Next();
  1601. bool hasFrom = fromList == nullptr ? false : iterFrom.Next();
  1602. // to be conservative, only copy the captured value for common sym Ids
  1603. // in from and to CapturedList, mark all non-common sym Ids for re-capture
  1604. while (hasFrom && hasTo)
  1605. {
  1606. Sym * symFrom = iterFrom.Data().Key();
  1607. Sym * symTo = iterTo.Data().Key();
  1608. if (symFrom->m_id < symTo->m_id)
  1609. {
  1610. toData->changedSyms->Set(symFrom->m_id);
  1611. hasFrom = iterFrom.Next();
  1612. }
  1613. else if(symFrom->m_id > symTo->m_id)
  1614. {
  1615. toData->changedSyms->Set(symTo->m_id);
  1616. hasTo = iterTo.Next();
  1617. }
  1618. else
  1619. {
  1620. if (!itemsAreEqual(&iterFrom.Data(), &iterTo.Data()))
  1621. {
  1622. toData->changedSyms->Set(symTo->m_id);
  1623. }
  1624. hasFrom = iterFrom.Next();
  1625. hasTo = iterTo.Next();
  1626. }
  1627. }
  1628. bool hasRemain = hasFrom || hasTo;
  1629. if (hasRemain)
  1630. {
  1631. typename SListBase<CapturedList>::Iterator iterRemain(hasFrom ? iterFrom : iterTo);
  1632. do
  1633. {
  1634. Sym * symRemain = iterRemain.Data().Key();
  1635. toData->changedSyms->Set(symRemain->m_id);
  1636. hasRemain = iterRemain.Next();
  1637. } while (hasRemain);
  1638. }
  1639. }
void
GlobOpt::MergeBlockData(
    GlobOptBlockData *toData,
    BasicBlock *toBlock,
    BasicBlock *fromBlock,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge,
    bool forceTypeSpecOnLoopHeader)
{
    // Merges fromBlock's GlobOpt data into toData at a control-flow join.
    // Liveness/valueness is intersected or unioned field-by-field according to
    // the rules documented inline; value maps are merged via MergeValueMaps.
    // When toBlock is a loop header, forceTypeSpecOnLoopHeader additionally
    // forces type-specialized liveness on entry for syms used before defined
    // in the loop.
    GlobOptBlockData *fromData = &(fromBlock->globOptData);
    if(DoBoundCheckHoist())
    {
        // Do this before merging values so that it can see whether a sym's value was changed on one side or the other
        MergeBoundCheckHoistBlockData(toBlock, toData, fromBlock, fromData);
    }
    bool isLoopBackEdge = toBlock->isLoopHeader;
    this->MergeValueMaps(toData, toBlock, fromBlock, symsRequiringCompensation, symsCreatedForMerge);
    this->InsertCloneStrs(toBlock, toData, fromData);
    // Fields/array-values/temp-src liveness: a sym must be live on both sides
    // to stay live after the merge.
    toData->liveFields->And(fromData->liveFields);
    toData->liveArrayValues->And(fromData->liveArrayValues);
    toData->isTempSrc->And(fromData->isTempSrc);
    toData->hasCSECandidates &= fromData->hasCSECandidates;
    if (toData->capturedValues == nullptr)
    {
        // No captured values on the "to" side yet: adopt the "from" side's
        // captures wholesale and union the changed-sym sets.
        toData->capturedValues = fromData->capturedValues;
        toData->changedSyms->Or(fromData->changedSyms);
    }
    else
    {
        // Merge each capture list pairwise; syms whose captures disagree are
        // marked in changedSyms for re-capture (see MergeCapturedValues).
        MergeCapturedValues(
            toData,
            &toData->capturedValues->constantValues,
            fromData->capturedValues == nullptr ? nullptr : &fromData->capturedValues->constantValues,
            [&](ConstantStackSymValue * symValueFrom, ConstantStackSymValue * symValueTo)
            {
                return symValueFrom->Value().IsEqual(symValueTo->Value());
            });
        MergeCapturedValues(
            toData,
            &toData->capturedValues->copyPropSyms,
            fromData->capturedValues == nullptr ? nullptr : &fromData->capturedValues->copyPropSyms,
            [&](CopyPropSyms * copyPropSymFrom, CopyPropSyms * copyPropSymTo)
            {
                // Copy-prop captures are equal when they name the same sym and
                // both keys currently have the same value number.
                if (copyPropSymFrom->Value()->m_id == copyPropSymTo->Value()->m_id)
                {
                    Value * val = FindValue(copyPropSymFrom->Key());
                    Value * copyVal = FindValue(copyPropSymTo->Key());
                    return (val != nullptr && copyVal != nullptr &&
                        val->GetValueNumber() == copyVal->GetValueNumber());
                }
                return false;
            });
    }
    // maybeWrittenTypeSyms is a may-set: union it across the join.
    if (fromData->maybeWrittenTypeSyms)
    {
        if (toData->maybeWrittenTypeSyms == nullptr)
        {
            toData->maybeWrittenTypeSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
            toData->maybeWrittenTypeSyms->Copy(fromData->maybeWrittenTypeSyms);
        }
        else
        {
            toData->maybeWrittenTypeSyms->Or(fromData->maybeWrittenTypeSyms);
        }
    }
    {
        // - Keep the var sym live if any of the following is true:
        //     - The var sym is live on both sides
        //     - The var sym is the only live sym that contains the lossless value of the sym on a side (that is, the lossless
        //       int32 sym is not live, and the float64 sym is not live on that side), and the sym of any type is live on the
        //       other side
        //     - On a side, the var and float64 syms are live, the lossless int32 sym is not live, the sym's merged value is
        //       likely int, and the sym of any type is live on the other side. Since the value is likely int, it may be
        //       int-specialized (with lossless conversion) later. Keeping only the float64 sym live requires doing a lossless
        //       conversion from float64 to int32, with bailout if the value of the float is not a true 32-bit integer. Checking
        //       that is costly, and if the float64 sym is converted back to var, it does not become a tagged int, causing a
        //       guaranteed bailout if a lossless conversion to int happens later. Keep the var sym live to preserve its
        //       tagged-ness so that it can be int-specialized while avoiding unnecessary bailouts.
        // - Keep the int32 sym live if it's live on both sides
        //     - Mark the sym as lossy if it's lossy on any side
        // - Keep the float64 sym live if it's live on a side and the sym of a specialized lossless type is live on the other
        //   side
        //
        // fromData.temp =
        //     (fromData.var - (fromData.int32 - fromData.lossyInt32)) &
        //     (toData.var | toData.int32 | toData.float64)
        // toData.temp =
        //     (toData.var - (toData.int32 - toData.lossyInt32)) &
        //     (fromData.var | fromData.int32 | fromData.float64)
        // toData.var =
        //     (fromData.var & toData.var) |
        //     (fromData.temp - fromData.float64) |
        //     (toData.temp - toData.float64) |
        //     (fromData.temp & fromData.float64 | toData.temp & toData.float64) & (value ~ int)
        //
        // toData.float64 =
        //     fromData.float64 & ((toData.int32 - toData.lossyInt32) | toData.float64) |
        //     toData.float64 & ((fromData.int32 - fromData.lossyInt32) | fromData.float64)
        // toData.int32 &= fromData.int32
        // toData.lossyInt32 = (fromData.lossyInt32 | toData.lossyInt32) & toData.int32
        BVSparse<JitArenaAllocator> tempBv1(this->tempAlloc);
        BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
        if (isLoopBackEdge && forceTypeSpecOnLoopHeader)
        {
            Loop *const loop = toBlock->loop;
            // Force to lossless int32:
            // forceLosslessInt32 =
            //     ((fromData.int32 - fromData.lossyInt32) - (toData.int32 - toData.lossyInt32)) &
            //     loop.likelyIntSymsUsedBeforeDefined &
            //     toData.var
            tempBv1.Minus(fromData->liveInt32Syms, fromData->liveLossyInt32Syms);
            tempBv2.Minus(toData->liveInt32Syms, toData->liveLossyInt32Syms);
            tempBv1.Minus(&tempBv2);
            tempBv1.And(loop->likelyIntSymsUsedBeforeDefined);
            tempBv1.And(toData->liveVarSyms);
            toData->liveInt32Syms->Or(&tempBv1);
            toData->liveLossyInt32Syms->Minus(&tempBv1);
            if(DoLossyIntTypeSpec())
            {
                // Force to lossy int32:
                // forceLossyInt32 = (fromData.int32 - toData.int32) & loop.symsUsedBeforeDefined & toData.var
                tempBv1.Minus(fromData->liveInt32Syms, toData->liveInt32Syms);
                tempBv1.And(loop->symsUsedBeforeDefined);
                tempBv1.And(toData->liveVarSyms);
                toData->liveInt32Syms->Or(&tempBv1);
                toData->liveLossyInt32Syms->Or(&tempBv1);
            }
            // Force to float64:
            // forceFloat64 =
            //     fromData.float64 & loop.forceFloat64 |
            //     (fromData.float64 - toData.float64) & loop.likelyNumberSymsUsedBeforeDefined
            tempBv1.And(fromData->liveFloat64Syms, loop->forceFloat64SymsOnEntry);
            toData->liveFloat64Syms->Or(&tempBv1);
            tempBv1.Minus(fromData->liveFloat64Syms, toData->liveFloat64Syms);
            tempBv1.And(loop->likelyNumberSymsUsedBeforeDefined);
            toData->liveFloat64Syms->Or(&tempBv1);
            // Force to Simd128 type:
            // if live on the backedge and we are hoisting the operand.
            // or if live on the backedge only and used before def in the loop.
            tempBv1.And(fromData->liveSimd128F4Syms, loop->forceSimd128F4SymsOnEntry);
            toData->liveSimd128F4Syms->Or(&tempBv1);
            tempBv1.Minus(fromData->liveSimd128F4Syms, toData->liveSimd128F4Syms);
            tempBv1.And(loop->likelySimd128F4SymsUsedBeforeDefined);
            toData->liveSimd128F4Syms->Or(&tempBv1);
            tempBv1.And(fromData->liveSimd128I4Syms, loop->forceSimd128I4SymsOnEntry);
            toData->liveSimd128I4Syms->Or(&tempBv1);
            tempBv1.Minus(fromData->liveSimd128I4Syms, toData->liveSimd128I4Syms);
            tempBv1.And(loop->likelySimd128I4SymsUsedBeforeDefined);
            toData->liveSimd128I4Syms->Or(&tempBv1);
        }
        BVSparse<JitArenaAllocator> simdSymsToVar(this->tempAlloc);
        {
            // SIMD_JS
            // If we have simd128 type-spec sym live as one type on one side, but not of same type on the other, we look at the merged ValueType.
            // If it's Likely the simd128 type, we choose to keep the type-spec sym (compensate with a FromVar), if the following is true:
            // - We are not in jitLoopBody. Introducing a FromVar for compensation extends bytecode syms lifetime. If the value
            //   is actually dead, and we enter the loop-body after bailing out from SimpleJit, the value will not be restored in
            //   the bailout code.
            // - Value was never Undefined/Null. Avoid unboxing of possibly uninitialized values.
            // - Not loop back-edge. To keep unboxed value, the value has to be used-before def in the loop-body. This is done
            //   separately in forceSimd128*SymsOnEntry and included in loop-header.
            // Live syms as F4 on one edge only
            tempBv1.Xor(fromData->liveSimd128F4Syms, toData->liveSimd128F4Syms);
            FOREACH_BITSET_IN_SPARSEBV(id, &tempBv1)
            {
                StackSym *const stackSym = this->func->m_symTable->FindStackSym(id);
                Assert(stackSym);
                Value *const value = this->FindValue(toData->symToValueMap, stackSym);
                ValueInfo * valueInfo = value ? value->GetValueInfo() : nullptr;
                // There are two possible representations for Simd128F4 Value: F4 or Var.
                // If the merged ValueType is LikelySimd128F4, then on the edge where F4 is dead, Var must be alive.
                // Unbox to F4 type-spec sym.
                if (
                    valueInfo && valueInfo->IsLikelySimd128Float32x4() &&
                    !valueInfo->HasBeenUndefined() && !valueInfo->HasBeenNull() &&
                    !isLoopBackEdge && !func->IsLoopBody()
                   )
                {
                    toData->liveSimd128F4Syms->Set(id);
                }
                else
                {
                    // If live on both edges, box it.
                    if (IsLive(stackSym, fromData) && IsLive(stackSym, toData))
                    {
                        simdSymsToVar.Set(id);
                    }
                    // kill F4 sym
                    toData->liveSimd128F4Syms->Clear(id);
                }
            } NEXT_BITSET_IN_SPARSEBV;
            // Same for I4
            tempBv1.Xor(fromData->liveSimd128I4Syms, toData->liveSimd128I4Syms);
            FOREACH_BITSET_IN_SPARSEBV(id, &tempBv1)
            {
                StackSym *const stackSym = this->func->m_symTable->FindStackSym(id);
                Assert(stackSym);
                Value *const value = this->FindValue(toData->symToValueMap, stackSym);
                ValueInfo * valueInfo = value ? value->GetValueInfo() : nullptr;
                if (
                    valueInfo && valueInfo->IsLikelySimd128Int32x4() &&
                    !valueInfo->HasBeenUndefined() && !valueInfo->HasBeenNull() &&
                    !isLoopBackEdge && !func->IsLoopBody()
                   )
                {
                    toData->liveSimd128I4Syms->Set(id);
                }
                else
                {
                    if (IsLive(stackSym, fromData) && IsLive(stackSym, toData))
                    {
                        simdSymsToVar.Set(id);
                    }
                    toData->liveSimd128I4Syms->Clear(id);
                }
            } NEXT_BITSET_IN_SPARSEBV;
        }
        {
            BVSparse<JitArenaAllocator> tempBv3(this->tempAlloc);
            // fromData.temp =
            //     (fromData.var - (fromData.int32 - fromData.lossyInt32)) &
            //     (toData.var | toData.int32 | toData.float64)
            tempBv2.Minus(fromData->liveInt32Syms, fromData->liveLossyInt32Syms);
            tempBv1.Minus(fromData->liveVarSyms, &tempBv2);
            tempBv2.Or(toData->liveVarSyms, toData->liveInt32Syms);
            tempBv2.Or(toData->liveFloat64Syms);
            tempBv1.And(&tempBv2);
            // toData.temp =
            //     (toData.var - (toData.int32 - toData.lossyInt32)) &
            //     (fromData.var | fromData.int32 | fromData.float64)
            tempBv3.Minus(toData->liveInt32Syms, toData->liveLossyInt32Syms);
            tempBv2.Minus(toData->liveVarSyms, &tempBv3);
            tempBv3.Or(fromData->liveVarSyms, fromData->liveInt32Syms);
            tempBv3.Or(fromData->liveFloat64Syms);
            tempBv2.And(&tempBv3);
            {
                BVSparse<JitArenaAllocator> tempBv4(this->tempAlloc);
                // fromData.temp & fromData.float64 | toData.temp & toData.float64
                tempBv3.And(&tempBv1, fromData->liveFloat64Syms);
                tempBv4.And(&tempBv2, toData->liveFloat64Syms);
                tempBv3.Or(&tempBv4);
            }
            // (fromData.temp - fromData.float64) |
            // (toData.temp - toData.float64)
            tempBv1.Minus(fromData->liveFloat64Syms);
            tempBv2.Minus(toData->liveFloat64Syms);
            tempBv1.Or(&tempBv2);
            // toData.var =
            //     (fromData.var & toData.var) |
            //     (fromData.temp - fromData.float64) |
            //     (toData.temp - toData.float64)
            toData->liveVarSyms->And(fromData->liveVarSyms);
            toData->liveVarSyms->Or(&tempBv1);
            // toData.var |=
            //     (fromData.temp & fromData.float64 | toData.temp & toData.float64) & (value ~ int)
            FOREACH_BITSET_IN_SPARSEBV(id, &tempBv3)
            {
                StackSym *const stackSym = this->func->m_symTable->FindStackSym(id);
                Assert(stackSym);
                Value *const value = this->FindValue(toData->symToValueMap, stackSym);
                if(value)
                {
                    ValueInfo *const valueInfo = value->GetValueInfo();
                    if(valueInfo->IsInt() || (valueInfo->IsLikelyInt() && DoAggressiveIntTypeSpec()))
                    {
                        toData->liveVarSyms->Set(id);
                    }
                }
            } NEXT_BITSET_IN_SPARSEBV;
            // SIMD_JS
            // Simd syms that need boxing
            toData->liveVarSyms->Or(&simdSymsToVar);
        }
        // fromData.float64 & ((toData.int32 - toData.lossyInt32) | toData.float64)
        tempBv1.Minus(toData->liveInt32Syms, toData->liveLossyInt32Syms);
        tempBv1.Or(toData->liveFloat64Syms);
        tempBv1.And(fromData->liveFloat64Syms);
        // toData.float64 & ((fromData.int32 - fromData.lossyInt32) | fromData.float64)
        tempBv2.Minus(fromData->liveInt32Syms, fromData->liveLossyInt32Syms);
        tempBv2.Or(fromData->liveFloat64Syms);
        tempBv2.And(toData->liveFloat64Syms);
        // toData.float64 =
        //     fromData.float64 & ((toData.int32 - toData.lossyInt32) | toData.float64) |
        //     toData.float64 & ((fromData.int32 - fromData.lossyInt32) | fromData.float64)
        toData->liveFloat64Syms->Or(&tempBv1, &tempBv2);
        // toData.int32 &= fromData.int32
        // toData.lossyInt32 = (fromData.lossyInt32 | toData.lossyInt32) & toData.int32
        toData->liveInt32Syms->And(fromData->liveInt32Syms);
        toData->liveLossyInt32Syms->Or(fromData->liveLossyInt32Syms);
        toData->liveLossyInt32Syms->And(toData->liveInt32Syms);
    }
    // Hoistable fields form a may-set across the join.
    if (TrackHoistableFields() && HasHoistableFields(fromData))
    {
        if (toData->hoistableFields)
        {
            toData->hoistableFields->Or(fromData->hoistableFields);
        }
        else
        {
            toData->hoistableFields = fromData->hoistableFields->CopyNew(this->alloc);
        }
    }
    // If the argument-object sym sets differ across the join, the arguments
    // object cannot be stack-allocated.
    if (TrackArgumentsObject())
    {
        if (!toData->argObjSyms->Equal(fromData->argObjSyms))
        {
            CannotAllocateArgumentsObjectOnStack();
        }
    }
    // maybeTempObjectSyms is unioned; canStoreTempObjectSyms is intersected
    // (and cleared entirely when the "from" side has none).
    if (fromData->maybeTempObjectSyms && !fromData->maybeTempObjectSyms->IsEmpty())
    {
        if (toData->maybeTempObjectSyms)
        {
            toData->maybeTempObjectSyms->Or(fromData->maybeTempObjectSyms);
        }
        else
        {
            toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms->CopyNew(this->alloc);
        }
        if (fromData->canStoreTempObjectSyms && !fromData->canStoreTempObjectSyms->IsEmpty())
        {
            if (toData->canStoreTempObjectSyms)
            {
                // Both need to be temp object
                toData->canStoreTempObjectSyms->And(fromData->canStoreTempObjectSyms);
            }
        }
        else if (toData->canStoreTempObjectSyms)
        {
            toData->canStoreTempObjectSyms->ClearAll();
        }
    }
    else
    {
        Assert(!fromData->canStoreTempObjectSyms || fromData->canStoreTempObjectSyms->IsEmpty());
        if (toData->canStoreTempObjectSyms)
        {
            toData->canStoreTempObjectSyms->ClearAll();
        }
    }
    // Call-sequence bookkeeping must already agree across the join.
    // NOTE(review): the second operand of the Assert below dereferences
    // toData->callSequence; if exactly one side's callSequence were null this
    // would fault in debug builds — relies on the invariant that both sides
    // always match. Confirm the invariant if this ever fires.
    Assert(toData->curFunc == fromData->curFunc);
    Assert((toData->callSequence == nullptr && fromData->callSequence == nullptr) || toData->callSequence->Equals(*(fromData->callSequence)));
    Assert(toData->startCallCount == fromData->startCallCount);
    Assert(toData->argOutCount == fromData->argOutCount);
    Assert(toData->totalOutParamCount == fromData->totalOutParamCount);
    Assert(toData->inlinedArgOutCount == fromData->inlinedArgOutCount);
    // stackLiteralInitFldDataMap is a union of the stack literal from two path.
    // Although we don't need the data on loop prepass, we need to do it for the loop header
    // because we capture the loop header bailout on loop prepass.
    if (fromData->stackLiteralInitFldDataMap != nullptr &&
        (!this->IsLoopPrePass() || (toBlock->isLoopHeader && toBlock->loop == this->rootLoopPrePass)))
    {
        if (toData->stackLiteralInitFldDataMap == nullptr)
        {
            toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap->Clone();
        }
        else
        {
            StackLiteralInitFldDataMap * toMap = toData->stackLiteralInitFldDataMap;
            fromData->stackLiteralInitFldDataMap->Map([toMap](StackSym * stackSym, StackLiteralInitFldData const& data)
            {
                if (toMap->AddNew(stackSym, data) == -1)
                {
                    // If there is an existing data for the stackSym, both path should match
                    DebugOnly(StackLiteralInitFldData const * currentData);
                    Assert(toMap->TryGetReference(stackSym, &currentData));
                    Assert(currentData->currentInitFldCount == data.currentInitFldCount);
                    Assert(currentData->propIds == data.propIds);
                }
            });
        }
    }
}
void
GlobOpt::DeleteBlockData(GlobOptBlockData *data)
{
    // Frees every structure owned by a block's GlobOpt data. The deletion
    // list must stay in sync with the fields of GlobOptBlockData (compare
    // ReuseBlockData/CloneBlockData). Optional structures are only freed when
    // non-null.
    JitArenaAllocator *const alloc = this->alloc;
    data->symToValueMap->Delete();
    data->exprToValueMap->Delete();
    JitAdelete(alloc, data->liveFields);
    JitAdelete(alloc, data->liveArrayValues);
    if (data->maybeWrittenTypeSyms)
    {
        JitAdelete(alloc, data->maybeWrittenTypeSyms);
    }
    JitAdelete(alloc, data->isTempSrc);
    JitAdelete(alloc, data->liveVarSyms);
    JitAdelete(alloc, data->liveInt32Syms);
    JitAdelete(alloc, data->liveLossyInt32Syms);
    JitAdelete(alloc, data->liveFloat64Syms);
    // SIMD_JS
    JitAdelete(alloc, data->liveSimd128F4Syms);
    JitAdelete(alloc, data->liveSimd128I4Syms);
    if (data->hoistableFields)
    {
        JitAdelete(alloc, data->hoistableFields);
    }
    if (data->argObjSyms)
    {
        JitAdelete(alloc, data->argObjSyms);
    }
    // canStoreTempObjectSyms can only exist alongside maybeTempObjectSyms.
    if (data->maybeTempObjectSyms)
    {
        JitAdelete(alloc, data->maybeTempObjectSyms);
        if (data->canStoreTempObjectSyms)
        {
            JitAdelete(alloc, data->canStoreTempObjectSyms);
        }
    }
    else
    {
        Assert(!data->canStoreTempObjectSyms);
    }
    JitAdelete(alloc, data->valuesToKillOnCalls);
    if(data->inductionVariables)
    {
        JitAdelete(alloc, data->inductionVariables);
    }
    if(data->availableIntBoundChecks)
    {
        JitAdelete(alloc, data->availableIntBoundChecks);
    }
    if (data->stackLiteralInitFldDataMap)
    {
        JitAdelete(alloc, data->stackLiteralInitFldDataMap);
    }
    // Null out changedSyms after freeing so stale pointers aren't reused.
    JitAdelete(alloc, data->changedSyms);
    data->changedSyms = nullptr;
    data->OnDataDeleted();
}
  2070. void
  2071. GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
  2072. {
  2073. FOREACH_BITSET_IN_SPARSEBV(id, bv)
  2074. {
  2075. StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
  2076. IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, TyVar, this->func);
  2077. IR::Instr *lastInstr = block->GetLastInstr();
  2078. if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
  2079. {
  2080. // If branch is using this symbol, hoist the operand as the ToVar load will get
  2081. // inserted right before the branch.
  2082. IR::Opnd *src1 = lastInstr->GetSrc1();
  2083. if (src1)
  2084. {
  2085. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
  2086. {
  2087. lastInstr->HoistSrc1(Js::OpCode::Ld_A);
  2088. }
  2089. IR::Opnd *src2 = lastInstr->GetSrc2();
  2090. if (src2)
  2091. {
  2092. if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
  2093. {
  2094. lastInstr->HoistSrc2(Js::OpCode::Ld_A);
  2095. }
  2096. }
  2097. }
  2098. this->ToVar(lastInstr, newOpnd, block, nullptr, false);
  2099. }
  2100. else
  2101. {
  2102. IR::Instr *lastNextInstr = lastInstr->m_next;
  2103. this->ToVar(lastNextInstr, newOpnd, block, nullptr, false);
  2104. }
  2105. } NEXT_BITSET_IN_SPARSEBV;
  2106. }
  2107. void
  2108. GlobOpt::ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr)
  2109. {
  2110. return this->ToTypeSpec(bv, block, TyInt32, IR::BailOutIntOnly, lossy, insertBeforeInstr);
  2111. }
  2112. void
  2113. GlobOpt::ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
  2114. {
  2115. return this->ToTypeSpec(bv, block, TyFloat64, IR::BailOutNumberOnly);
  2116. }
void
GlobOpt::ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
{
    // For each sym in 'bv', insert a conversion in 'block' from the sym's currently
    // live representation (var, lossless int32, float64, or simd128) to 'toType',
    // using 'bailOutKind' where a runtime check is needed. 'lossy' permits lossy int
    // conversions. When 'insertBeforeInstr' is null, the conversion is placed at the
    // end of the block (before a trailing branch, whose uses are hoisted first).
    FOREACH_BITSET_IN_SPARSEBV(id, bv)
    {
        StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
        IRType fromType;

        // Win8 bug: 757126. If we are trying to type specialize the arguments object,
        // let's make sure stack args optimization is not enabled. This is a problem, particularly,
        // if the instruction comes from an unreachable block. In other cases, the pass on the
        // instruction itself should disable arguments object optimization.
        if(block->globOptData.argObjSyms && IsArgumentsSymID(id, block->globOptData))
        {
            CannotAllocateArgumentsObjectOnStack();
        }

        // Pick the source representation in priority order: var, then lossless
        // int32, then float64, then one of the simd128 forms. For the non-var
        // cases, the conversion source is the corresponding type-spec equiv sym.
        if (block->globOptData.liveVarSyms->Test(id))
        {
            fromType = TyVar;
        }
        else if (block->globOptData.liveInt32Syms->Test(id) && !block->globOptData.liveLossyInt32Syms->Test(id))
        {
            fromType = TyInt32;
            stackSym = stackSym->GetInt32EquivSym(this->func);
        }
        else if (block->globOptData.liveFloat64Syms->Test(id))
        {
            fromType = TyFloat64;
            stackSym = stackSym->GetFloat64EquivSym(this->func);
        }
        else
        {
            // Must be live in one of the SIMD representations.
            Assert(IsLiveAsSimd128(stackSym, &block->globOptData));
            if (IsLiveAsSimd128F4(stackSym, &block->globOptData))
            {
                fromType = TySimd128F4;
                stackSym = stackSym->GetSimd128F4EquivSym(this->func);
            }
            else
            {
                fromType = TySimd128I4;
                stackSym = stackSym->GetSimd128I4EquivSym(this->func);
            }
        }

        IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, fromType, this->func);
        IR::Instr *lastInstr = block->GetLastInstr();

        if (!insertBeforeInstr && lastInstr->IsBranchInstr())
        {
            // If branch is using this symbol, hoist the operand as the ToInt32 load will get
            // inserted right before the branch.
            IR::Instr *instrPrev = lastInstr->m_prev;
            IR::Opnd *src1 = lastInstr->GetSrc1();
            if (src1)
            {
                if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
                {
                    lastInstr->HoistSrc1(Js::OpCode::Ld_A);
                }
                IR::Opnd *src2 = lastInstr->GetSrc2();
                if (src2)
                {
                    if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
                    {
                        lastInstr->HoistSrc2(Js::OpCode::Ld_A);
                    }
                }

                // Did we insert anything?
                if (lastInstr->m_prev != instrPrev)
                {
                    // If we had ByteCodeUses right before the branch, move them back down
                    // so they stay adjacent to the branch, below the hoisted loads.
                    IR::Instr *insertPoint = lastInstr;
                    for (IR::Instr *instrBytecode = instrPrev; instrBytecode->m_opcode == Js::OpCode::ByteCodeUses; instrBytecode = instrBytecode->m_prev)
                    {
                        instrBytecode->Unlink();
                        insertPoint->InsertBefore(instrBytecode);
                        insertPoint = instrBytecode;
                    }
                }
            }
        }
        this->ToTypeSpecUse(nullptr, newOpnd, block, nullptr, nullptr, toType, bailOutKind, lossy, insertBeforeInstr);
    } NEXT_BITSET_IN_SPARSEBV;
}
void
GlobOpt::CleanUpValueMaps()
{
    // Periodically prune the sym-to-value and expr-to-value maps for the current
    // block: drop entries for syms that are dead per backward data flow, keeping
    // copy-prop candidates, constants, and call-sequence syms alive, then drop CSE
    // candidates whose source value numbers are no longer available.

    // Don't do cleanup if it's been done recently.
    // Landing pad could get optimized twice...
    // We want the same info out the first and second time. So always cleanup.
    // Increasing the cleanup threshold count for asmjs to 500
    uint cleanupCount = (!GetIsAsmJSFunc()) ? CONFIG_FLAG(GoptCleanupThreshold) : CONFIG_FLAG(AsmGoptCleanupThreshold);
    if (!this->currentBlock->IsLandingPad() && this->instrCountSinceLastCleanUp < cleanupCount)
    {
        return;
    }
    this->instrCountSinceLastCleanUp = 0;

    GlobHashTable *thisTable = this->blockData.symToValueMap;
    BVSparse<JitArenaAllocator> deadSymsBv(this->tempAlloc);        // syms tentatively marked dead (may be resurrected below)
    BVSparse<JitArenaAllocator> keepAliveSymsBv(this->tempAlloc);   // symStores referenced by surviving values
    BVSparse<JitArenaAllocator> availableValueNumbers(this->tempAlloc);
    // Byte-code constant value numbers are always considered available.
    availableValueNumbers.Copy(byteCodeConstantValueNumbersBv);
    BVSparse<JitArenaAllocator> *upwardExposedUses = this->currentBlock->upwardExposedUses;
    BVSparse<JitArenaAllocator> *upwardExposedFields = this->currentBlock->upwardExposedFields;
    bool isInLoop = !!this->currentBlock->loop;

    // Collect syms referenced by the current call sequence; those must not be
    // pruned even when backward data flow says they're dead (see comment below).
    BVSparse<JitArenaAllocator> symsInCallSequence(this->tempAlloc);
    SListBase<IR::Opnd *> * callSequence = this->currentBlock->globOptData.callSequence;
    if (callSequence && !callSequence->Empty())
    {
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, callSequence)
        {
            StackSym * sym = opnd->GetStackSym();
            symsInCallSequence.Set(sym->m_id);
        }
    }
    // Note: NEXT_SLISTBASE_ENTRY supplies the closing braces for the FOREACH macro;
    // the brace placement here is unusual but balanced.
    NEXT_SLISTBASE_ENTRY;

    for (uint i = 0; i < thisTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(GlobHashBucket, bucket, &thisTable->table[i], iter)
        {
            bool isSymUpwardExposed = upwardExposedUses->Test(bucket.value->m_id) || upwardExposedFields->Test(bucket.value->m_id);
            if (!isSymUpwardExposed && symsInCallSequence.Test(bucket.value->m_id))
            {
                // Don't remove/shrink sym-value pair if the sym is referenced in callSequence even if the sym is dead according to backward data flow.
                // This is possible in some edge cases that an infinite loop is involved when evaluating parameter for a function (between StartCall and Call),
                // there is no backward data flow into the infinite loop block, but non empty callSequence still populates to it in this (forward) pass
                // which causes error when looking up value for the syms in callSequence (cannot find the value).
                // It would cause error to fill out the bailout information for the loop blocks.
                // Remove dead syms from callSequence has some risk because there are various associated counters which need to be consistent.
                continue;
            }

            // Make sure symbol was created before backward pass.
            // If symbols isn't upward exposed, mark it as dead.
            // If a symbol was copy-prop'd in a loop prepass, the upwardExposedUses info could be wrong. So wait until we are out of the loop before clearing it.
            if ((SymID)bucket.value->m_id <= this->maxInitialSymID && !isSymUpwardExposed
                && (!isInLoop || !this->prePassCopyPropSym->Test(bucket.value->m_id)))
            {
                Value *val = bucket.element;
                ValueInfo *valueInfo = val->GetValueInfo();

                Sym * sym = bucket.value;
                Sym *symStore = valueInfo->GetSymStore();

                if (symStore && symStore == bucket.value)
                {
                    // Keep constants around, as we don't know if there will be further uses
                    if (!bucket.element->GetValueInfo()->IsVarConstant() && !bucket.element->GetValueInfo()->HasIntConstantValue())
                    {
                        // Symbol may still be a copy-prop candidate. Wait before deleting it.
                        deadSymsBv.Set(bucket.value->m_id);

                        // Make sure the type sym is added to the dead syms vector as well, because type syms are
                        // created in backward pass and so their symIds > maxInitialSymID.
                        if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                        {
                            deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
                else
                {
                    // Make sure the type sym is added to the dead syms vector as well, because type syms are
                    // created in backward pass and so their symIds > maxInitialSymID. Perhaps we could remove
                    // it explicitly here, but would it work alright with the iterator?
                    if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                    {
                        deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                    }

                    // Not a copy-prop candidate; delete it right away.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->blockData.liveInt32Syms->Clear(sym->m_id);
                    this->blockData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->blockData.liveFloat64Syms->Clear(sym->m_id);
                }
            }
            else
            {
                Sym * sym = bucket.value;

                if (sym->IsPropertySym() && !this->blockData.liveFields->Test(sym->m_id))
                {
                    // Remove propertySyms which are not live anymore.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->blockData.liveInt32Syms->Clear(sym->m_id);
                    this->blockData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->blockData.liveFloat64Syms->Clear(sym->m_id);
                }
                else
                {
                    // Look at the copy-prop candidate. We don't want to get rid of the data for a symbol which is
                    // a copy-prop candidate.
                    Value *val = bucket.element;
                    ValueInfo *valueInfo = val->GetValueInfo();

                    Sym *symStore = valueInfo->GetSymStore();

                    if (symStore && symStore != bucket.value)
                    {
                        keepAliveSymsBv.Set(symStore->m_id);
                        if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                        {
                            keepAliveSymsBv.Set(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    // A sym needed as some surviving value's symStore is not dead after all.
    deadSymsBv.Minus(&keepAliveSymsBv);

    // Now cleanup exprToValueMap table
    ExprHashTable *thisExprTable = this->blockData.exprToValueMap;
    bool oldHasCSECandidatesValue = this->currentBlock->globOptData.hasCSECandidates; // Could be false if none need bailout.
    this->currentBlock->globOptData.hasCSECandidates = false;

    for (uint i = 0; i < thisExprTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(ExprHashBucket, bucket, &thisExprTable->table[i], iter)
        {
            ExprHash hash = bucket.value;
            ValueNumber src1ValNum = hash.GetSrc1ValueNumber();
            ValueNumber src2ValNum = hash.GetSrc2ValueNumber();

            // If src1Val or src2Val are not available anymore, no point keeping this CSE candidate
            bool removeCurrent = false;
            if ((src1ValNum && !availableValueNumbers.Test(src1ValNum))
                || (src2ValNum && !availableValueNumbers.Test(src2ValNum)))
            {
                removeCurrent = true;
            }
            else
            {
                // If we are keeping this value, make sure we also keep the symStore in the value table
                removeCurrent = true; // Remove by default, unless it's set to false later below.
                Value *val = bucket.element;
                if (val)
                {
                    Sym *symStore = val->GetValueInfo()->GetSymStore();
                    if (symStore)
                    {
                        Value *symStoreVal = this->FindValue(this->currentBlock->globOptData.symToValueMap, symStore);

                        if (symStoreVal && symStoreVal->GetValueNumber() == val->GetValueNumber())
                        {
                            // symStore still holds the same value: keep the CSE entry
                            // and resurrect the symStore (and its type sym).
                            removeCurrent = false;
                            deadSymsBv.Clear(symStore->m_id);
                            if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                            {
                                deadSymsBv.Clear(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                            }
                        }
                    }
                }
            }

            if(removeCurrent)
            {
                iter.RemoveCurrent(thisExprTable->alloc);
            }
            else
            {
                this->currentBlock->globOptData.hasCSECandidates = oldHasCSECandidatesValue;
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    // Finally drop the confirmed-dead syms from the value table.
    FOREACH_BITSET_IN_SPARSEBV(dead_id, &deadSymsBv)
    {
        thisTable->Clear(dead_id);
    }
    NEXT_BITSET_IN_SPARSEBV;

    if (!deadSymsBv.IsEmpty())
    {
        if (this->func->IsJitInDebugMode())
        {
            // Do not remove non-temp local vars from liveVarSyms (i.e. do not let them become dead).
            // We will need to restore all initialized/used so far non-temp local during bail out.
            // (See BackwardPass::ProcessBailOutInfo)
            Assert(this->func->m_nonTempLocalVars);
            BVSparse<JitArenaAllocator> tempBv(this->tempAlloc);
            tempBv.Minus(&deadSymsBv, this->func->m_nonTempLocalVars);
            this->blockData.liveVarSyms->Minus(&tempBv);
#if DBG
            tempBv.And(this->blockData.liveInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveInt32Syms?");
            tempBv.And(this->blockData.liveLossyInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveLossyInt32Syms?");
            tempBv.And(this->blockData.liveFloat64Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveFloat64Syms?");
#endif
        }
        else
        {
            this->blockData.liveVarSyms->Minus(&deadSymsBv);
        }

        this->blockData.liveInt32Syms->Minus(&deadSymsBv);
        this->blockData.liveLossyInt32Syms->Minus(&deadSymsBv);
        this->blockData.liveFloat64Syms->Minus(&deadSymsBv);
    }

    // These per-block vectors are not needed past this point; free them.
    JitAdelete(this->alloc, upwardExposedUses);
    this->currentBlock->upwardExposedUses = nullptr;
    JitAdelete(this->alloc, upwardExposedFields);
    this->currentBlock->upwardExposedFields = nullptr;
    if (this->currentBlock->cloneStrCandidates)
    {
        JitAdelete(this->alloc, this->currentBlock->cloneStrCandidates);
        this->currentBlock->cloneStrCandidates = nullptr;
    }
}
  2414. PRECandidatesList * GlobOpt::FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc)
  2415. {
  2416. // Iterate over the value table looking for propertySyms which are candidates to
  2417. // pre-load in the landing pad for field PRE
  2418. GlobHashTable *valueTable = block->globOptData.symToValueMap;
  2419. Loop *loop = block->loop;
  2420. PRECandidatesList *candidates = nullptr;
  2421. for (uint i = 0; i < valueTable->tableSize; i++)
  2422. {
  2423. FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &valueTable->table[i])
  2424. {
  2425. Sym *sym = bucket.value;
  2426. if (!sym->IsPropertySym())
  2427. {
  2428. continue;
  2429. }
  2430. PropertySym *propertySym = sym->AsPropertySym();
  2431. // Field should be live on the back-edge
  2432. if (!block->globOptData.liveFields->Test(propertySym->m_id))
  2433. {
  2434. continue;
  2435. }
  2436. // Field should be live in the landing pad as well
  2437. if (!loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
  2438. {
  2439. continue;
  2440. }
  2441. Value *value = bucket.element;
  2442. Sym *symStore = value->GetValueInfo()->GetSymStore();
  2443. if (!symStore || !symStore->IsStackSym())
  2444. {
  2445. continue;
  2446. }
  2447. // Check upwardExposed in case of:
  2448. // s1 = 0;
  2449. // loop:
  2450. // = o.x;
  2451. // foo();
  2452. // o.x = s1;
  2453. // Can't thrash s1 in loop top.
  2454. if (!symStore->AsStackSym()->IsSingleDef() || loop->GetHeadBlock()->upwardExposedUses->Test(symStore->m_id))
  2455. {
  2456. // If symStore isn't singleDef, we need to make sure it still has the same value.
  2457. // This usually fails if we are not aggressive at transferring values in the prepass.
  2458. Value **pSymStoreFromValue = valueTable->Get(symStore->m_id);
  2459. // Consider: We should be fine if symStore isn't live in landing pad...
  2460. if (!pSymStoreFromValue || (*pSymStoreFromValue)->GetValueNumber() != value->GetValueNumber())
  2461. {
  2462. continue;
  2463. }
  2464. }
  2465. BasicBlock *landingPad = loop->landingPad;
  2466. Value *landingPadValue = this->FindValue(landingPad->globOptData.symToValueMap, propertySym);
  2467. if (!landingPadValue)
  2468. {
  2469. // Value should be added as initial value or already be there.
  2470. return nullptr;
  2471. }
  2472. IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
  2473. if (!ldInstr)
  2474. {
  2475. continue;
  2476. }
  2477. if (!candidates)
  2478. {
  2479. candidates = Anew(alloc, PRECandidatesList, alloc);
  2480. }
  2481. candidates->Prepend(&bucket);
  2482. } NEXT_SLISTBASE_ENTRY;
  2483. }
  2484. return candidates;
  2485. }
  2486. PRECandidatesList * GlobOpt::RemoveUnavailableCandidates(BasicBlock *block, PRECandidatesList *candidates, JitArenaAllocator *alloc)
  2487. {
  2488. // In case of multiple back-edges to the loop, make sure the candidates are still valid.
  2489. FOREACH_SLIST_ENTRY_EDITING(GlobHashBucket*, candidate, (SList<GlobHashBucket*>*)candidates, iter)
  2490. {
  2491. Value *candidateValue = candidate->element;
  2492. PropertySym *candidatePropertySym = candidate->value->AsPropertySym();
  2493. ValueNumber valueNumber = candidateValue->GetValueNumber();
  2494. Sym *symStore = candidateValue->GetValueInfo()->GetSymStore();
  2495. Value *blockValue = this->FindValue(block->globOptData.symToValueMap, candidatePropertySym);
  2496. if (blockValue && blockValue->GetValueNumber() == valueNumber
  2497. && blockValue->GetValueInfo()->GetSymStore() == symStore)
  2498. {
  2499. Value *symStoreValue = this->FindValue(block->globOptData.symToValueMap, symStore);
  2500. if (symStoreValue && symStoreValue->GetValueNumber() == valueNumber)
  2501. {
  2502. continue;
  2503. }
  2504. }
  2505. iter.RemoveCurrent();
  2506. } NEXT_SLIST_ENTRY_EDITING;
  2507. return candidates;
  2508. }
  2509. PRECandidatesList * GlobOpt::FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc)
  2510. {
  2511. // Find the set of PRE candidates
  2512. BasicBlock *loopHeader = loop->GetHeadBlock();
  2513. PRECandidatesList *candidates = nullptr;
  2514. bool firstBackEdge = true;
  2515. FOREACH_PREDECESSOR_BLOCK(blockPred, loopHeader)
  2516. {
  2517. if (!loop->IsDescendentOrSelf(blockPred->loop))
  2518. {
  2519. // Not a loop back-edge
  2520. continue;
  2521. }
  2522. if (firstBackEdge)
  2523. {
  2524. candidates = this->FindBackEdgePRECandidates(blockPred, alloc);
  2525. }
  2526. else
  2527. {
  2528. candidates = this->RemoveUnavailableCandidates(blockPred, candidates, alloc);
  2529. }
  2530. } NEXT_PREDECESSOR_BLOCK;
  2531. return candidates;
  2532. }
BOOL GlobOpt::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
{
    // Try to insert a landing-pad load for one field PRE candidate. Returns true
    // when the load was inserted; false when the candidate must be retried later
    // (e.g. its object pointer isn't live in the landing pad yet) or abandoned.
    PropertySym *propertySym = candidate->value->AsPropertySym();
    StackSym *objPtrSym = propertySym->m_stackSym;

    // If objPtr isn't live, we'll retry later.
    // Another PRE candidate may insert a load for it.
    if (!this->IsLive(objPtrSym, loop->landingPad))
    {
        return false;
    }
    BasicBlock *landingPad = loop->landingPad;
    Value *value = candidate->element;
    Sym *symStore = value->GetValueInfo()->GetSymStore();

    // The symStore can't be live into the loop
    // The symStore needs to still have the same value
    Assert(symStore && symStore->IsStackSym());

    if (this->IsLive(symStore, loop->landingPad))
    {
        // May have already been hoisted:
        //  o.x = t1;
        //  o.y = t1;
        return false;
    }
    Value *landingPadValue = this->FindValue(landingPad->globOptData.symToValueMap, propertySym);

    // Value should be added as initial value or already be there.
    Assert(landingPadValue);

    IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
    Assert(ldInstr);

    // Create instr to put in landing pad for compensation
    Assert(IsPREInstrCandidateLoad(ldInstr->m_opcode));
    IR::SymOpnd *ldSrc = ldInstr->GetSrc1()->AsSymOpnd();

    if (ldSrc->m_sym != propertySym)
    {
        // The prepass load may be on a different propertySym with an equivalent
        // object pointer. Verify the objPtr values match before reusing the load.
        Value *val1 = this->FindValue(ldSrc->m_sym->AsPropertySym()->m_stackSym);
        Value *val2 = this->FindValue(propertySym->m_stackSym);
        if (!val1 || !val2 || val1->GetValueNumber() != val2->GetValueNumber())
        {
            return false;
        }
    }

    ldInstr = ldInstr->Copy();

    // Consider: Shouldn't be necessary once we have copy-prop in prepass...
    ldInstr->GetSrc1()->AsSymOpnd()->m_sym = propertySym;
    ldSrc = ldInstr->GetSrc1()->AsSymOpnd();

    if (ldSrc->IsPropertySymOpnd())
    {
        // Strip flow-sensitive info from the copied operand; it is not valid at
        // the landing-pad insertion point.
        IR::PropertySymOpnd *propSymOpnd = ldSrc->AsPropertySymOpnd();
        IR::PropertySymOpnd *newPropSymOpnd;

        newPropSymOpnd = propSymOpnd->AsPropertySymOpnd()->CopyWithoutFlowSensitiveInfo(this->func);
        ldInstr->ReplaceSrc1(newPropSymOpnd);
    }

    if (ldInstr->GetDst()->AsRegOpnd()->m_sym != symStore)
    {
        // Redirect the load to define the symStore.
        ldInstr->ReplaceDst(IR::RegOpnd::New(symStore->AsStackSym(), TyVar, this->func));
    }

    ldInstr->GetSrc1()->SetIsJITOptimizedReg(true);
    ldInstr->GetDst()->SetIsJITOptimizedReg(true);

    landingPad->globOptData.liveVarSyms->Set(symStore->m_id);
    loop->fieldPRESymStore->Set(symStore->m_id);

    ValueType valueType(ValueType::Uninitialized);
    Value *initialValue;

    if (loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue))
    {
        // Update the profiled field info on the copied load to reflect the value
        // known for this candidate.
        if (ldInstr->IsProfiledInstr())
        {
            if (initialValue->GetValueNumber() == value->GetValueNumber())
            {
                if (value->GetValueInfo()->IsUninitialized())
                {
                    valueType = ldInstr->AsProfiledInstr()->u.FldInfo().valueType;
                }
                else
                {
                    valueType = value->GetValueInfo()->Type();
                }
            }
            else
            {
                valueType = ValueType::Uninitialized;
            }
            ldInstr->AsProfiledInstr()->u.FldInfo().valueType = valueType;
        }
    }
    else
    {
        valueType = landingPadValue->GetValueInfo()->Type();
    }

    loop->symsUsedBeforeDefined->Set(symStore->m_id);

    if (valueType.IsLikelyNumber())
    {
        loop->likelyNumberSymsUsedBeforeDefined->Set(symStore->m_id);
        if (DoAggressiveIntTypeSpec() ? valueType.IsLikelyInt() : valueType.IsInt())
        {
            // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
            // specialization is enabled
            loop->likelyIntSymsUsedBeforeDefined->Set(symStore->m_id);
        }
    }

    // Insert in landing pad
    if (ldInstr->HasAnyImplicitCalls())
    {
        // The load may trigger implicit calls; place it inside the loop's
        // disable-implicit-call region.
        IR::Instr * bailInstr = EnsureDisableImplicitCallRegion(loop);

        bailInstr->InsertBefore(ldInstr);
    }
    else if (loop->endDisableImplicitCall)
    {
        loop->endDisableImplicitCall->InsertBefore(ldInstr);
    }
    else
    {
        loop->landingPad->InsertAfter(ldInstr);
    }

    ldInstr->ClearByteCodeOffset();
    ldInstr->SetByteCodeOffset(landingPad->GetFirstInstr());

#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
    {
        Output::Print(_u("** TRACE: Field PRE: field pre-loaded in landing pad of loop head #%-3d: "), loop->GetHeadBlock()->GetBlockNum());
        ldInstr->Dump();
        Output::Print(_u("\n"));
    }
#endif

    return true;
}
  2659. void GlobOpt::PreloadPRECandidates(Loop *loop, PRECandidatesList *candidates)
  2660. {
  2661. // Insert loads in landing pad for field PRE candidates. Iterate while(changed)
  2662. // for the o.x.y cases.
  2663. BOOL changed = true;
  2664. if (!candidates)
  2665. {
  2666. return;
  2667. }
  2668. Assert(loop->landingPad->GetFirstInstr() == loop->landingPad->GetLastInstr());
  2669. while (changed)
  2670. {
  2671. changed = false;
  2672. FOREACH_SLIST_ENTRY_EDITING(GlobHashBucket*, candidate, (SList<GlobHashBucket*>*)candidates, iter)
  2673. {
  2674. if (this->PreloadPRECandidate(loop, candidate))
  2675. {
  2676. changed = true;
  2677. iter.RemoveCurrent();
  2678. }
  2679. } NEXT_SLIST_ENTRY_EDITING;
  2680. }
  2681. }
  2682. void GlobOpt::FieldPRE(Loop *loop)
  2683. {
  2684. if (!DoFieldPRE(loop))
  2685. {
  2686. return;
  2687. }
  2688. PRECandidatesList *candidates;
  2689. JitArenaAllocator *alloc = this->tempAlloc;
  2690. candidates = this->FindPossiblePRECandidates(loop, alloc);
  2691. this->PreloadPRECandidates(loop, candidates);
  2692. }
void GlobOpt::InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    // On a back-edge merge into a loop header (outside the prepass), insert a
    // CloneStr in the landing pad for every clone-str candidate sym that is a temp
    // source on the back-edge, live as a var both in the landing pad and on the
    // back-edge, and likely a string on both paths; then mark it as a temp source
    // in the merged data.
    if (toBlock->isLoopHeader   // isLoopBackEdge
        && toBlock->cloneStrCandidates
        && !IsLoopPrePass())
    {
        Loop *loop = toBlock->loop;
        BasicBlock *landingPad = loop->landingPad;
        const SymTable *const symTable = func->m_symTable;
        Assert(tempBv->IsEmpty());

        // Restrict to candidates that are also temp sources on the back-edge.
        tempBv->And(toBlock->cloneStrCandidates, fromData->isTempSrc);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const sym = (StackSym *)symTable->Find(id);
            Assert(sym);

            if (!landingPad->globOptData.liveVarSyms->Test(id)
                || !fromData->liveVarSyms->Test(id))
            {
                // Must be live as a var in both the landing pad and the back-edge.
                continue;
            }

            Value * landingPadValue = FindValue(landingPad->globOptData.symToValueMap, sym);
            if (landingPadValue == nullptr)
            {
                continue;
            }

            Value * loopValue = FindValue(fromData->symToValueMap, sym);
            if (loopValue == nullptr)
            {
                continue;
            }

            ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
            ValueInfo *loopValueInfo = loopValue->GetValueInfo();

            if (landingPadValueInfo->IsLikelyString()
                && loopValueInfo->IsLikelyString())
            {
                // CloneStr with dst == src, inserted before the loop's bailout
                // instruction if there is one, otherwise at the top of the landing pad.
                IR::Instr *cloneStr = IR::Instr::New(Js::OpCode::CloneStr, this->func);
                IR::RegOpnd *opnd = IR::RegOpnd::New(sym, IRType::TyVar, this->func);
                cloneStr->SetDst(opnd);
                cloneStr->SetSrc1(opnd);
                if (loop->bailOutInfo->bailOutInstr)
                {
                    loop->bailOutInfo->bailOutInstr->InsertBefore(cloneStr);
                }
                else
                {
                    landingPad->InsertAfter(cloneStr);
                }
                toData->isTempSrc->Set(id);
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
        tempBv->ClearAll();
    }
}
void
GlobOpt::MergeValueMaps(
    GlobOptBlockData *toData,
    BasicBlock *toBlock,
    BasicBlock *fromBlock,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
{
    // Merge fromBlock's sym-to-value map into toData (toBlock's data) by walking
    // both hash tables in parallel (bucket lists are ordered by sym id). Syms with
    // no mergeable value on both paths are dropped from toData. Also intersects
    // the CSE expression map and re-evaluates the values to kill on calls.
    GlobOptBlockData *fromData = &(fromBlock->globOptData);
    bool isLoopBackEdge = toBlock->isLoopHeader;
    Loop *loop = toBlock->loop;
    bool isLoopPrepass = (loop && this->prePassLoop == loop);

    Assert(valuesCreatedForMerge->Count() == 0);
    DebugOnly(ValueSetByValueNumber mergedValues(tempAlloc, 64));

    BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills = tempBv;
    Assert(mergedValueTypesTrackedForKills->IsEmpty());
    toData->valuesToKillOnCalls->Clear(); // the tracking will be reevaluated based on merged value types

    GlobHashTable *thisTable = toData->symToValueMap;
    GlobHashTable *otherTable = fromData->symToValueMap;
    for (uint i = 0; i < thisTable->tableSize; i++)
    {
        SListBase<GlobHashBucket>::Iterator iter2(&otherTable->table[i]);
        iter2.Next();
        FOREACH_SLISTBASE_ENTRY_EDITING(GlobHashBucket, bucket, &thisTable->table[i], iter)
        {
            // Advance the fromData iterator until it catches up with this sym id.
            while (iter2.IsValid() && bucket.value->m_id < iter2.Data().value->m_id)
            {
                iter2.Next();
            }
            Value *newValue = nullptr;

            if (iter2.IsValid() && bucket.value->m_id == iter2.Data().value->m_id)
            {
                // Sym present on both paths: merge the two values.
                newValue =
                    MergeValues(
                        bucket.element,
                        iter2.Data().element,
                        iter2.Data().value,
                        toData,
                        fromData,
                        isLoopBackEdge,
                        symsRequiringCompensation,
                        symsCreatedForMerge);
            }
            if (newValue == nullptr)
            {
                // Sym missing (or unmergeable) on the other path: drop it from toData.
                iter.RemoveCurrent(thisTable->alloc);
                continue;
            }
            else
            {
#if DBG
                // Ensure that only one value per value number is produced by merge. Byte-code constant values are reused in
                // multiple blocks without cloning, so exclude those value numbers.
                {
                    Value *const previouslyMergedValue = mergedValues.Lookup(newValue->GetValueNumber());
                    if (previouslyMergedValue)
                    {
                        if (!byteCodeConstantValueNumbersBv->Test(newValue->GetValueNumber()))
                        {
                            Assert(newValue == previouslyMergedValue);
                        }
                    }
                    else
                    {
                        mergedValues.Add(newValue);
                    }
                }
#endif

                TrackMergedValueForKills(newValue, toData, mergedValueTypesTrackedForKills);
                bucket.element = newValue;
            }
            iter2.Next();
        } NEXT_SLISTBASE_ENTRY_EDITING;

        if (isLoopPrepass && !this->rootLoopPrePass->allFieldsKilled)
        {
            // NOTE(review): this loop only advances iter2 past the remaining fromData
            // entries without using them; presumably the full source merges those
            // leftover prepass values into toData here — confirm against upstream.
            while (iter2.IsValid())
            {
                iter2.Next();
            }
        }
    }

    valuesCreatedForMerge->Clear();
    DebugOnly(mergedValues.Reset());

    mergedValueTypesTrackedForKills->ClearAll();

    // Keep only CSE candidate expressions common to both paths.
    toData->exprToValueMap->And(fromData->exprToValueMap);
    ProcessValueKills(toBlock, toData);

    bool isLastLoopBackEdge = false;

    if (isLoopBackEdge)
    {
        ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(toBlock, toData);

        BasicBlock *lastBlock = nullptr;
        FOREACH_PREDECESSOR_BLOCK(pred, toBlock)
        {
            // Predecessors are expected in increasing block-number order.
            Assert(!lastBlock || pred->GetBlockNum() > lastBlock->GetBlockNum());
            lastBlock = pred;
        }NEXT_PREDECESSOR_BLOCK;
        // NOTE(review): isLastLoopBackEdge is computed but unused in the visible
        // code — possibly vestigial; confirm against the full source.
        isLastLoopBackEdge = (lastBlock == fromBlock);
    }
}
// Merges the values tracked for one sym on the "to" and "from" paths of a control-flow join.
// Returns the merged value, or nullptr when the merge yields no useful info (the caller then
// drops the sym's value on the merged path).
Value *
GlobOpt::MergeValues(
    Value *toDataValue,
    Value *fromDataValue,
    Sym *fromDataSym,
    GlobOptBlockData *toData,
    GlobOptBlockData *fromData,
    bool isLoopBackEdge,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
{
    // Same map
    if (toDataValue == fromDataValue)
    {
        return toDataValue;
    }

    const ValueNumberPair sourceValueNumberPair(toDataValue->GetValueNumber(), fromDataValue->GetValueNumber());
    const bool sameValueNumber = sourceValueNumberPair.First() == sourceValueNumberPair.Second();

    ValueInfo *newValueInfo =
        this->MergeValueInfo(
            toDataValue,
            fromDataValue,
            fromDataSym,
            fromData,
            isLoopBackEdge,
            sameValueNumber,
            symsRequiringCompensation,
            symsCreatedForMerge);

    if (newValueInfo == nullptr)
    {
        // The value infos could not be merged (e.g. MergeJsTypeValueInfo is conservative on loop
        // back-edges and may return nullptr).
        return nullptr;
    }

    if (sameValueNumber && newValueInfo == toDataValue->GetValueInfo())
    {
        // The merge lost nothing relative to the "to" side; keep the existing value as-is.
        return toDataValue;
    }

    // There may be other syms in toData that haven't been merged yet, referring to the current toData value for this sym. If
    // the merge produced a new value info, don't corrupt the value info for the other sym by changing the same value. Instead,
    // create one value per source value number pair per merge and reuse that for new value infos.
    Value *newValue = valuesCreatedForMerge->Lookup(sourceValueNumberPair, nullptr);
    if(newValue)
    {
        Assert(sameValueNumber == (newValue->GetValueNumber() == toDataValue->GetValueNumber()));

        // This is an exception where Value::SetValueInfo is called directly instead of GlobOpt::ChangeValueInfo, because we're
        // actually generating new value info through merges.
        newValue->SetValueInfo(newValueInfo);
    }
    else
    {
        // Keep the value number when both paths agreed on it; otherwise the merged value is genuinely new.
        newValue = NewValue(sameValueNumber ? sourceValueNumberPair.First() : NewValueNumber(), newValueInfo);
        valuesCreatedForMerge->Add(sourceValueNumberPair, newValue);
    }

    // Set symStore if same on both paths.
    if (toDataValue->GetValueInfo()->GetSymStore() == fromDataValue->GetValueInfo()->GetSymStore())
    {
        this->SetSymStoreDirect(newValueInfo, toDataValue->GetValueInfo()->GetSymStore());
    }

    return newValue;
}
// Merges two value infos for the same sym across a control-flow join. Dispatches to the
// specialized merge for JS-type values, likely-int values, and optimized-array values;
// all other cases merge down to a plain ValueInfo carrying the merged ValueType.
// May return nullptr (via MergeJsTypeValueInfo) when no useful merged info exists.
ValueInfo *
GlobOpt::MergeValueInfo(
    Value *toDataVal,
    Value *fromDataVal,
    Sym *fromDataSym,
    GlobOptBlockData *fromData,
    bool isLoopBackEdge,
    bool sameValueNumber,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
{
    ValueInfo *const toDataValueInfo = toDataVal->GetValueInfo();
    ValueInfo *const fromDataValueInfo = fromDataVal->GetValueInfo();

    // Same value
    if (toDataValueInfo == fromDataValueInfo)
    {
        return toDataValueInfo;
    }

    // JS-type value infos never merge with other kinds; if one side is a JS type, both must be.
    if (toDataValueInfo->IsJsType() || fromDataValueInfo->IsJsType())
    {
        Assert(toDataValueInfo->IsJsType() && fromDataValueInfo->IsJsType());
        return MergeJsTypeValueInfo(toDataValueInfo->AsJsType(), fromDataValueInfo->AsJsType(), isLoopBackEdge, sameValueNumber);
    }

    ValueType newValueType(toDataValueInfo->Type().Merge(fromDataValueInfo->Type()));
    if (newValueType.IsLikelyInt())
    {
        return MergeLikelyIntValueInfo(toDataVal, fromDataVal, newValueType);
    }
    if(newValueType.IsLikelyAnyOptimizedArray())
    {
        if(newValueType.IsLikelyArrayOrObjectWithArray() &&
            toDataValueInfo->IsLikelyArrayOrObjectWithArray() &&
            fromDataValueInfo->IsLikelyArrayOrObjectWithArray())
        {
            // Value type merge for missing values is aggressive by default (for profile data) - if either side likely has no
            // missing values, then the merged value type also likely has no missing values. This is because arrays often start
            // off having missing values but are eventually filled up. In GlobOpt however, we need to be conservative because
            // the existence of a value type that likely has missing values indicates that it is more likely for it to have
            // missing values than not. Also, StElems that are likely to create missing values are tracked in profile data and
            // will update value types to say they are now likely to have missing values, and that needs to be propagated
            // conservatively.
            newValueType =
                newValueType.SetHasNoMissingValues(
                    toDataValueInfo->HasNoMissingValues() && fromDataValueInfo->HasNoMissingValues());

            if(toDataValueInfo->HasIntElements() != fromDataValueInfo->HasIntElements() ||
                toDataValueInfo->HasFloatElements() != fromDataValueInfo->HasFloatElements())
            {
                // When merging arrays with different native storage types, make the merged value type a likely version to force
                // array checks to be done again and cause a conversion and/or bailout as necessary
                newValueType = newValueType.ToLikely();
            }
        }

        // Only when both sides carry ArrayValueInfo (tracked segment/length syms) and the merged
        // type is a definite object does the sym-level array merge apply.
        if(!(newValueType.IsObject() && toDataValueInfo->IsArrayValueInfo() && fromDataValueInfo->IsArrayValueInfo()))
        {
            return ValueInfo::New(alloc, newValueType);
        }

        return
            MergeArrayValueInfo(
                newValueType,
                toDataValueInfo->AsArrayValueInfo(),
                fromDataValueInfo->AsArrayValueInfo(),
                fromDataSym,
                symsRequiringCompensation,
                symsCreatedForMerge);
    }

    // Consider: If both values are VarConstantValueInfo with the same value, we could
    // merge them preserving the value.
    return ValueInfo::New(this->alloc, newValueType);
}
// Merges two distinct value infos whose merged value type is at least likely-int.
// Preserves, in order of preference: an equal int constant on both sides, merged int bounds,
// a conservative int range covering both sides, or just the merged value type.
ValueInfo *
GlobOpt::MergeLikelyIntValueInfo(Value *toDataVal, Value *fromDataVal, ValueType const newValueType)
{
    Assert(newValueType.IsLikelyInt());

    ValueInfo *const toDataValueInfo = toDataVal->GetValueInfo();
    ValueInfo *const fromDataValueInfo = fromDataVal->GetValueInfo();
    Assert(toDataValueInfo != fromDataValueInfo);

    bool wasNegativeZeroPreventedByBailout;
    if(newValueType.IsInt())
    {
        int32 toDataIntConstantValue, fromDataIntConstantValue;
        if (toDataValueInfo->TryGetIntConstantValue(&toDataIntConstantValue) &&
            fromDataValueInfo->TryGetIntConstantValue(&fromDataIntConstantValue) &&
            toDataIntConstantValue == fromDataIntConstantValue)
        {
            // A new value number must be created to register the fact that the value has changed. Otherwise, if the value
            // changed inside a loop, the sym may look invariant on the loop back-edge (and hence not turned into a number
            // value), and its constant value from the first iteration may be incorrectly propagated after the loop.
            return IntConstantValueInfo::New(this->alloc, toDataIntConstantValue);
        }

        // Negative-zero bailout tracking must survive the merge if either side relied on it.
        wasNegativeZeroPreventedByBailout =
            toDataValueInfo->WasNegativeZeroPreventedByBailout() ||
            fromDataValueInfo->WasNegativeZeroPreventedByBailout();
    }
    else
    {
        wasNegativeZeroPreventedByBailout = false;
    }

    const IntBounds *const toDataValBounds =
        toDataValueInfo->IsIntBounded() ? toDataValueInfo->AsIntBounded()->Bounds() : nullptr;
    const IntBounds *const fromDataValBounds =
        fromDataValueInfo->IsIntBounded() ? fromDataValueInfo->AsIntBounded()->Bounds() : nullptr;
    if(toDataValBounds || fromDataValBounds)
    {
        const IntBounds *mergedBounds;
        if(toDataValBounds && fromDataValBounds)
        {
            mergedBounds = IntBounds::Merge(toDataVal, toDataValBounds, fromDataVal, fromDataValBounds);
        }
        else
        {
            // Only one side carries full bounds; merge against the other side's constant bounds
            // when they can be obtained, otherwise give up on bounds.
            IntConstantBounds constantBounds;
            if(toDataValBounds)
            {
                mergedBounds =
                    fromDataValueInfo->TryGetIntConstantBounds(&constantBounds, true)
                        ? IntBounds::Merge(toDataVal, toDataValBounds, fromDataVal, constantBounds)
                        : nullptr;
            }
            else
            {
                Assert(fromDataValBounds);
                mergedBounds =
                    toDataValueInfo->TryGetIntConstantBounds(&constantBounds, true)
                        ? IntBounds::Merge(fromDataVal, fromDataValBounds, toDataVal, constantBounds)
                        : nullptr;
            }
        }

        if(mergedBounds)
        {
            if(mergedBounds->RequiresIntBoundedValueInfo(newValueType))
            {
                return IntBoundedValueInfo::New(newValueType, mergedBounds, wasNegativeZeroPreventedByBailout, alloc);
            }
            // Bounds add nothing beyond a plain range here; release them.
            mergedBounds->Delete();
        }
    }

    if(newValueType.IsInt())
    {
        // Fall back to a conservative range spanning both sides' min/max.
        int32 min1, max1, min2, max2;
        toDataValueInfo->GetIntValMinMax(&min1, &max1, false);
        fromDataValueInfo->GetIntValMinMax(&min2, &max2, false);
        return NewIntRangeValueInfo(min(min1, min2), max(max1, max2), wasNegativeZeroPreventedByBailout);
    }

    return ValueInfo::New(alloc, newValueType);
}
  3050. JsTypeValueInfo* GlobOpt::MergeJsTypeValueInfo(JsTypeValueInfo * toValueInfo, JsTypeValueInfo * fromValueInfo, bool isLoopBackEdge, bool sameValueNumber)
  3051. {
  3052. Assert(toValueInfo != fromValueInfo);
  3053. // On loop back edges we must be conservative and only consider type values which are invariant throughout the loop.
  3054. // That's because in dead store pass we can't correctly track object pointer assignments (o = p), and we may not
  3055. // be able to register correct type checks for the right properties upstream. If we ever figure out how to enhance
  3056. // the dead store pass to track this info we could go more aggressively, as below.
  3057. if (isLoopBackEdge && !sameValueNumber)
  3058. {
  3059. return nullptr;
  3060. }
  3061. const JITTypeHolder toType = toValueInfo->GetJsType();
  3062. const JITTypeHolder fromType = fromValueInfo->GetJsType();
  3063. const JITTypeHolder mergedType = toType == fromType ? toType : JITTypeHolder(nullptr);
  3064. Js::EquivalentTypeSet* toTypeSet = toValueInfo->GetJsTypeSet();
  3065. Js::EquivalentTypeSet* fromTypeSet = fromValueInfo->GetJsTypeSet();
  3066. Js::EquivalentTypeSet* mergedTypeSet = (toTypeSet != nullptr && fromTypeSet != nullptr && AreTypeSetsIdentical(toTypeSet, fromTypeSet)) ? toTypeSet : nullptr;
  3067. #if DBG_DUMP
  3068. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
  3069. {
  3070. Output::Print(_u("ObjTypeSpec: Merging type value info:\n"));
  3071. Output::Print(_u(" from (shared %d): "), fromValueInfo->GetIsShared());
  3072. fromValueInfo->Dump();
  3073. Output::Print(_u("\n to (shared %d): "), toValueInfo->GetIsShared());
  3074. toValueInfo->Dump();
  3075. }
  3076. #endif
  3077. if (mergedType == toType && mergedTypeSet == toTypeSet)
  3078. {
  3079. #if DBG_DUMP
  3080. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
  3081. {
  3082. Output::Print(_u("\n result (shared %d): "), toValueInfo->GetIsShared());
  3083. toValueInfo->Dump();
  3084. Output::Print(_u("\n"));
  3085. }
  3086. #endif
  3087. return toValueInfo;
  3088. }
  3089. if (mergedType == nullptr && mergedTypeSet == nullptr)
  3090. {
  3091. // No info, so don't bother making a value.
  3092. return nullptr;
  3093. }
  3094. if (toValueInfo->GetIsShared())
  3095. {
  3096. JsTypeValueInfo* mergedValueInfo = JsTypeValueInfo::New(this->alloc, mergedType, mergedTypeSet);
  3097. #if DBG_DUMP
  3098. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
  3099. {
  3100. Output::Print(_u("\n result (shared %d): "), mergedValueInfo->GetIsShared());
  3101. mergedValueInfo->Dump();
  3102. Output::Print(_u("\n"));
  3103. }
  3104. #endif
  3105. return mergedValueInfo;
  3106. }
  3107. else
  3108. {
  3109. toValueInfo->SetJsType(mergedType);
  3110. toValueInfo->SetJsTypeSet(mergedTypeSet);
  3111. #if DBG_DUMP
  3112. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
  3113. {
  3114. Output::Print(_u("\n result (shared %d): "), toValueInfo->GetIsShared());
  3115. toValueInfo->Dump();
  3116. Output::Print(_u("\n"));
  3117. }
  3118. #endif
  3119. return toValueInfo;
  3120. }
  3121. }
  3122. ValueInfo *GlobOpt::MergeArrayValueInfo(
  3123. const ValueType mergedValueType,
  3124. const ArrayValueInfo *const toDataValueInfo,
  3125. const ArrayValueInfo *const fromDataValueInfo,
  3126. Sym *const arraySym,
  3127. BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
  3128. BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
  3129. {
  3130. Assert(mergedValueType.IsAnyOptimizedArray());
  3131. Assert(toDataValueInfo);
  3132. Assert(fromDataValueInfo);
  3133. Assert(toDataValueInfo != fromDataValueInfo);
  3134. Assert(arraySym);
  3135. Assert(!symsRequiringCompensation == IsLoopPrePass());
  3136. Assert(!symsCreatedForMerge == IsLoopPrePass());
  3137. // Merge the segment and segment length syms. If we have the segment and/or the segment length syms available on both sides
  3138. // but in different syms, create a new sym and record that the array sym requires compensation. Compensation will be
  3139. // inserted later to initialize this new sym from all predecessors of the merged block.
  3140. StackSym *newHeadSegmentSym;
  3141. if(toDataValueInfo->HeadSegmentSym() && fromDataValueInfo->HeadSegmentSym())
  3142. {
  3143. if(toDataValueInfo->HeadSegmentSym() == fromDataValueInfo->HeadSegmentSym())
  3144. {
  3145. newHeadSegmentSym = toDataValueInfo->HeadSegmentSym();
  3146. }
  3147. else
  3148. {
  3149. Assert(!IsLoopPrePass());
  3150. Assert(symsRequiringCompensation);
  3151. symsRequiringCompensation->Set(arraySym->m_id);
  3152. Assert(symsCreatedForMerge);
  3153. if(symsCreatedForMerge->Test(toDataValueInfo->HeadSegmentSym()->m_id))
  3154. {
  3155. newHeadSegmentSym = toDataValueInfo->HeadSegmentSym();
  3156. }
  3157. else
  3158. {
  3159. newHeadSegmentSym = StackSym::New(TyMachPtr, func);
  3160. symsCreatedForMerge->Set(newHeadSegmentSym->m_id);
  3161. }
  3162. }
  3163. }
  3164. else
  3165. {
  3166. newHeadSegmentSym = nullptr;
  3167. }
  3168. StackSym *newHeadSegmentLengthSym;
  3169. if(toDataValueInfo->HeadSegmentLengthSym() && fromDataValueInfo->HeadSegmentLengthSym())
  3170. {
  3171. if(toDataValueInfo->HeadSegmentLengthSym() == fromDataValueInfo->HeadSegmentLengthSym())
  3172. {
  3173. newHeadSegmentLengthSym = toDataValueInfo->HeadSegmentLengthSym();
  3174. }
  3175. else
  3176. {
  3177. Assert(!IsLoopPrePass());
  3178. Assert(symsRequiringCompensation);
  3179. symsRequiringCompensation->Set(arraySym->m_id);
  3180. Assert(symsCreatedForMerge);
  3181. if(symsCreatedForMerge->Test(toDataValueInfo->HeadSegmentLengthSym()->m_id))
  3182. {
  3183. newHeadSegmentLengthSym = toDataValueInfo->HeadSegmentLengthSym();
  3184. }
  3185. else
  3186. {
  3187. newHeadSegmentLengthSym = StackSym::New(TyUint32, func);
  3188. symsCreatedForMerge->Set(newHeadSegmentLengthSym->m_id);
  3189. }
  3190. }
  3191. }
  3192. else
  3193. {
  3194. newHeadSegmentLengthSym = nullptr;
  3195. }
  3196. StackSym *newLengthSym;
  3197. if(toDataValueInfo->LengthSym() && fromDataValueInfo->LengthSym())
  3198. {
  3199. if(toDataValueInfo->LengthSym() == fromDataValueInfo->LengthSym())
  3200. {
  3201. newLengthSym = toDataValueInfo->LengthSym();
  3202. }
  3203. else
  3204. {
  3205. Assert(!IsLoopPrePass());
  3206. Assert(symsRequiringCompensation);
  3207. symsRequiringCompensation->Set(arraySym->m_id);
  3208. Assert(symsCreatedForMerge);
  3209. if(symsCreatedForMerge->Test(toDataValueInfo->LengthSym()->m_id))
  3210. {
  3211. newLengthSym = toDataValueInfo->LengthSym();
  3212. }
  3213. else
  3214. {
  3215. newLengthSym = StackSym::New(TyUint32, func);
  3216. symsCreatedForMerge->Set(newLengthSym->m_id);
  3217. }
  3218. }
  3219. }
  3220. else
  3221. {
  3222. newLengthSym = nullptr;
  3223. }
  3224. if(newHeadSegmentSym || newHeadSegmentLengthSym || newLengthSym)
  3225. {
  3226. return ArrayValueInfo::New(alloc, mergedValueType, newHeadSegmentSym, newHeadSegmentLengthSym, newLengthSym);
  3227. }
  3228. if(symsRequiringCompensation)
  3229. {
  3230. symsRequiringCompensation->Clear(arraySym->m_id);
  3231. }
  3232. return ValueInfo::New(alloc, mergedValueType);
  3233. }
// Inserts compensation copies in a predecessor block for array-tracking syms (head segment,
// head segment length, length) that were renamed by MergeArrayValueInfo at the successor's
// merge point, and updates the predecessor's and successor's value tracking to match.
void GlobOpt::InsertValueCompensation(
    BasicBlock *const predecessor,
    const SymToValueInfoMap &symsRequiringCompensationToMergedValueInfoMap)
{
    Assert(predecessor);
    Assert(symsRequiringCompensationToMergedValueInfoMap.Count() != 0);

    // Decide where to insert: before a final branch/bail target (skipping any trailing
    // ByteCodeUses run), or appended past the block's current last instruction.
    IR::Instr *insertBeforeInstr = predecessor->GetLastInstr();
    Func *const func = insertBeforeInstr->m_func;
    bool setLastInstrInPredecessor;
    if(insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
    {
        // Don't insert code between the branch and the corresponding ByteCodeUses instructions
        while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
        {
            insertBeforeInstr = insertBeforeInstr->m_prev;
        }
        setLastInstrInPredecessor = false;
    }
    else
    {
        // Insert at the end of the block and set the last instruction
        Assert(insertBeforeInstr->m_next);
        insertBeforeInstr = insertBeforeInstr->m_next; // Instruction after the last instruction in the predecessor
        setLastInstrInPredecessor = true;
    }

    GlobOptBlockData &predecessorBlockData = predecessor->globOptData;
    GlobHashTable *const predecessorSymToValueMap = predecessor->globOptData.symToValueMap;
    GlobOptBlockData &successorBlockData = blockData;
    GlobHashTable *const successorSymToValueMap = blockData.symToValueMap;
    for(auto it = symsRequiringCompensationToMergedValueInfoMap.GetIterator(); it.IsValid(); it.MoveNext())
    {
        const auto &entry = it.Current();
        Sym *const sym = entry.Key();
        Value *const predecessorValue = FindValue(predecessorSymToValueMap, sym);
        Assert(predecessorValue);
        ValueInfo *const predecessorValueInfo = predecessorValue->GetValueInfo();

        // Currently, array value infos are the only ones that require compensation based on values
        Assert(predecessorValueInfo->IsAnyOptimizedArray());
        const ArrayValueInfo *const predecessorArrayValueInfo = predecessorValueInfo->AsArrayValueInfo();
        StackSym *const predecessorHeadSegmentSym = predecessorArrayValueInfo->HeadSegmentSym();
        StackSym *const predecessorHeadSegmentLengthSym = predecessorArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const predecessorLengthSym = predecessorArrayValueInfo->LengthSym();
        ValueInfo *const mergedValueInfo = entry.Value();
        const ArrayValueInfo *const mergedArrayValueInfo = mergedValueInfo->AsArrayValueInfo();
        StackSym *const mergedHeadSegmentSym = mergedArrayValueInfo->HeadSegmentSym();
        StackSym *const mergedHeadSegmentLengthSym = mergedArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const mergedLengthSym = mergedArrayValueInfo->LengthSym();
        // A merged sym implies the predecessor tracked a corresponding sym (merge only keeps
        // syms present on both sides).
        Assert(!mergedHeadSegmentSym || predecessorHeadSegmentSym);
        Assert(!mergedHeadSegmentLengthSym || predecessorHeadSegmentLengthSym);
        Assert(!mergedLengthSym || predecessorLengthSym);

        bool compensated = false;
        if(mergedHeadSegmentSym && predecessorHeadSegmentSym != mergedHeadSegmentSym)
        {
            // Copy the predecessor's head segment sym into the merged sym.
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_A,
                    IR::RegOpnd::New(mergedHeadSegmentSym, mergedHeadSegmentSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentSym, predecessorHeadSegmentSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;
        }

        if(mergedHeadSegmentLengthSym && predecessorHeadSegmentLengthSym != mergedHeadSegmentLengthSym)
        {
            // Copy the predecessor's head segment length sym into the merged sym.
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedHeadSegmentLengthSym, mergedHeadSegmentLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentLengthSym, predecessorHeadSegmentLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the head segment length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorHeadSegmentLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            Value *const predecessorHeadSegmentLengthValue =
                FindValue(predecessorSymToValueMap, predecessorHeadSegmentLengthSym);
            Assert(predecessorHeadSegmentLengthValue);
            SetValue(&predecessorBlockData, predecessorHeadSegmentLengthValue, mergedHeadSegmentLengthSym);
            Value *const mergedHeadSegmentLengthValue = FindValue(successorSymToValueMap, mergedHeadSegmentLengthSym);
            if(mergedHeadSegmentLengthValue)
            {
                // The successor already has a value for the merged sym (from another predecessor);
                // fold this predecessor's info into it.
                Assert(mergedHeadSegmentLengthValue->GetValueNumber() != predecessorHeadSegmentLengthValue->GetValueNumber());
                if(predecessorHeadSegmentLengthValue->GetValueInfo() != mergedHeadSegmentLengthValue->GetValueInfo())
                {
                    mergedHeadSegmentLengthValue->SetValueInfo(
                        MergeLikelyIntValueInfo(
                            mergedHeadSegmentLengthValue,
                            predecessorHeadSegmentLengthValue,
                            mergedHeadSegmentLengthValue->GetValueInfo()->Type()
                                .Merge(predecessorHeadSegmentLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                SetValue(&successorBlockData, CopyValue(predecessorHeadSegmentLengthValue), mergedHeadSegmentLengthSym);
            }
        }

        if(mergedLengthSym && predecessorLengthSym != mergedLengthSym)
        {
            // Copy the predecessor's length sym into the merged sym.
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedLengthSym, mergedLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorLengthSym, predecessorLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            Value *const predecessorLengthValue = FindValue(predecessorSymToValueMap, predecessorLengthSym);
            Assert(predecessorLengthValue);
            SetValue(&predecessorBlockData, predecessorLengthValue, mergedLengthSym);
            Value *const mergedLengthValue = FindValue(successorSymToValueMap, mergedLengthSym);
            if(mergedLengthValue)
            {
                Assert(mergedLengthValue->GetValueNumber() != predecessorLengthValue->GetValueNumber());
                if(predecessorLengthValue->GetValueInfo() != mergedLengthValue->GetValueInfo())
                {
                    mergedLengthValue->SetValueInfo(
                        MergeLikelyIntValueInfo(
                            mergedLengthValue,
                            predecessorLengthValue,
                            mergedLengthValue->GetValueInfo()->Type().Merge(predecessorLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                SetValue(&successorBlockData, CopyValue(predecessorLengthValue), mergedLengthSym);
            }
        }

        if(compensated)
        {
            // Retarget the predecessor's array value info at the merged syms so it agrees with
            // the successor's view after the inserted copies.
            ChangeValueInfo(
                predecessor,
                predecessorValue,
                ArrayValueInfo::New(
                    alloc,
                    predecessorValueInfo->Type(),
                    mergedHeadSegmentSym ? mergedHeadSegmentSym : predecessorHeadSegmentSym,
                    mergedHeadSegmentLengthSym ? mergedHeadSegmentLengthSym : predecessorHeadSegmentLengthSym,
                    mergedLengthSym ? mergedLengthSym : predecessorLengthSym,
                    predecessorValueInfo->GetSymStore()),
                false /*allowIncompatibleType*/,
                compensated);
        }
    }

    if(setLastInstrInPredecessor)
    {
        predecessor->SetLastInstr(insertBeforeInstr->m_prev);
    }
}
// Returns whether the given sym id is currently tracked as an alias of the arguments object
// in the supplied block data.
BOOLEAN
GlobOpt::IsArgumentsSymID(SymID id, const GlobOptBlockData& blockData)
{
    return blockData.argObjSyms->Test(id);
}
  3403. BOOLEAN
  3404. GlobOpt::IsArgumentsOpnd(IR::Opnd* opnd)
  3405. {
  3406. SymID id = 0;
  3407. if (opnd->IsRegOpnd())
  3408. {
  3409. id = opnd->AsRegOpnd()->m_sym->m_id;
  3410. return IsArgumentsSymID(id, this->blockData);
  3411. }
  3412. else if (opnd->IsSymOpnd())
  3413. {
  3414. Sym *sym = opnd->AsSymOpnd()->m_sym;
  3415. if (sym && sym->IsPropertySym())
  3416. {
  3417. PropertySym *propertySym = sym->AsPropertySym();
  3418. id = propertySym->m_stackSym->m_id;
  3419. return IsArgumentsSymID(id, this->blockData);
  3420. }
  3421. return false;
  3422. }
  3423. else if (opnd->IsIndirOpnd())
  3424. {
  3425. IR::RegOpnd *indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  3426. IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
  3427. return IsArgumentsSymID(baseOpnd->m_sym->m_id, this->blockData) || (indexOpnd && IsArgumentsSymID(indexOpnd->m_sym->m_id, this->blockData));
  3428. }
  3429. AssertMsg(false, "Unknown type");
  3430. return false;
  3431. }
  3432. void
  3433. GlobOpt::TrackArgumentsSym(IR::RegOpnd* opnd)
  3434. {
  3435. if(!blockData.curFunc->argObjSyms)
  3436. {
  3437. blockData.curFunc->argObjSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  3438. }
  3439. blockData.curFunc->argObjSyms->Set(opnd->m_sym->m_id);
  3440. blockData.argObjSyms->Set(opnd->m_sym->m_id);
  3441. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  3442. if (PHASE_TESTTRACE(Js::StackArgOptPhase, this->func))
  3443. {
  3444. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3445. char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3446. Output::Print(_u("Created a new alias s%d for arguments object in function %s(%s) topFunc %s(%s)\n"),
  3447. opnd->m_sym->m_id,
  3448. blockData.curFunc->GetJITFunctionBody()->GetDisplayName(),
  3449. blockData.curFunc->GetDebugNumberSet(debugStringBuffer),
  3450. this->func->GetJITFunctionBody()->GetDisplayName(),
  3451. this->func->GetDebugNumberSet(debugStringBuffer2)
  3452. );
  3453. Output::Flush();
  3454. }
  3455. #endif
  3456. }
  3457. void
  3458. GlobOpt::ClearArgumentsSym(IR::RegOpnd* opnd)
  3459. {
  3460. // We blindly clear so need to check func has argObjSyms
  3461. if (blockData.curFunc->argObjSyms)
  3462. {
  3463. blockData.curFunc->argObjSyms->Clear(opnd->m_sym->m_id);
  3464. }
  3465. blockData.argObjSyms->Clear(opnd->m_sym->m_id);
  3466. }
  3467. bool
  3468. GlobOpt::AreFromSameBytecodeFunc(IR::RegOpnd* src1, IR::RegOpnd* dst)
  3469. {
  3470. Assert(this->func->m_symTable->FindStackSym(src1->m_sym->m_id) == src1->m_sym);
  3471. Assert(this->func->m_symTable->FindStackSym(dst->m_sym->m_id) == dst->m_sym);
  3472. if (dst->m_sym->HasByteCodeRegSlot() && src1->m_sym->HasByteCodeRegSlot())
  3473. {
  3474. return src1->m_sym->GetByteCodeFunc() == dst->m_sym->GetByteCodeFunc();
  3475. }
  3476. return false;
  3477. }
// NOTE(review): despite the name, this delegates to BVSparse::TestEmpty on the tracked
// arguments-object syms — presumably it reports whether any arguments-object sym is tracked
// in the current block; confirm against BVSparse::TestEmpty's contract.
BOOLEAN
GlobOpt::TestAnyArgumentsSym()
{
    return blockData.argObjSyms->TestEmpty();
}
/*
* This is for scope object removal along with Heap Arguments optimization.
* We track several instructions to facilitate the removal of scope object.
* - LdSlotArr - This instr is tracked to keep track of the formals array (the dest)
* - InlineeStart - To keep track of the stack syms for the formals of the inlinee.
*/
void
GlobOpt::TrackInstrsForScopeObjectRemoval(IR::Instr * instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();

    if (instr->m_opcode == Js::OpCode::Ld_A && src1->IsRegOpnd())
    {
        AssertMsg(!instr->m_func->IsStackArgsEnabled() || !src1->IsScopeObjOpnd(instr->m_func), "There can be no aliasing for scope object.");
    }

    // The following is to track formals array for Stack Arguments optimization with Formals
    if (instr->m_func->IsStackArgsEnabled() && !this->IsLoopPrePass())
    {
        if (instr->m_opcode == Js::OpCode::LdSlotArr)
        {
            if (instr->GetSrc1()->IsScopeObjOpnd(instr->m_func))
            {
                AssertMsg(!instr->m_func->GetJITFunctionBody()->HasImplicitArgIns(), "No mapping is required in this case. So it should already be generating ArgIns.");
                // Remember the dst sym as the formals array for this func.
                instr->m_func->TrackFormalsArraySym(dst->GetStackSym()->m_id);
            }
        }
        else if (instr->m_opcode == Js::OpCode::InlineeStart)
        {
            Assert(instr->m_func->IsInlined());
            // -1: presumably these counts exclude the 'this' argument — confirm against
            // actualCount/GetInParamsCount semantics.
            Js::ArgSlot actualsCount = instr->m_func->actualCount - 1;
            Js::ArgSlot formalsCount = instr->m_func->GetJITFunctionBody()->GetInParamsCount() - 1;

            Func * func = instr->m_func;
            Func * inlinerFunc = func->GetParentFunc(); //Inliner's func

            // Walk the ArgOut chain backwards starting from InlineeStart's src2 def.
            IR::Instr * argOutInstr = instr->GetSrc2()->GetStackSym()->GetInstrDef();

            //The argout immediately before the InlineeStart will be the ArgOut for NewScObject
            //So we don't want to track the stack sym for this argout.- Skipping it here.
            if (instr->m_func->IsInlinedConstructor())
            {
                //PRE might introduce a second definition for the Src1. So assert for the opcode only when it has single definition.
                Assert(argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef() == nullptr ||
                    argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::NewScObjectNoCtor);
                argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
            }
            if (formalsCount < actualsCount)
            {
                Js::ArgSlot extraActuals = actualsCount - formalsCount;

                //Skipping extra actuals passed
                for (Js::ArgSlot i = 0; i < extraActuals; i++)
                {
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
            }

            StackSym * undefinedSym = nullptr;
            // Walk formals from last to first, pairing each with its ArgOut (or undefined when
            // there is no corresponding actual).
            for (Js::ArgSlot param = formalsCount; param > 0; param--)
            {
                StackSym * argOutSym = nullptr;

                if (argOutInstr->GetSrc1())
                {
                    if (argOutInstr->GetSrc1()->IsRegOpnd())
                    {
                        argOutSym = argOutInstr->GetSrc1()->GetStackSym();
                    }
                    else
                    {
                        // We will always have ArgOut instr - so the source operand will not be removed.
                        // Materialize the non-reg source into a fresh stack sym so it can be tracked.
                        argOutSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = argOutInstr->GetSrc1();
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(argOutSym, TyVar, inlinerFunc);
                        IR::Instr * assignInstr = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignInstr);
                    }
                }

                Assert(!func->HasStackSymForFormal(param - 1));

                if (param <= actualsCount)
                {
                    Assert(argOutSym);
                    func->TrackStackSymForFormalIndex(param - 1, argOutSym);
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
                else
                {
                    /*When param is out of range of actuals count, load undefined*/
                    // TODO: saravind: This will insert undefined for each of the param not having an actual. - Clean up this by having a sym for undefined on func ?
                    Assert(formalsCount > actualsCount);
                    if (undefinedSym == nullptr)
                    {
                        // Create the undefined-loading sym once and reuse it for all missing actuals.
                        undefinedSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = IR::AddrOpnd::New(inlinerFunc->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicMisc, inlinerFunc);
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(undefinedSym, TyVar, inlinerFunc);
                        IR::Instr * assignUndefined = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignUndefined);
                    }
                    func->TrackStackSymForFormalIndex(param - 1, undefinedSym);
                }
            }
        }
    }
}
// Forward-pass tracking of the arguments object and its aliases, used to decide
// whether the heap arguments object can instead be allocated on the stack.
// Any use that could let the arguments object escape (or that we cannot model)
// disables the optimization via CannotAllocateArgumentsObjectOnStack().
void
GlobOpt::OptArguments(IR::Instr *instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();

    TrackInstrsForScopeObjectRemoval(instr);

    // If arguments-object tracking is disabled for this function, nothing to do.
    if (!TrackArgumentsObject())
    {
        return;
    }

    // A LdHeapArguments-style opcode creates the arguments object itself.
    if (instr->HasAnyLoadHeapArgsOpCode())
    {
        if (instr->m_func->IsStackArgsEnabled())
        {
            if (instr->GetSrc1()->IsRegOpnd() && instr->m_func->GetJITFunctionBody()->GetInParamsCount() > 1)
            {
                // src1 is the scope object holding the formals; it must have been
                // created by InitCachedScope/NewScopeObject and already recorded on the func.
                StackSym * scopeObjSym = instr->GetSrc1()->GetStackSym();
                Assert(scopeObjSym);
                Assert(scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::InitCachedScope || scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::NewScopeObject);
                Assert(instr->m_func->GetScopeObjSym() == scopeObjSym);
                if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase))
                {
                    Output::Print(_u("StackArgFormals : %s (%d) :Setting scopeObjSym in forward pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetJITFunctionBody()->GetFunctionNumber());
                    Output::Flush();
                }
            }
        }

        // Functions with formals (InParamsCount != 1 counts the 'this' slot) can only
        // use the stack-allocated arguments object when the stack-args opt is enabled.
        if (instr->m_func->GetJITFunctionBody()->GetInParamsCount() != 1 && !instr->m_func->IsStackArgsEnabled())
        {
            CannotAllocateArgumentsObjectOnStack();
        }
        else
        {
            TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    // Keep track of arguments objects and its aliases
    // LdHeapArguments loads the arguments object and Ld_A tracks the aliases.
    if ((instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::BytecodeArgOutCapture) && (src1->IsRegOpnd() && IsArgumentsOpnd(src1)))
    {
        // In the debug mode, we don't want to optimize away the aliases. Since we may have to show them on the inspection.
        // Also give up on cross-function aliases and aliases created inside a loop
        // (merging per-block arguments info across loop back-edges is not supported).
        if (((!AreFromSameBytecodeFunc(src1->AsRegOpnd(), dst->AsRegOpnd()) || this->currentBlock->loop) && instr->m_opcode != Js::OpCode::BytecodeArgOutCapture) || this->func->IsJitInDebugMode())
        {
            CannotAllocateArgumentsObjectOnStack();
            return;
        }
        if(!dst->AsRegOpnd()->GetStackSym()->m_nonEscapingArgObjAlias)
        {
            TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    if (!TestAnyArgumentsSym())
    {
        // There are no syms to track yet, don't start tracking arguments sym.
        return;
    }

    // Avoid loop prepass
    if (this->currentBlock->loop && this->IsLoopPrePass())
    {
        return;
    }

    SymID id = 0;

    // Classify how this instruction uses the arguments object (if at all).
    switch(instr->m_opcode)
    {
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::TypeofElem:
    {
        Assert(src1->IsIndirOpnd());
        IR::RegOpnd *indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();

        if (indexOpnd && IsArgumentsSymID(indexOpnd->m_sym->m_id, this->blockData))
        {
            // Pathological test cases such as a[arguments]
            CannotAllocateArgumentsObjectOnStack();
            return;
        }

        IR::RegOpnd *baseOpnd = src1->AsIndirOpnd()->GetBaseOpnd();
        id = baseOpnd->m_sym->m_id;
        if (IsArgumentsSymID(id, this->blockData))
        {
            // arguments[i] is a supported (non-escaping) use.
            instr->usesStackArgumentsObject = true;
        }

        break;
    }
    case Js::OpCode::LdLen_A:
    {
        Assert(src1->IsRegOpnd());
        if(IsArgumentsOpnd(src1))
        {
            // arguments.length is a supported use.
            instr->usesStackArgumentsObject = true;
        }
        break;
    }
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    {
        if (IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }

        if (IsArgumentsOpnd(src1) &&
            src1->AsRegOpnd()->m_sym->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture)
        {
            // Apply inlining results in such usage - this is to ignore this sym that is def'd by ByteCodeArgOutCapture
            // It's needed because we do not have block level merging of arguments object and this def due to inlining can turn off stack args opt.
            IR::Instr* builtinStart = instr->GetNextRealInstr();
            if (builtinStart->m_opcode == Js::OpCode::InlineBuiltInStart)
            {
                IR::Opnd* builtinOpnd = builtinStart->GetSrc1();
                if (builtinStart->GetSrc1()->IsAddrOpnd())
                {
                    Assert(builtinOpnd->AsAddrOpnd()->m_isFunction);

                    Js::BuiltinFunction builtinFunction = Js::JavascriptLibrary::GetBuiltInForFuncInfo(((JITTimeFixedField*)builtinOpnd->AsAddrOpnd()->m_metadata)->GetFuncInfoAddr(), func->GetThreadContextInfo());
                    if (builtinFunction == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        ClearArgumentsSym(src1->AsRegOpnd());
                    }
                }
                else if (builtinOpnd->IsRegOpnd())
                {
                    if (builtinOpnd->AsRegOpnd()->m_sym->m_builtInIndex == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        ClearArgumentsSym(src1->AsRegOpnd());
                    }
                }
            }
        }
        break;
    }
    case Js::OpCode::BailOnNotStackArgs:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::BytecodeArgOutUse:
    {
        // These opcodes exist specifically to support the stack-args optimization.
        if (src1 && IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }

        break;
    }

    default:
        {
            // Super conservative here, if we see the arguments or any of its alias being used in any
            // other opcode just don't do this optimization. Revisit this to optimize further if we see any common
            // case is missed.

            if (src1)
            {
                if (src1->IsRegOpnd() || src1->IsSymOpnd() || src1->IsIndirOpnd())
                {
                    if (IsArgumentsOpnd(src1))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
            }

            if (src2)
            {
                if (src2->IsRegOpnd() || src2->IsSymOpnd() || src2->IsIndirOpnd())
                {
                    if (IsArgumentsOpnd(src2))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
            }

            // We should look at dst last to correctly handle cases where it's the same as one of the src operands.
            if (dst)
            {
                if (dst->IsIndirOpnd() || dst->IsSymOpnd())
                {
                    // Storing into arguments (e.g. arguments[i] = x) modifies it: give up.
                    if (IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
                else if (dst->IsRegOpnd())
                {
                    // Redefining an arguments alias inside a loop cannot be tracked per-block.
                    if (this->currentBlock->loop && IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                    // dst is overwritten with a non-arguments value: stop tracking it.
                    ClearArgumentsSym(dst->AsRegOpnd());
                }
            }
        }
        break;
    }
    return;
}
  3798. void
  3799. GlobOpt::MarkArgumentsUsedForBranch(IR::Instr * instr)
  3800. {
  3801. // If it's a conditional branch instruction and the operand used for branching is one of the arguments
  3802. // to the function, tag the m_argUsedForBranch of the functionBody so that it can be used later for inlining decisions.
  3803. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsUnconditional())
  3804. {
  3805. IR::BranchInstr * bInstr = instr->AsBranchInstr();
  3806. IR::Opnd *src1 = bInstr->GetSrc1();
  3807. IR::Opnd *src2 = bInstr->GetSrc2();
  3808. // These are used because we don't want to rely on src1 or src2 to always be the register/constant
  3809. IR::RegOpnd *regOpnd = nullptr;
  3810. if (!src2 && (instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A) && src1->IsRegOpnd())
  3811. {
  3812. regOpnd = src1->AsRegOpnd();
  3813. }
  3814. // We need to check for (0===arg) and (arg===0); this is especially important since some minifiers
  3815. // change all instances of one to the other.
  3816. else if (src2 && src2->IsConstOpnd() && src1->IsRegOpnd())
  3817. {
  3818. regOpnd = src1->AsRegOpnd();
  3819. }
  3820. else if (src2 && src2->IsRegOpnd() && src1->IsConstOpnd())
  3821. {
  3822. regOpnd = src2->AsRegOpnd();
  3823. }
  3824. if (regOpnd != nullptr)
  3825. {
  3826. if (regOpnd->m_sym->IsSingleDef())
  3827. {
  3828. IR::Instr * defInst = regOpnd->m_sym->GetInstrDef();
  3829. IR::Opnd *defSym = defInst->GetSrc1();
  3830. if (defSym && defSym->IsSymOpnd() && defSym->AsSymOpnd()->m_sym->IsStackSym()
  3831. && defSym->AsSymOpnd()->m_sym->AsStackSym()->IsParamSlotSym())
  3832. {
  3833. uint16 param = defSym->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
  3834. // We only support functions with 13 arguments to ensure optimal size of callSiteInfo
  3835. if (param < Js::Constants::MaximumArgumentCountForConstantArgumentInlining)
  3836. {
  3837. this->func->GetJITOutput()->SetArgUsedForBranch((uint8)param);
  3838. }
  3839. }
  3840. }
  3841. }
  3842. }
  3843. }
  3844. const InductionVariable*
  3845. GlobOpt::GetInductionVariable(SymID sym, Loop *loop)
  3846. {
  3847. if (loop->inductionVariables)
  3848. {
  3849. for (auto it = loop->inductionVariables->GetIterator(); it.IsValid(); it.MoveNext())
  3850. {
  3851. InductionVariable* iv = &it.CurrentValueReference();
  3852. if (!iv->IsChangeDeterminate() || !iv->IsChangeUnidirectional())
  3853. {
  3854. continue;
  3855. }
  3856. if (iv->Sym()->m_id == sym)
  3857. {
  3858. return iv;
  3859. }
  3860. }
  3861. }
  3862. return nullptr;
  3863. }
  3864. bool
  3865. GlobOpt::IsSymIDInductionVariable(SymID sym, Loop *loop)
  3866. {
  3867. return GetInductionVariable(sym, loop) != nullptr;
  3868. }
  3869. SymID
  3870. GlobOpt::GetVarSymID(StackSym *sym)
  3871. {
  3872. if (sym && sym->m_type != TyVar)
  3873. {
  3874. sym = sym->GetVarEquivSym(nullptr);
  3875. }
  3876. if (!sym)
  3877. {
  3878. return Js::Constants::InvalidSymID;
  3879. }
  3880. return sym->m_id;
  3881. }
// Validate that an array access (base[index]) inside the current loop may
// participate in a memset/memcopy transformation:
//  - index is int-typed, base is a (typed or JS) array;
//  - bounds checks on the base have already been eliminated;
//  - nothing in the instr can change the base's value type;
//  - the base is loop-invariant;
//  - the index is a unit-step induction variable shared by all memop
//    candidates in this loop.
// NOTE(review): `isMemset` is not read in this body — presumably kept for
// interface symmetry with callers; confirm before removing.
bool
GlobOpt::IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd)
{
    Assert(instr);
    if (!baseOpnd || !indexOpnd)
    {
        return false;
    }
    Loop* loop = this->currentBlock->loop;

    const ValueType baseValueType(baseOpnd->GetValueType());
    const ValueType indexValueType(indexOpnd->GetValueType());

    // Validate the array and index types
    if (
        !indexValueType.IsInt() ||
            !(
                baseValueType.IsTypedIntOrFloatArray() ||
                baseValueType.IsArray()
            )
        )
    {
#if DBG_DUMP
        wchar indexValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        indexValueType.ToString(indexValueTypeStr);
        wchar baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index[%s] or Array[%s] value type is invalid"), indexValueTypeStr, baseValueTypeStr);
#endif
        return false;
    }

    // The following is conservative and works around a bug in induction variable analysis.
    if (baseOpnd->IsArrayRegOpnd())
    {
        IR::ArrayRegOpnd *baseArrayOp = baseOpnd->AsArrayRegOpnd();
        // Both bound checks must already be eliminated, and the instr must not
        // still be carrying extracted bound-check / head-segment loads.
        bool hasBoundChecksRemoved = (
            baseArrayOp->EliminatedLowerBoundCheck() &&
            baseArrayOp->EliminatedUpperBoundCheck() &&
            !instr->extractedUpperBoundCheckWithoutHoisting &&
            !instr->loadedArrayHeadSegment &&
            !instr->loadedArrayHeadSegmentLength
            );
        if (!hasBoundChecksRemoved)
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("Missing bounds check optimization"));
            return false;
        }
    }

    if (!baseValueType.IsTypedArray())
    {
        // Check if the instr can kill the value type of the array
        JsArrayKills arrayKills = CheckJsArrayKills(instr);
        if (arrayKills.KillsValueType(baseValueType))
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The array (s%d) can lose its value type"), GetVarSymID(baseOpnd->GetStackSym()));
            return false;
        }
    }

    // The base array itself must not change across iterations of this loop.
    if (!this->OptIsInvariant(baseOpnd, this->currentBlock, loop, this->FindValue(baseOpnd->m_sym), false, true))
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Base (s%d) is not invariant"), GetVarSymID(baseOpnd->GetStackSym()));
        return false;
    }

    // Validate the index
    Assert(indexOpnd->GetStackSym());
    SymID indexSymID = GetVarSymID(indexOpnd->GetStackSym());
    const InductionVariable* iv = GetInductionVariable(indexSymID, loop);
    if (!iv)
    {
        // If the index is not an induction variable return
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index (s%d) is not an induction variable"), indexSymID);
        return false;
    }

    // GetInductionVariable only returns determinate, unidirectional variables.
    Assert(iv->IsChangeDeterminate() && iv->IsChangeUnidirectional());
    const IntConstantBounds & bounds = iv->ChangeBounds();

    if (loop->memOpInfo)
    {
        // Only accept induction variables that increments by 1
        Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
        inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(indexSymID, inductionVariableChangeInfo);

        // Per-iteration change must be exactly +1 or -1 (lower == upper bound).
        if (
            (bounds.LowerBound() != 1 && bounds.LowerBound() != -1) ||
            (bounds.UpperBound() != bounds.LowerBound()) ||
            inductionVariableChangeInfo.unroll > 1 // Must be 0 (not seen yet) or 1 (already seen)
        )
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The index does not change by 1: %d><%d, unroll=%d"), bounds.LowerBound(), bounds.UpperBound(), inductionVariableChangeInfo.unroll);
            return false;
        }

        // Check if the index is the same in all MemOp optimization in this loop
        if (!loop->memOpInfo->candidates->Empty())
        {
            Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();

            // All MemOp operations within the same loop must use the same index
            if (previousCandidate->index != indexSymID)
            {
                TRACE_MEMOP_VERBOSE(loop, instr, _u("The index is not the same as other MemOp in the loop"));
                return false;
            }
        }
    }

    return true;
}
  3984. bool
  3985. GlobOpt::CollectMemcopyLdElementI(IR::Instr *instr, Loop *loop)
  3986. {
  3987. Assert(instr->GetSrc1()->IsIndirOpnd());
  3988. IR::IndirOpnd *src1 = instr->GetSrc1()->AsIndirOpnd();
  3989. IR::Opnd *indexOpnd = src1->GetIndexOpnd();
  3990. IR::RegOpnd *baseOpnd = src1->GetBaseOpnd()->AsRegOpnd();
  3991. SymID baseSymID = GetVarSymID(baseOpnd->GetStackSym());
  3992. if (!IsAllowedForMemOpt(instr, false, baseOpnd, indexOpnd))
  3993. {
  3994. return false;
  3995. }
  3996. SymID inductionSymID = GetVarSymID(indexOpnd->GetStackSym());
  3997. Assert(IsSymIDInductionVariable(inductionSymID, loop));
  3998. loop->EnsureMemOpVariablesInitialized();
  3999. bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
  4000. IR::Opnd * dst = instr->GetDst();
  4001. if (!dst->IsRegOpnd() || !dst->AsRegOpnd()->GetStackSym()->IsSingleDef())
  4002. {
  4003. return false;
  4004. }
  4005. Loop::MemCopyCandidate* memcopyInfo = memcopyInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemCopyCandidate);
  4006. memcopyInfo->ldBase = baseSymID;
  4007. memcopyInfo->ldCount = 1;
  4008. memcopyInfo->count = 0;
  4009. memcopyInfo->bIndexAlreadyChanged = isIndexPreIncr;
  4010. memcopyInfo->base = Js::Constants::InvalidSymID; //need to find the stElem first
  4011. memcopyInfo->index = inductionSymID;
  4012. memcopyInfo->transferSym = dst->AsRegOpnd()->GetStackSym();
  4013. loop->memOpInfo->candidates->Prepend(memcopyInfo);
  4014. return true;
  4015. }
// Try to record a StElemI (base[i] = v) as a memset candidate for `loop`.
// The stored value must be either a loop-invariant register or a constant
// (float/int/address). Returns true when a candidate was created.
bool
GlobOpt::CollectMemsetStElementI(IR::Instr *instr, Loop *loop)
{
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();

    if (!IsAllowedForMemOpt(instr, true, baseOp, indexOp))
    {
        return false;
    }

    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    IR::Opnd *srcDef = instr->GetSrc1();
    StackSym *srcSym = nullptr;
    // A register source qualifies only if it is invariant in this loop.
    if (srcDef->IsRegOpnd())
    {
        IR::RegOpnd* opnd = srcDef->AsRegOpnd();
        if (this->OptIsInvariant(opnd, this->currentBlock, loop, this->FindValue(opnd->m_sym), true, true))
        {
            srcSym = opnd->GetStackSym();
        }
    }

    // Otherwise capture the constant being stored; TyIllegal marks "no constant".
    BailoutConstantValue constant = {TyIllegal, 0};
    if (srcDef->IsFloatConstOpnd())
    {
        constant.InitFloatConstValue(srcDef->AsFloatConstOpnd()->m_value);
    }
    else if (srcDef->IsIntConstOpnd())
    {
        constant.InitIntConstValue(srcDef->AsIntConstOpnd()->GetValue(), srcDef->AsIntConstOpnd()->GetType());
    }
    else if (srcDef->IsAddrOpnd())
    {
        constant.InitVarConstValue(srcDef->AsAddrOpnd()->m_address);
    }
    else if(!srcSym)
    {
        // Neither an invariant register nor a recognized constant: not a memset.
        TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Source is not an invariant"));
        return false;
    }

    // Process the Index Operand
    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));

    loop->EnsureMemOpVariablesInitialized();
    // Records whether the induction variable was already updated before this store.
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);

    Loop::MemSetCandidate* memsetInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemSetCandidate);
    memsetInfo->base = baseSymID;
    memsetInfo->index = inductionSymID;
    memsetInfo->constant = constant;
    memsetInfo->srcSym = srcSym;
    memsetInfo->count = 1;
    memsetInfo->bIndexAlreadyChanged = isIndexPreIncr;
    loop->memOpInfo->candidates->Prepend(memsetInfo);
    return true;
}
// Try to complete a memcopy candidate: match a StElemI (dst[i] = v) against the
// most recent candidate created by CollectMemcopyLdElementI. The store must use
// the same transfer sym, the same induction variable in the same pre/post-
// increment state, and must be the last use of the transferred register.
bool GlobOpt::CollectMemcopyStElementI(IR::Instr *instr, Loop *loop)
{
    if (!loop->memOpInfo || loop->memOpInfo->candidates->Empty())
    {
        // There is no ldElem matching this stElem
        return false;
    }
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();
    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    if (!instr->GetSrc1()->IsRegOpnd())
    {
        return false;
    }
    IR::RegOpnd* src1 = instr->GetSrc1()->AsRegOpnd();

    if (!src1->GetIsDead())
    {
        // This must be the last use of the register.
        // It will invalidate `var m = a[i]; b[i] = m;` but this is not a very interesting case.
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Source (s%d) is still alive after StElemI"), baseSymID);
        return false;
    }

    if (!IsAllowedForMemOpt(instr, false, baseOp, indexOp))
    {
        return false;
    }

    SymID srcSymID = GetVarSymID(src1->GetStackSym());

    // Prepare the memcopyCandidate entry
    Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();
    if (!previousCandidate->IsMemCopy())
    {
        return false;
    }
    Loop::MemCopyCandidate* memcopyInfo = previousCandidate->AsMemCopy();

    // The previous candidate has to have been created by the matching ldElem
    // (still awaiting its store, and transferring the same sym this store reads).
    if (
        memcopyInfo->base != Js::Constants::InvalidSymID ||
        GetVarSymID(memcopyInfo->transferSym) != srcSymID
    )
    {
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("No matching LdElem found (s%d)"), baseSymID);
        return false;
    }

    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));
    // The induction variable must be in the same pre/post-increment state as it
    // was when the load was collected, otherwise load and store use different indices.
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
    if (isIndexPreIncr != memcopyInfo->bIndexAlreadyChanged)
    {
        // The index changed between the load and the store
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Index value changed between ldElem and stElem"));
        return false;
    }

    // Consider: Can we remove the count field?
    memcopyInfo->count++;
    memcopyInfo->base = baseSymID;

    return true;
}
  4132. bool
  4133. GlobOpt::CollectMemOpLdElementI(IR::Instr *instr, Loop *loop)
  4134. {
  4135. Assert(instr->m_opcode == Js::OpCode::LdElemI_A);
  4136. return (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyLdElementI(instr, loop));
  4137. }
  4138. bool
  4139. GlobOpt::CollectMemOpStElementI(IR::Instr *instr, Loop *loop)
  4140. {
  4141. Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict);
  4142. Assert(instr->GetSrc1());
  4143. return (!PHASE_OFF(Js::MemSetPhase, this->func) && CollectMemsetStElementI(instr, loop)) ||
  4144. (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyStElementI(instr, loop));
  4145. }
// Per-instruction driver for memset/memcopy candidate collection in the current
// loop. Classifies the instruction (element load/store, induction-variable
// update, or anything else), updates loop->memOpInfo accordingly, and disables
// the optimization for the loop (loop->doMemOp = false) on any disqualifying use.
// Returns false when the loop can no longer be memop'ed.
bool
GlobOpt::CollectMemOpInfo(IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    Assert(this->currentBlock->loop);

    Loop *loop = this->currentBlock->loop;

    if (!loop->blockList.HasTwo())
    {
        // We support memcopy and memset for loops which have only two blocks.
        return false;
    }

    if (loop->GetLoopFlags().isInterpreted && !loop->GetLoopFlags().memopMinCountReached)
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("minimum loop count not reached"))
        loop->doMemOp = false;
        return false;
    }
    Assert(loop->doMemOp);

    bool isIncr = true, isChangedByOne = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
        if (!CollectMemOpStElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::LdElemI_A:
        if (!CollectMemOpLdElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::Decr_A:
        // Intentional fallthrough: Decr_A is Incr_A with isIncr = false.
        isIncr = false;
    case Js::OpCode::Incr_A:
        isChangedByOne = true;
        goto MemOpCheckInductionVariable;
    case Js::OpCode::Sub_I4:
    case Js::OpCode::Sub_A:
        // Intentional fallthrough: subtraction shares the Add path with isIncr = false.
        isIncr = false;
    case Js::OpCode::Add_A:
    case Js::OpCode::Add_I4:
    {
    MemOpCheckInductionVariable:
        // The updated variable may appear as src1 or src2 of the add/sub.
        StackSym *sym = instr->GetSrc1()->GetStackSym();
        if (!sym)
        {
            sym = instr->GetSrc2()->GetStackSym();
        }

        SymID inductionSymID = GetVarSymID(sym);

        if (IsSymIDInductionVariable(inductionSymID, this->currentBlock->loop))
        {
            // For Add/Sub, detect a step of exactly 1 (constant operand or a
            // value known to be the integer constant 1).
            if (!isChangedByOne)
            {
                IR::Opnd *src1, *src2;
                src1 = instr->GetSrc1();
                src2 = instr->GetSrc2();

                if (src2->IsRegOpnd())
                {
                    Value *val = this->FindValue(src2->AsRegOpnd()->m_sym);
                    if (val)
                    {
                        ValueInfo *vi = val->GetValueInfo();
                        int constValue;
                        if (vi && vi->TryGetIntConstantValue(&constValue))
                        {
                            if (constValue == 1)
                            {
                                isChangedByOne = true;
                            }
                        }
                    }
                }
                else if (src2->IsIntConstOpnd())
                {
                    if (src2->AsIntConstOpnd()->GetValue() == 1)
                    {
                        isChangedByOne = true;
                    }
                }
            }

            loop->EnsureMemOpVariablesInitialized();
            if (!isChangedByOne)
            {
                // Non-unit step: mark the variable with InvalidLoopUnrollFactor
                // so IsAllowedForMemOpt will reject it as an index.
                Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { Js::Constants::InvalidLoopUnrollFactor, 0 };

                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                }
                else
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                }
            }
            else
            {
                // Unit step: record (or bump) the number of updates seen and
                // the direction of change for this variable.
                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 1, isIncr };
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                }
                else
                {
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
                    inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(inductionSymID, inductionVariableChangeInfo);
                    inductionVariableChangeInfo.unroll++;
                    inductionVariableChangeInfo.isIncremental = isIncr;
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                }
            }
            break;
        }
        // Fallthrough if not an induction variable
    }
    default:
        if (IsInstrInvalidForMemOp(instr, loop, src1Val, src2Val))
        {
            loop->doMemOp = false;
            return false;
        }

        // Make sure this instruction doesn't use the memcopy transfer sym before it is checked by StElemI
        if (loop->memOpInfo && !loop->memOpInfo->candidates->Empty())
        {
            Loop::MemOpCandidate* prevCandidate = loop->memOpInfo->candidates->Head();
            if (prevCandidate->IsMemCopy())
            {
                Loop::MemCopyCandidate* memcopyCandidate = prevCandidate->AsMemCopy();
                // base == InvalidSymID means the candidate is still waiting for
                // its matching StElemI; any other use of the transfer sym kills it.
                if (memcopyCandidate->base == Js::Constants::InvalidSymID)
                {
                    if (instr->FindRegUse(memcopyCandidate->transferSym))
                    {
                        loop->doMemOp = false;
                        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Found illegal use of LdElemI value(s%d)"), GetVarSymID(memcopyCandidate->transferSym));
                        return false;
                    }
                }
            }
        }
    }

    return true;
}
  4290. bool
  4291. GlobOpt::IsInstrInvalidForMemOp(IR::Instr *instr, Loop *loop, Value *src1Val, Value *src2Val)
  4292. {
  4293. // List of instruction that are valid with memop (ie: instr that gets removed if memop is emitted)
  4294. if (
  4295. this->currentBlock != loop->GetHeadBlock() &&
  4296. !instr->IsLabelInstr() &&
  4297. instr->IsRealInstr() &&
  4298. instr->m_opcode != Js::OpCode::IncrLoopBodyCount &&
  4299. instr->m_opcode != Js::OpCode::StLoopBodyCount &&
  4300. instr->m_opcode != Js::OpCode::Ld_A &&
  4301. instr->m_opcode != Js::OpCode::Ld_I4 &&
  4302. !(instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
  4303. )
  4304. {
  4305. TRACE_MEMOP_VERBOSE(loop, instr, _u("Instruction not accepted for memop"));
  4306. return true;
  4307. }
  4308. // Check prev instr because it could have been added by an optimization and we won't see it here.
  4309. if (OpCodeAttr::FastFldInstr(instr->m_opcode) || (instr->m_prev && OpCodeAttr::FastFldInstr(instr->m_prev->m_opcode)))
  4310. {
  4311. // Refuse any operations interacting with Fields
  4312. TRACE_MEMOP_VERBOSE(loop, instr, _u("Field interaction detected"));
  4313. return true;
  4314. }
  4315. if (Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementSlot)
  4316. {
  4317. // Refuse any operations interacting with slots
  4318. TRACE_MEMOP_VERBOSE(loop, instr, _u("Slot interaction detected"));
  4319. return true;
  4320. }
  4321. if (this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val))
  4322. {
  4323. TRACE_MEMOP_VERBOSE(loop, instr, _u("Implicit call bailout detected"));
  4324. return true;
  4325. }
  4326. return false;
  4327. }
  4328. void
  4329. GlobOpt::TryReplaceLdLen(IR::Instr *& instr)
  4330. {
  4331. // Change LdFld on arrays, strings, and 'arguments' to LdLen when we're accessing the .length field
  4332. if ((instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->m_opcode == Js::OpCode::ProfiledLdFld) || instr->m_opcode == Js::OpCode::LdFld || instr->m_opcode == Js::OpCode::ScopedLdFld)
  4333. {
  4334. IR::SymOpnd * opnd = instr->GetSrc1()->AsSymOpnd();
  4335. Sym *sym = opnd->m_sym;
  4336. if (sym->IsPropertySym())
  4337. {
  4338. PropertySym *originalPropertySym = sym->AsPropertySym();
  4339. // only on .length
  4340. if (this->lengthEquivBv != nullptr && this->lengthEquivBv->Test(originalPropertySym->m_id))
  4341. {
  4342. IR::RegOpnd* newopnd = IR::RegOpnd::New(originalPropertySym->m_stackSym, IRType::TyVar, instr->m_func);
  4343. ValueInfo *const objectValueInfo = FindValue(originalPropertySym->m_stackSym)->GetValueInfo();
  4344. // Only for things we'd emit a fast path for
  4345. if (
  4346. objectValueInfo->IsLikelyAnyArray() ||
  4347. objectValueInfo->HasHadStringTag() ||
  4348. objectValueInfo->IsLikelyString() ||
  4349. newopnd->IsArgumentsObject() ||
  4350. (this->blockData.argObjSyms && IsArgumentsOpnd(newopnd))
  4351. )
  4352. {
  4353. // We need to properly transfer over the information from the old operand, which is
  4354. // a SymOpnd, to the new one, which is a RegOpnd. Unfortunately, the types mean the
  4355. // normal copy methods won't work here, so we're going to directly copy data.
  4356. newopnd->SetIsJITOptimizedReg(opnd->GetIsJITOptimizedReg());
  4357. newopnd->SetValueType(objectValueInfo->Type());
  4358. newopnd->SetIsDead(opnd->GetIsDead());
  4359. // Now that we have the operand we need, we can go ahead and make the new instr.
  4360. IR::Instr *newinstr = IR::Instr::New(Js::OpCode::LdLen_A, instr->m_func);
  4361. instr->TransferTo(newinstr);
  4362. newinstr->UnlinkSrc1();
  4363. newinstr->SetSrc1(newopnd);
  4364. instr->InsertAfter(newinstr);
  4365. instr->Remove();
  4366. instr = newinstr;
  4367. }
  4368. }
  4369. }
  4370. }
  4371. }
// Main per-instruction driver of the global optimizer.
// Value-numbers and copy-props the sources, runs the per-instruction optimizations
// (CSE, array source opt, peeps, type specialization), processes the destination,
// and inserts/adjusts any bailout bookkeeping that the optimizations require.
// Returns the next instruction to process (the instruction stream may have been
// rewritten); sets *isInstrRemoved when this instruction was deleted from the block.
IR::Instr *
GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
{
    Assert(instr->m_func->IsTopFunc() || instr->m_func->isGetterSetter || instr->m_func->callSiteIdInParentFunc != UINT16_MAX);

    IR::Opnd *src1, *src2;
    Value *src1Val = nullptr, *src2Val = nullptr, *dstVal = nullptr;
    Value *src1IndirIndexVal = nullptr, *dstIndirIndexVal = nullptr;
    // Snapshot neighbors up front: optimizations below may replace or remove 'instr'.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *instrNext = instr->m_next;

    // When optimizing try/catch, keep currentRegion in sync as we cross region labels.
    if (instr->IsLabelInstr() && this->func->HasTry() && this->func->DoOptimizeTryCatch())
    {
        this->currentRegion = instr->AsLabelInstr()->GetRegion();
        Assert(this->currentRegion);
    }

    if(PrepareForIgnoringIntOverflow(instr))
    {
        // Instruction is bookkeeping for the int-overflow-ignoring range; outside the
        // loop prepass it is no longer needed and is removed here.
        if(!IsLoopPrePass())
        {
            *isInstrRemoved = true;
            currentBlock->RemoveInstr(instr);
        }
        return instrNext;
    }

    // Pseudo-instructions, ByteCodeUses, and Conv_Bool are not optimized here.
    if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
    {
        return instrNext;
    }

    if (instr->m_opcode == Js::OpCode::Yield)
    {
        // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
        this->KillStateForGeneratorYield();
    }

    // Change LdFld on arrays, strings, and 'arguments' to LdLen when we're accessing the .length field
    this->TryReplaceLdLen(instr);

    // Consider: Do we ever get post-op bailout here, and if so is the FillBailOutInfo call in the right place?
    if (instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        this->FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
    }

    this->instrCountSinceLastCleanUp++;

    instr = this->PreOptPeep(instr);

    this->OptArguments(instr);

    //StackArguments Optimization - We bail out if the index is out of range of actuals.
    if ((instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
        instr->DoStackArgsOpt(this->func) && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, IR::BailOnStackArgsOutOfActualsRange);
    }

#if DBG
    // Record byte-code sym uses before optimization so we can verify (below) that
    // optimization did not lose any, unless byteCodeUses was explicitly populated.
    PropertySym *propertySymUseBefore = nullptr;
    Assert(this->byteCodeUses == nullptr);
    this->byteCodeUsesBeforeOpt->ClearAll();
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
    Assert(noImplicitCallUsesToInsert->Count() == 0);
#endif

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    src1 = instr->GetSrc1();
    src2 = instr->GetSrc2();

    if (src1)
    {
        src1Val = this->OptSrc(src1, &instr, &src1IndirIndexVal);
        instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
        if (src2)
        {
            src2Val = this->OptSrc(src2, &instr);
        }
    }
    // An indir dst is also a "use" of its base/index operands; optimize those too.
    if(instr->GetDst() && instr->GetDst()->IsIndirOpnd())
    {
        this->OptSrc(instr->GetDst(), &instr, &dstIndirIndexVal);
    }

    MarkArgumentsUsedForBranch(instr);
    CSEOptimize(this->currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal);
    OptimizeChecks(instr, src1Val, src2Val);
    OptArraySrc(&instr);
    OptNewScObject(&instr, src1Val);

    instr = this->OptPeep(instr, src1Val, src2Val);

    if (instr->m_opcode == Js::OpCode::Nop ||
        (instr->m_opcode == Js::OpCode::CheckThis &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc1()->AsRegOpnd()->m_sym->m_isSafeThis))
    {
        // Instruction became a no-op (or is a CheckThis on a known-safe 'this'):
        // preserve its remaining uses for bailout, then delete it.
        instrNext = instr->m_next;
        InsertNoImplicitCallUses(instr);
        if (this->byteCodeUses)
        {
            this->InsertByteCodeUses(instr);
        }
        *isInstrRemoved = true;
        this->currentBlock->RemoveInstr(instr);
        return instrNext;
    }
    else if (instr->m_opcode == Js::OpCode::GetNewScObject && !this->IsLoopPrePass() && src1Val->GetValueInfo()->IsPrimitive())
    {
        // Constructor returned (src1) a primitive value, so fold this into "dst = Ld_A src2", where src2 is the new object that
        // was passed into the constructor as its 'this' parameter
        instr->FreeSrc1();
        instr->SetSrc1(instr->UnlinkSrc2());
        instr->m_opcode = Js::OpCode::Ld_A;
        src1Val = src2Val;
        src2Val = nullptr;
    }
    else if (instr->m_opcode == Js::OpCode::TryCatch && this->func->DoOptimizeTryCatch())
    {
        ProcessTryCatch(instr);
    }
    else if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // BrOnException was added to model flow from try region to the catch region to assist
        // the backward pass in propagating bytecode upward exposed info from the catch block
        // to the try, and to handle break blocks. Removing it here as it has served its purpose
        // and keeping it around might also have unintended effects while merging block data for
        // the catch block's predecessors.
        // Note that the Deadstore pass will still be able to propagate bytecode upward exposed info
        // because it doesn't skip dead blocks for that.
        this->RemoveFlowEdgeToCatchBlock(instr);
        *isInstrRemoved = true;
        this->currentBlock->RemoveInstr(instr);
        return instrNext;
    }
    else if (instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        this->RemoveFlowEdgeToCatchBlock(instr);
    }

    bool isAlreadyTypeSpecialized = false;
    if (!IsLoopPrePass() && instr->HasBailOutInfo())
    {
        if (instr->GetBailOutKind() == IR::BailOutExpectingInteger)
        {
            isAlreadyTypeSpecialized = TypeSpecializeBailoutExpectedInteger(instr, src1Val, &dstVal);
        }
        else if (instr->GetBailOutKind() == IR::BailOutExpectingString)
        {
            if (instr->GetSrc1()->IsRegOpnd())
            {
                if (!src1Val || !src1Val->GetValueInfo()->IsLikelyString())
                {
                    // Disable SwitchOpt if the source is definitely not a string - This may be realized only in Globopt
                    Assert(IsSwitchOptEnabled());
                    throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
                }
            }
        }
    }

    bool forceInvariantHoisting = false;
    const bool ignoreIntOverflowInRangeForInstr = instr->ignoreIntOverflowInRange; // Save it since the instr can change

    if (!isAlreadyTypeSpecialized)
    {
        bool redoTypeSpec;
        instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);

        // Type specialization may ask for a single retry (e.g. after rewriting the instr).
        if(redoTypeSpec && instr->m_opcode != Js::OpCode::Nop)
        {
            forceInvariantHoisting = false;
            instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);
            Assert(!redoTypeSpec);
        }
        if (instr->m_opcode == Js::OpCode::Nop)
        {
            // Specialization reduced the instruction to a no-op; keep its uses alive for
            // bailout purposes and delete it.
            InsertNoImplicitCallUses(instr);
            if (this->byteCodeUses)
            {
                this->InsertByteCodeUses(instr);
            }
            instrNext = instr->m_next;
            *isInstrRemoved = true;
            this->currentBlock->RemoveInstr(instr);
            return instrNext;
        }
    }

    if (ignoreIntOverflowInRangeForInstr)
    {
        VerifyIntSpecForIgnoringIntOverflow(instr);
    }

    // Track calls after any pre-op bailouts have been inserted before the call, because they will need to restore out params.
    // We don't inline in asmjs and hence we don't need to track calls in asmjs too, skipping this step for asmjs.
    if (!GetIsAsmJSFunc())
    {
        this->TrackCalls(instr);
    }

    if (instr->GetSrc1())
    {
        this->UpdateObjPtrValueType(instr->GetSrc1(), instr);
    }
    IR::Opnd *dst = instr->GetDst();

    if (dst)
    {
        // Copy prop dst uses and mark live/available type syms before tracking kills.
        CopyPropDstUses(dst, instr, src1Val);
    }

    // Track mark temp object before we process the dst so we can generate pre-op bailout
    instr = this->TrackMarkTempObject(instrPrev->m_next, instr);

    bool removed = OptTagChecks(instr);
    if (removed)
    {
        *isInstrRemoved = true;
        return instrNext;
    }

    dstVal = this->OptDst(&instr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);
    dst = instr->GetDst();

    instrNext = instr->m_next;
    if (dst)
    {
        if (this->func->HasTry() && this->func->DoOptimizeTryCatch())
        {
            this->InsertToVarAtDefInTryRegion(instr, dst);
        }
        instr = this->SetTypeCheckBailOut(dst, instr, nullptr);
        this->UpdateObjPtrValueType(dst, instr);
    }

    BVSparse<JitArenaAllocator> instrByteCodeStackSymUsedAfter(this->alloc);
    PropertySym *propertySymUseAfter = nullptr;
    if (this->byteCodeUses != nullptr)
    {
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
    }
#if DBG
    else
    {
        // Nothing was optimized away: the byte-code uses must be unchanged from before.
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
        instrByteCodeStackSymUsedAfter.Equal(this->byteCodeUsesBeforeOpt);
        Assert(propertySymUseAfter == propertySymUseBefore);
    }
#endif

    bool isHoisted = false;
    if (this->currentBlock->loop && !this->IsLoopPrePass())
    {
        isHoisted = this->TryHoistInvariant(instr, this->currentBlock, dstVal, src1Val, src2Val, true, false, forceInvariantHoisting);
    }

    src1 = instr->GetSrc1();
    if (!this->IsLoopPrePass() && src1)
    {
        // instr const, nonConst => canonicalize by swapping operands
        // This simplifies lowering. (somewhat machine dependent)
        // Note that because of Var overflows, src1 may not have been constant prop'd to an IntConst
        this->PreLowerCanonicalize(instr, &src1Val, &src2Val);
    }

    if (!PHASE_OFF(Js::MemOpPhase, this->func) &&
        !isHoisted &&
        !(instr->IsJitProfilingInstr()) &&
        this->currentBlock->loop && !IsLoopPrePass() &&
        !func->IsJitInDebugMode() &&
        (func->HasProfileInfo() && !func->GetReadOnlyProfileInfo()->IsMemOpDisabled()) &&
        this->currentBlock->loop->doMemOp)
    {
        CollectMemOpInfo(instr, src1Val, src2Val);
    }

    InsertNoImplicitCallUses(instr);
    if (this->byteCodeUses != nullptr)
    {
        // Optimization removed some uses from the instruction.
        // Need to insert fake uses so we can get the correct live register to restore in bailout.
        this->byteCodeUses->Minus(&instrByteCodeStackSymUsedAfter);
        if (this->propertySymUse == propertySymUseAfter)
        {
            this->propertySymUse = nullptr;
        }
        this->InsertByteCodeUses(instr);
    }

    if (!this->IsLoopPrePass() && !isHoisted && this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val))
    {
        IR::BailOutKind kind = IR::BailOutOnImplicitCalls;
        if(instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if((bailOutKind & ~IR::BailOutKindBits) != IR::BailOutOnImplicitCallsPreOp)
            {
                // Fold BailOutOnImplicitCallsPreOp into the existing bailout kind bits.
                Assert(!(bailOutKind & ~IR::BailOutKindBits));
                instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
            }
        }
        else if (instr->forcePreOpBailOutIfNeeded || this->isRecursiveCallOnLandingPad)
        {
            // We can't have a byte code reg slot as dst to generate a
            // pre-op implicit call after we have processed the dst.
            // Consider: This might miss an opportunity to use a copy prop sym to restore
            // some other byte code reg if the dst is that copy prop that we already killed.
            Assert(!instr->GetDst()
                || !instr->GetDst()->IsRegOpnd()
                || instr->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg()
                || !instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot());

            this->GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
        }
        else
        {
            // Capture value of the bailout after the operation is done.
            this->GenerateBailAfterOperation(&instr, kind);
        }
    }

    if (instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        GlobOptBlockData * globOptData = &this->currentBlock->globOptData;
        globOptData->changedSyms->ClearAll();

        if (!this->changedSymsAfterIncBailoutCandidate->IsEmpty())
        {
            //
            // some symbols are changed after the values for current bailout have been
            // captured (GlobOpt::CapturedValues), need to restore such symbols as changed
            // for following incremental bailout construction, or we will miss capturing
            // values for later bailout
            //
            // swap changedSyms and changedSymsAfterIncBailoutCandidate
            // because both are from this->alloc
            BVSparse<JitArenaAllocator> * tempBvSwap = globOptData->changedSyms;
            globOptData->changedSyms = this->changedSymsAfterIncBailoutCandidate;
            this->changedSymsAfterIncBailoutCandidate = tempBvSwap;
        }

        globOptData->capturedValues = globOptData->capturedValuesCandidate;

        // null out capturedValuesCandicate to stop tracking symbols change for it
        globOptData->capturedValuesCandidate = nullptr;
    }

    return instrNext;
}
// Optimize tagged-value checks for an instruction that dereferences an object
// (field loads/stores and BailOnNotObject/BailOnNotArray).
// - For BailOnNotObject: if the value is already known to be untagged, the check
//   is redundant and the instruction is removed (returns true).
// - For the other opcodes: if the object may still be tagged, split out an
//   explicit BailOnNotObject (BailOutOnTaggedValue) before the instruction and
//   record the value as untagged from here on.
// Returns true only when 'instr' itself was removed from the block.
bool
GlobOpt::OptTagChecks(IR::Instr *instr)
{
    if (PHASE_OFF(Js::OptTagChecksPhase, this->func) || !this->DoTagChecks())
    {
        return false;
    }

    StackSym *stackSym = nullptr;
    IR::SymOpnd *symOpnd = nullptr;
    IR::RegOpnd *regOpnd = nullptr;

    // Find the stack sym holding the object pointer being checked/dereferenced.
    switch(instr->m_opcode)
    {
    case Js::OpCode::LdFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::CheckFixedFld:
    case Js::OpCode::CheckPropertyGuardAndLoadType:
        symOpnd = instr->GetSrc1()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;

    case Js::OpCode::BailOnNotObject:
    case Js::OpCode::BailOnNotArray:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            regOpnd = instr->GetSrc1()->AsRegOpnd();
            stackSym = regOpnd->m_sym;
        }
        break;

    case Js::OpCode::StFld:
        symOpnd = instr->GetDst()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;
    }

    if (stackSym)
    {
        Value *value = FindValue(blockData.symToValueMap, stackSym);
        if (value)
        {
            ValueType valueType = value->GetValueInfo()->Type();
            if (instr->m_opcode == Js::OpCode::BailOnNotObject)
            {
                if (valueType.CanBeTaggedValue())
                {
                    // The check is still needed; from here on the value is known untagged.
                    ChangeValueType(nullptr, value, valueType.SetCanBeTaggedValue(false), false);
                    return false;
                }
                // Value already known to be untagged: the check is redundant, remove it.
                if (this->byteCodeUses)
                {
                    this->InsertByteCodeUses(instr);
                }
                this->currentBlock->RemoveInstr(instr);
                return true;
            }
            // Only split out a tag check when the value could be tagged but has never
            // been seen as a number, and we're not inside a loop body pass where the
            // assumption could be invalidated (prepass, or block not in a loop).
            if (valueType.CanBeTaggedValue() &&
                !valueType.HasBeenNumber() &&
                (this->IsLoopPrePass() || !this->currentBlock->loop))
            {
                ValueType newValueType = valueType.SetCanBeTaggedValue(false);

                // Split out the tag check as a separate instruction.
                IR::Instr *bailOutInstr;
                bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, IR::BailOutOnTaggedValue, instr, instr->m_func);
                if (!this->IsLoopPrePass())
                {
                    FillBailOutInfo(this->currentBlock, bailOutInstr->GetBailOutInfo());
                }
                IR::RegOpnd *srcOpnd = regOpnd;
                if (!srcOpnd)
                {
                    // Field access case: build a reg use of the object sym for the check.
                    srcOpnd = IR::RegOpnd::New(stackSym, stackSym->GetType(), instr->m_func);
                    AnalysisAssert(symOpnd);
                    if (symOpnd->GetIsJITOptimizedReg())
                    {
                        srcOpnd->SetIsJITOptimizedReg(true);
                    }
                }
                bailOutInstr->SetSrc1(srcOpnd);
                bailOutInstr->GetSrc1()->SetValueType(valueType);
                instr->InsertBefore(bailOutInstr);

                // Propagate the now-untagged value type to the operand and the value table.
                if (symOpnd)
                {
                    symOpnd->SetPropertyOwnerValueType(newValueType);
                }
                else
                {
                    regOpnd->SetValueType(newValueType);
                }
                ChangeValueType(nullptr, value, newValueType, false);
            }
        }
    }

    return false;
}
  4777. bool
  4778. GlobOpt::TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal)
  4779. {
  4780. bool isAlreadyTypeSpecialized = false;
  4781. if(instr->GetSrc1()->IsRegOpnd())
  4782. {
  4783. if (!src1Val || !src1Val->GetValueInfo()->IsLikelyInt() || instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
  4784. {
  4785. Assert(IsSwitchOptEnabled());
  4786. throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
  4787. }
  4788. // Attach the BailOutExpectingInteger to FromVar and Remove the bail out info on the Ld_A (Begin Switch) instr.
  4789. this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, TyInt32, IR::BailOutExpectingInteger, false, instr);
  4790. //TypeSpecialize the dst of Ld_A
  4791. TypeSpecializeIntDst(instr, instr->m_opcode, src1Val, src1Val, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, dstVal);
  4792. isAlreadyTypeSpecialized = true;
  4793. }
  4794. instr->ClearBailOutInfo();
  4795. return isAlreadyTypeSpecialized;
  4796. }
// Optimize the destination operand of an instruction: finish property-op
// bookkeeping, strip or add bailouts for native-array/typed-array stores that
// won't get a fast path, process kills, value-number the dst, and (outside the
// prepass) propagate the resulting value type onto the operand. Also prefers a
// back-edge-live sym as the value's sym store for Ld_A/Ld_I4 copies in loops,
// and registers the instruction with CSE. Returns the dst value (may be null).
Value*
GlobOpt::OptDst(
    IR::Instr ** pInstr,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    Value *dstIndirIndexVal,
    Value *src1IndirIndexVal)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *opnd = instr->GetDst();

    if (opnd)
    {
        if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
        }
        else if (instr->m_opcode == Js::OpCode::StElemI_A ||
                 instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                 instr->m_opcode == Js::OpCode::InitComputedProperty)
        {
            // Element stores can add properties; object-header-inlined type syms
            // are no longer reliable.
            this->KillObjectHeaderInlinedTypeSyms(this->currentBlock, false);
        }

        if (opnd->IsIndirOpnd() && !this->IsLoopPrePass())
        {
            IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
            const ValueType baseValueType(baseOpnd->GetValueType());

            // Storing a Var into a likely-native array (or, on x86 without SSE2,
            // a float typed array) will not get the usual fast path in the lowerer.
            if ((
                    baseValueType.IsLikelyNativeArray() ||
                #ifdef _M_IX86
                    (
                        !AutoSystemInfo::Data.SSE2Available() &&
                        baseValueType.IsLikelyObject() &&
                        (
                            baseValueType.GetObjectType() == ObjectType::Float32Array ||
                            baseValueType.GetObjectType() == ObjectType::Float64Array
                        )
                    )
                #else
                    false
                #endif
                ) &&
                instr->GetSrc1()->IsVar())
            {
                if(instr->m_opcode == Js::OpCode::StElemC)
                {
                    // StElemC has different code that handles native array conversion or missing value stores. Add a bailout
                    // for those cases.
                    Assert(baseValueType.IsLikelyNativeArray());
                    Assert(!instr->HasBailOutInfo());
                    GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
                }
                else if(instr->HasBailOutInfo())
                {
                    // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
                    // path. Note that the removed bailouts should not be necessary for correctness. Bailout on native array
                    // conversion will be handled automatically as normal.
                    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                    if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
                    {
                        bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
                    }
                    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
                    {
                        bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
                    }
                    if(bailOutKind)
                    {
                        instr->SetBailOutKind(bailOutKind);
                    }
                    else
                    {
                        instr->ClearBailOutInfo();
                    }
                }
            }
        }
    }

    this->ProcessKills(instr);

    if (opnd)
    {
        if (dstVal == nullptr)
        {
            dstVal = ValueNumberDst(pInstr, src1Val, src2Val);
        }
        if (this->IsLoopPrePass())
        {
            // Keep track of symbols defined in the loop.
            if (opnd->IsRegOpnd())
            {
                StackSym *symDst = opnd->AsRegOpnd()->m_sym;
                rootLoopPrePass->symsDefInLoop->Set(symDst->m_id);
            }
        }
        else if (dstVal)
        {
            opnd->SetValueType(dstVal->GetValueInfo()->Type());

            if(currentBlock->loop &&
                !IsLoopPrePass() &&
                (instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::Ld_I4) &&
                instr->GetSrc1()->IsRegOpnd() &&
                !func->IsJitInDebugMode() &&
                func->DoGlobOptsForGeneratorFunc())
            {
                // Look for the following patterns:
                //
                // Pattern 1:
                //     s1[liveOnBackEdge] = s3[dead]
                //
                // Pattern 2:
                //     s3 = operation(s1[liveOnBackEdge], s2)
                //     s1[liveOnBackEdge] = s3
                //
                // In both patterns, s1 and s3 have the same value by the end. Prefer to use s1 as the sym store instead of s3
                // since s1 is live on back-edge, as otherwise, their lifetimes overlap, requiring two registers to hold the
                // value instead of one.
                do
                {
                    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
                    StackSym *srcVarSym = src->m_sym;
                    if(srcVarSym->IsTypeSpec())
                    {
                        srcVarSym = srcVarSym->GetVarEquivSym(nullptr);
                        Assert(srcVarSym);
                    }
                    if(dstVal->GetValueInfo()->GetSymStore() != srcVarSym)
                    {
                        break;
                    }

                    IR::RegOpnd *const dst = opnd->AsRegOpnd();
                    StackSym *dstVarSym = dst->m_sym;
                    if(dstVarSym->IsTypeSpec())
                    {
                        dstVarSym = dstVarSym->GetVarEquivSym(nullptr);
                        Assert(dstVarSym);
                    }
                    if(!currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(dstVarSym->m_id))
                    {
                        break;
                    }

                    Value *const srcValue = FindValue(srcVarSym);
                    if(srcValue->GetValueNumber() != dstVal->GetValueNumber())
                    {
                        break;
                    }

                    if(!src->GetIsDead())
                    {
                        // Pattern 2 requires the previous instruction to have defined the
                        // src from the dst (operation(s1, s2) -> s3); otherwise bail.
                        IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                        IR::Opnd *const prevDst = prevInstr->GetDst();
                        if(!prevDst ||
                            !src->IsEqualInternal(prevDst) ||
                            !(
                                (prevInstr->GetSrc1() && dst->IsEqual(prevInstr->GetSrc1())) ||
                                (prevInstr->GetSrc2() && dst->IsEqual(prevInstr->GetSrc2()))
                            ))
                        {
                            break;
                        }
                    }

                    this->SetSymStoreDirect(dstVal->GetValueInfo(), dstVarSym);
                } while(false);
            }
        }

        this->ValueNumberObjectType(opnd, instr);
    }

    this->CSEAddInstr(this->currentBlock, *pInstr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);

    return dstVal;
}
  4965. void
  4966. GlobOpt::CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val)
  4967. {
  4968. if (opnd->IsSymOpnd())
  4969. {
  4970. IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
  4971. if (symOpnd->m_sym->IsPropertySym())
  4972. {
  4973. PropertySym * originalPropertySym = symOpnd->m_sym->AsPropertySym();
  4974. Value *const objectValue = FindValue(originalPropertySym->m_stackSym);
  4975. symOpnd->SetPropertyOwnerValueType(objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
  4976. this->FieldHoistOptDst(instr, originalPropertySym, src1Val);
  4977. PropertySym * sym = this->CopyPropPropertySymObj(symOpnd, instr);
  4978. if (sym != originalPropertySym && !this->IsLoopPrePass())
  4979. {
  4980. // Consider: This doesn't detect hoistability of a property sym after object pointer copy prop
  4981. // on loop prepass. But if it so happened that the property sym is hoisted, we might as well do so.
  4982. this->FieldHoistOptDst(instr, sym, src1Val);
  4983. }
  4984. }
  4985. }
  4986. }
// Field PRE support: during the loop prepass, give 'propertySym' a speculative
// initial value in the loop landing pad and in all already-processed blocks of
// the loop, so a field that is live on entry can be treated as available inside
// the loop. Bails out (returns early) when the field is killed in the loop,
// already has a value, is already live in the landing pad, or when the object
// pointer's value differs between here and the landing pad.
void
GlobOpt::SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym)
{
    Value *initialValue;
    StackSym *symStore;

    if (loop->allFieldsKilled || loop->fieldKilled->Test(originalPropertySym->m_id))
    {
        return;
    }
    Assert(!loop->fieldKilled->Test(propertySym->m_id));

    // Value already exists
    if (this->FindValue(propertySym))
    {
        return;
    }

    // If this initial value was already added, we would find in the current value table.
    Assert(!loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue));

    // If propertySym is live in landingPad, we don't need an initial value.
    if (loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
    {
        return;
    }

    // The object pointer must have the same value in the landing pad as it does
    // here, otherwise the field in the landing pad is a different field.
    Value *landingPadObjPtrVal, *currentObjPtrVal;
    landingPadObjPtrVal = this->FindValue(loop->landingPad->globOptData.symToValueMap, propertySym->m_stackSym);
    currentObjPtrVal = this->FindValue(propertySym->m_stackSym);
    if (!currentObjPtrVal || !landingPadObjPtrVal || currentObjPtrVal->GetValueNumber() != landingPadObjPtrVal->GetValueNumber())
    {
        // objPtr has a different value in the landing pad.
        return;
    }

    // The opnd's value type has not yet been initialized. Since the property sym doesn't have a value, it effectively has an
    // Uninitialized value type. Use the profiled value type from the instruction.
    const ValueType profiledValueType =
        instr->IsProfiledInstr() ? instr->AsProfiledInstr()->u.FldInfo().valueType : ValueType::Uninitialized;
    Assert(!profiledValueType.IsDefinite()); // Hence the values created here don't need to be tracked for kills
    initialValue = this->NewGenericValue(profiledValueType, propertySym);
    symStore = StackSym::New(this->func);

    initialValue->GetValueInfo()->SetSymStore(symStore);
    loop->initialValueFieldMap.Add(propertySym, initialValue->Copy(this->alloc, initialValue->GetValueNumber()));

    // Copy the initial value into the landing pad, but without a symStore
    Value *landingPadInitialValue = Value::New(this->alloc, initialValue->GetValueNumber(),
        ValueInfo::New(this->alloc, initialValue->GetValueInfo()->Type()));
    this->SetValue(&(loop->landingPad->globOptData), landingPadInitialValue, propertySym);
    loop->landingPad->globOptData.liveFields->Set(propertySym->m_id);

#if DBG_DUMP
    if (PHASE_TRACE(Js::FieldPREPhase, this->func))
    {
        Output::Print(_u("** TRACE: Field PRE initial value for loop head #%d. Val:%d symStore:"),
            loop->GetHeadBlock()->GetBlockNum(), initialValue->GetValueNumber());
        symStore->Dump();
        Output::Print(_u("\n    Instr: "));
        instr->Dump();
    }
#endif

    // Add initial value to all the previous blocks in the loop.
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, this->currentBlock->GetPrev(), loop->GetHeadBlock())
    {
        if (block->GetDataUseCount() == 0)
        {
            // All successor blocks have been processed, no point in adding the value.
            continue;
        }
        Value *newValue = initialValue->Copy(this->alloc, initialValue->GetValueNumber());
        this->SetValue(&(block->globOptData), newValue, propertySym);
        block->globOptData.liveFields->Set(propertySym->m_id);
        this->SetValue(&(block->globOptData), newValue, symStore);
        block->globOptData.liveVarSyms->Set(symStore->m_id);
    } NEXT_BLOCK_BACKWARD_IN_RANGE;

    // Finally make the value available in the current block as well.
    this->SetValue(&(this->currentBlock->globOptData), initialValue, symStore);
    this->currentBlock->globOptData.liveVarSyms->Set(symStore->m_id);
    this->blockData.liveFields->Set(propertySym->m_id);
}
  5059. // Examine src, apply copy prop and value number it
  5060. Value*
  5061. GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, IR::IndirOpnd *parentIndirOpnd)
  5062. {
  5063. IR::Instr * &instr = *pInstr;
  5064. Assert(!indirIndexValRef || !*indirIndexValRef);
  5065. Assert(
  5066. parentIndirOpnd
  5067. ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
  5068. : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
  5069. Sym *sym;
  5070. Value *val;
  5071. PropertySym *originalPropertySym = nullptr;
  5072. switch(opnd->GetKind())
  5073. {
  5074. case IR::OpndKindIntConst:
  5075. val = this->GetIntConstantValue(opnd->AsIntConstOpnd()->AsInt32(), instr);
  5076. opnd->SetValueType(val->GetValueInfo()->Type());
  5077. return val;
  5078. case IR::OpndKindInt64Const:
  5079. return nullptr;
  5080. case IR::OpndKindFloatConst:
  5081. {
  5082. const FloatConstType floatValue = opnd->AsFloatConstOpnd()->m_value;
  5083. int32 int32Value;
  5084. if(Js::JavascriptNumber::TryGetInt32Value(floatValue, &int32Value))
  5085. {
  5086. val = GetIntConstantValue(int32Value, instr);
  5087. }
  5088. else
  5089. {
  5090. val = NewFloatConstantValue(floatValue);
  5091. }
  5092. opnd->SetValueType(val->GetValueInfo()->Type());
  5093. return val;
  5094. }
  5095. case IR::OpndKindAddr:
  5096. {
  5097. IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
  5098. if (addrOpnd->m_isFunction)
  5099. {
  5100. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func), "Fixed function address operand with fixed method calls phase disabled?");
  5101. val = NewFixedFunctionValue((Js::JavascriptFunction *)addrOpnd->m_address, addrOpnd);
  5102. opnd->SetValueType(val->GetValueInfo()->Type());
  5103. return val;
  5104. }
  5105. else if (addrOpnd->IsVar() && Js::TaggedInt::Is(addrOpnd->m_address))
  5106. {
  5107. val = this->GetIntConstantValue(Js::TaggedInt::ToInt32(addrOpnd->m_address), instr);
  5108. opnd->SetValueType(val->GetValueInfo()->Type());
  5109. return val;
  5110. }
  5111. val = this->GetVarConstantValue(addrOpnd);
  5112. return val;
  5113. }
  5114. case IR::OpndKindSym:
  5115. {
  5116. // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
  5117. // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
  5118. // value if available, before returning from this function.
  5119. opnd->SetValueType(ValueType::Uninitialized);
  5120. sym = opnd->AsSymOpnd()->m_sym;
  5121. // Don't create a new value for ArgSlots and don't copy prop them away.
  5122. if (sym->IsStackSym() && sym->AsStackSym()->IsArgSlotSym())
  5123. {
  5124. return nullptr;
  5125. }
  5126. // Unless we have profile info, don't create a new value for ArgSlots and don't copy prop them away.
  5127. if (sym->IsStackSym() && sym->AsStackSym()->IsParamSlotSym())
  5128. {
  5129. if (!instr->m_func->IsLoopBody() && instr->m_func->HasProfileInfo())
  5130. {
  5131. // Skip "this" pointer.
  5132. int paramSlotNum = sym->AsStackSym()->GetParamSlotNum() - 2;
  5133. if (paramSlotNum >= 0)
  5134. {
  5135. const auto parameterType = instr->m_func->GetReadOnlyProfileInfo()->GetParameterInfo(static_cast<Js::ArgSlot>(paramSlotNum));
  5136. val = NewGenericValue(parameterType);
  5137. opnd->SetValueType(val->GetValueInfo()->Type());
  5138. return val;
  5139. }
  5140. }
  5141. return nullptr;
  5142. }
  5143. if (!sym->IsPropertySym())
  5144. {
  5145. break;
  5146. }
  5147. originalPropertySym = sym->AsPropertySym();
  5148. Value *const objectValue = FindValue(originalPropertySym->m_stackSym);
  5149. opnd->AsSymOpnd()->SetPropertyOwnerValueType(
  5150. objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
  5151. if (!FieldHoistOptSrc(opnd->AsSymOpnd(), instr, originalPropertySym))
  5152. {
  5153. sym = this->CopyPropPropertySymObj(opnd->AsSymOpnd(), instr);
  5154. // Consider: This doesn't detect hoistability of a property sym after object pointer copy prop
  5155. // on loop prepass. But if it so happened that the property sym is hoisted, we might as well do so.
  5156. if (originalPropertySym == sym || this->IsLoopPrePass() ||
  5157. !FieldHoistOptSrc(opnd->AsSymOpnd(), instr, sym->AsPropertySym()))
  5158. {
  5159. if (!DoFieldCopyProp())
  5160. {
  5161. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  5162. {
  5163. this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
  5164. }
  5165. return nullptr;
  5166. }
  5167. switch (instr->m_opcode)
  5168. {
  5169. // These need the symbolic reference to the field, don't copy prop the value of the field
  5170. case Js::OpCode::DeleteFld:
  5171. case Js::OpCode::DeleteRootFld:
  5172. case Js::OpCode::DeleteFldStrict:
  5173. case Js::OpCode::DeleteRootFldStrict:
  5174. case Js::OpCode::ScopedDeleteFld:
  5175. case Js::OpCode::ScopedDeleteFldStrict:
  5176. case Js::OpCode::LdMethodFromFlags:
  5177. case Js::OpCode::BrOnNoProperty:
  5178. case Js::OpCode::BrOnHasProperty:
  5179. case Js::OpCode::LdMethodFldPolyInlineMiss:
  5180. case Js::OpCode::StSlotChkUndecl:
  5181. return nullptr;
  5182. };
  5183. if (instr->CallsGetter())
  5184. {
  5185. return nullptr;
  5186. }
  5187. if (this->IsLoopPrePass() && this->DoFieldPRE(this->rootLoopPrePass))
  5188. {
  5189. if (!this->prePassLoop->allFieldsKilled && !this->prePassLoop->fieldKilled->Test(sym->m_id))
  5190. {
  5191. this->SetLoopFieldInitialValue(this->rootLoopPrePass, instr, sym->AsPropertySym(), originalPropertySym);
  5192. }
  5193. if (this->IsPREInstrCandidateLoad(instr->m_opcode))
  5194. {
  5195. // Foreach property sym, remember the first instruction that loads it.
  5196. // Can this be done in one call?
  5197. if (!this->prePassInstrMap->ContainsKey(sym->m_id))
  5198. {
  5199. this->prePassInstrMap->AddNew(sym->m_id, instr);
  5200. }
  5201. }
  5202. }
  5203. break;
  5204. }
  5205. }
  5206. // We field hoisted, we can continue as a reg.
  5207. opnd = instr->GetSrc1();
  5208. }
  5209. case IR::OpndKindReg:
  5210. // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
  5211. // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
  5212. // value if available, before returning from this function.
  5213. opnd->SetValueType(ValueType::Uninitialized);
  5214. sym = opnd->AsRegOpnd()->m_sym;
  5215. this->MarkTempLastUse(instr, opnd->AsRegOpnd());
  5216. if (sym->AsStackSym()->IsTypeSpec())
  5217. {
  5218. sym = sym->AsStackSym()->GetVarEquivSym(this->func);
  5219. }
  5220. break;
  5221. case IR::OpndKindIndir:
  5222. this->OptimizeIndirUses(opnd->AsIndirOpnd(), &instr, indirIndexValRef);
  5223. return nullptr;
  5224. default:
  5225. return nullptr;
  5226. }
  5227. val = this->FindValue(sym);
  5228. if (val)
  5229. {
  5230. Assert(GlobOpt::IsLive(sym, this->currentBlock) || (sym->IsPropertySym()));
  5231. if (instr)
  5232. {
  5233. opnd = this->CopyProp(opnd, instr, val, parentIndirOpnd);
  5234. }
  5235. // Check if we freed the operand.
  5236. if (opnd == nullptr)
  5237. {
  5238. return nullptr;
  5239. }
  5240. // In a loop prepass, determine stack syms that are used before they are defined in the root loop for which the prepass
  5241. // is being done. This information is used to do type specialization conversions in the landing pad where appropriate.
  5242. if(IsLoopPrePass() &&
  5243. sym->IsStackSym() &&
  5244. !rootLoopPrePass->symsUsedBeforeDefined->Test(sym->m_id) &&
  5245. IsLive(sym, &rootLoopPrePass->landingPad->globOptData) && !isAsmJSFunc) // no typespec in asmjs and hence skipping this
  5246. {
  5247. Value *const landingPadValue = FindValue(rootLoopPrePass->landingPad->globOptData.symToValueMap, sym);
  5248. if(landingPadValue && val->GetValueNumber() == landingPadValue->GetValueNumber())
  5249. {
  5250. rootLoopPrePass->symsUsedBeforeDefined->Set(sym->m_id);
  5251. ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
  5252. if(landingPadValueInfo->IsLikelyNumber())
  5253. {
  5254. rootLoopPrePass->likelyNumberSymsUsedBeforeDefined->Set(sym->m_id);
  5255. if(DoAggressiveIntTypeSpec() ? landingPadValueInfo->IsLikelyInt() : landingPadValueInfo->IsInt())
  5256. {
  5257. // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
  5258. // specialization is enabled.
  5259. rootLoopPrePass->likelyIntSymsUsedBeforeDefined->Set(sym->m_id);
  5260. }
  5261. }
  5262. #ifdef ENABLE_SIMDJS
  5263. // SIMD_JS
  5264. // For uses before defs, we set likelySimd128*SymsUsedBeforeDefined bits for syms that have landing pad value info that allow type-spec to happen in the loop body.
  5265. // The BV will be added to loop header if the backedge has a live matching type-spec value. We then compensate in the loop header to unbox the value.
  5266. // This allows type-spec in the landing pad instead of boxing/unboxing on each iteration.
  5267. if (Js::IsSimd128Opcode(instr->m_opcode))
  5268. {
  5269. // Simd ops are strongly typed. We type-spec only if the type is likely/Definitely the expected type or if we have object which can come from merging different Simd types.
  5270. // Simd value must be initialized properly on all paths before the loop entry. Cannot be merged with Undefined/Null.
  5271. ThreadContext::SimdFuncSignature funcSignature;
  5272. instr->m_func->GetScriptContext()->GetThreadContext()->GetSimdFuncSignatureFromOpcode(instr->m_opcode, funcSignature);
  5273. Assert(funcSignature.valid);
  5274. ValueType expectedType = funcSignature.args[opnd == instr->GetSrc1() ? 0 : 1];
  5275. if (expectedType.IsSimd128Float32x4())
  5276. {
  5277. if (
  5278. (landingPadValueInfo->IsLikelySimd128Float32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5279. &&
  5280. !landingPadValueInfo->HasBeenUndefined() && !landingPadValueInfo->HasBeenNull()
  5281. )
  5282. {
  5283. rootLoopPrePass->likelySimd128F4SymsUsedBeforeDefined->Set(sym->m_id);
  5284. }
  5285. }
  5286. else if (expectedType.IsSimd128Int32x4())
  5287. {
  5288. if (
  5289. (landingPadValueInfo->IsLikelySimd128Int32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5290. &&
  5291. !landingPadValueInfo->HasBeenUndefined() && !landingPadValueInfo->HasBeenNull()
  5292. )
  5293. {
  5294. rootLoopPrePass->likelySimd128I4SymsUsedBeforeDefined->Set(sym->m_id);
  5295. }
  5296. }
  5297. }
  5298. else if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc1() && instr->GetDst()->GetValueType().IsSimd128())
  5299. {
  5300. // Extended_Args for Simd ops are annotated with the expected type by the inliner. Use this info to find out if type-spec is supposed to happen.
  5301. ValueType expectedType = instr->GetDst()->GetValueType();
  5302. if ((landingPadValueInfo->IsLikelySimd128Float32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5303. && expectedType.IsSimd128Float32x4())
  5304. {
  5305. rootLoopPrePass->likelySimd128F4SymsUsedBeforeDefined->Set(sym->m_id);
  5306. }
  5307. else if ((landingPadValueInfo->IsLikelySimd128Int32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5308. && expectedType.IsSimd128Int32x4())
  5309. {
  5310. rootLoopPrePass->likelySimd128I4SymsUsedBeforeDefined->Set(sym->m_id);
  5311. }
  5312. }
  5313. #endif
  5314. }
  5315. }
  5316. }
  5317. else if ((GlobOpt::TransferSrcValue(instr) || OpCodeAttr::CanCSE(instr->m_opcode)) && (opnd == instr->GetSrc1() || opnd == instr->GetSrc2()))
  5318. {
  5319. if (sym->IsPropertySym())
  5320. {
  5321. val = this->CreateFieldSrcValue(sym->AsPropertySym(), originalPropertySym, &opnd, instr);
  5322. }
  5323. else
  5324. {
  5325. val = this->NewGenericValue(ValueType::Uninitialized, opnd);
  5326. }
  5327. }
  5328. if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  5329. {
  5330. TryOptimizeInstrWithFixedDataProperty(&instr);
  5331. this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
  5332. }
  5333. if (val)
  5334. {
  5335. ValueType valueType(val->GetValueInfo()->Type());
  5336. if (valueType.IsLikelyNativeArray() && !valueType.IsObject() && instr->IsProfiledInstr())
  5337. {
  5338. // See if we have profile data for the array type
  5339. IR::ProfiledInstr *const profiledInstr = instr->AsProfiledInstr();
  5340. ValueType profiledArrayType;
  5341. switch(instr->m_opcode)
  5342. {
  5343. case Js::OpCode::LdElemI_A:
  5344. if(instr->GetSrc1()->IsIndirOpnd() && opnd == instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd())
  5345. {
  5346. profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
  5347. }
  5348. break;
  5349. case Js::OpCode::StElemI_A:
  5350. case Js::OpCode::StElemI_A_Strict:
  5351. case Js::OpCode::StElemC:
  5352. if(instr->GetDst()->IsIndirOpnd() && opnd == instr->GetDst()->AsIndirOpnd()->GetBaseOpnd())
  5353. {
  5354. profiledArrayType = profiledInstr->u.stElemInfo->GetArrayType();
  5355. }
  5356. break;
  5357. case Js::OpCode::LdLen_A:
  5358. if(instr->GetSrc1()->IsRegOpnd() && opnd == instr->GetSrc1())
  5359. {
  5360. profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
  5361. }
  5362. break;
  5363. }
  5364. if(profiledArrayType.IsLikelyObject() &&
  5365. profiledArrayType.GetObjectType() == valueType.GetObjectType() &&
  5366. (profiledArrayType.HasVarElements() || (valueType.HasIntElements() && profiledArrayType.HasFloatElements())))
  5367. {
  5368. // Merge array type we pulled from profile with type propagated by dataflow.
  5369. valueType = valueType.Merge(profiledArrayType).SetHasNoMissingValues(valueType.HasNoMissingValues());
  5370. ChangeValueType(currentBlock, FindValue(blockData.symToValueMap, opnd->AsRegOpnd()->m_sym), valueType, false);
  5371. }
  5372. }
  5373. opnd->SetValueType(valueType);
  5374. if(!IsLoopPrePass() && opnd->IsSymOpnd() && valueType.IsDefinite())
  5375. {
  5376. if (opnd->AsSymOpnd()->m_sym->IsPropertySym())
  5377. {
  5378. // A property sym can only be guaranteed to have a definite value type when implicit calls are disabled from the
  5379. // point where the sym was defined with the definite value type. Insert an instruction to indicate to the
  5380. // dead-store pass that implicit calls need to be kept disabled until after this instruction.
  5381. Assert(DoFieldCopyProp());
  5382. CaptureNoImplicitCallUses(opnd, false, instr);
  5383. }
  5384. }
  5385. }
  5386. else
  5387. {
  5388. opnd->SetValueType(ValueType::Uninitialized);
  5389. }
  5390. return val;
  5391. }
  5392. /*
  5393. * GlobOpt::TryOptimizeInstrWithFixedDataProperty
  5394. * Converts Ld[Root]Fld instr to
  5395. * * CheckFixedFld
  5396. * * Dst = Ld_A <int Constant value>
  5397. * This API assumes that the source operand is a Sym/PropertySym kind.
  5398. */
  5399. void
  5400. GlobOpt::TryOptimizeInstrWithFixedDataProperty(IR::Instr ** const pInstr)
  5401. {
  5402. Assert(pInstr);
  5403. IR::Instr * &instr = *pInstr;
  5404. IR::Opnd * src1 = instr->GetSrc1();
  5405. Assert(src1 && src1->IsSymOpnd() && src1->AsSymOpnd()->IsPropertySymOpnd());
  5406. if(PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func))
  5407. {
  5408. return;
  5409. }
  5410. if (!this->IsLoopPrePass() && !this->isRecursiveCallOnLandingPad &&
  5411. OpCodeAttr::CanLoadFixedFields(instr->m_opcode))
  5412. {
  5413. instr->TryOptimizeInstrWithFixedDataProperty(&instr, this);
  5414. }
  5415. }
  5416. bool
  5417. GlobOpt::TransferSrcValue(IR::Instr * instr)
  5418. {
  5419. // Return whether the instruction transfers a value to the destination.
  5420. // This is used to determine whether we should generate a value for the src so that it will
  5421. // match with the dst for copy prop.
  5422. // No point creating an unknown value for the src of a binary instr, as the dst will just be a different
  5423. // Don't create value for instruction without dst as well. The value doesn't go anywhere.
  5424. // if (src2 == nullptr) Disable copy prop for ScopedLdFld/ScopeStFld, etc., consider enabling that in the future
  5425. // Consider: Add opcode attribute to indicate whether the opcode would use the value or not
  5426. return instr->GetDst() != nullptr && instr->GetSrc2() == nullptr && !OpCodeAttr::DoNotTransfer(instr->m_opcode) && !instr->CallsAccessor();
  5427. }
  5428. Value*
  5429. GlobOpt::FindValue(Sym *sym)
  5430. {
  5431. return FindValue(this->blockData.symToValueMap, sym);
  5432. }
  5433. Value*
  5434. GlobOpt::FindValue(GlobHashTable *valueNumberMap, Sym *sym)
  5435. {
  5436. Assert(valueNumberMap);
  5437. if (sym->IsStackSym() && sym->AsStackSym()->IsTypeSpec())
  5438. {
  5439. sym = sym->AsStackSym()->GetVarEquivSym(this->func);
  5440. }
  5441. else if (sym->IsPropertySym())
  5442. {
  5443. return FindPropertyValue(valueNumberMap, sym->m_id);
  5444. }
  5445. if (sym->IsStackSym() && sym->AsStackSym()->IsFromByteCodeConstantTable())
  5446. {
  5447. return this->byteCodeConstantValueArray->Get(sym->m_id);
  5448. }
  5449. else
  5450. {
  5451. return FindValueFromHashTable(valueNumberMap, sym->m_id);
  5452. }
  5453. }
  5454. ValueNumber
  5455. GlobOpt::FindValueNumber(GlobHashTable *valueNumberMap, Sym *sym)
  5456. {
  5457. Value *val = FindValue(valueNumberMap, sym);
  5458. return val->GetValueNumber();
  5459. }
  5460. Value *
  5461. GlobOpt::FindPropertyValue(GlobHashTable *valueNumberMap, SymID symId)
  5462. {
  5463. Assert(this->func->m_symTable->Find(symId)->IsPropertySym());
  5464. if (!this->blockData.liveFields->Test(symId))
  5465. {
  5466. Assert(!IsHoistablePropertySym(symId));
  5467. return nullptr;
  5468. }
  5469. return FindValueFromHashTable(valueNumberMap, symId);
  5470. }
  5471. ValueNumber
  5472. GlobOpt::FindPropertyValueNumber(GlobHashTable *valueNumberMap, SymID symId)
  5473. {
  5474. Value *val = FindPropertyValue(valueNumberMap, symId);
  5475. return val->GetValueNumber();
  5476. }
  5477. Value *
  5478. GlobOpt::FindObjectTypeValue(StackSym* typeSym)
  5479. {
  5480. return FindObjectTypeValue(typeSym, this->blockData.symToValueMap);
  5481. }
  5482. Value *
  5483. GlobOpt::FindObjectTypeValue(StackSym* typeSym, BasicBlock* block)
  5484. {
  5485. return FindObjectTypeValue(typeSym->m_id, block);
  5486. }
  5487. Value *
  5488. GlobOpt::FindObjectTypeValue(SymID typeSymId, BasicBlock* block)
  5489. {
  5490. return FindObjectTypeValue(typeSymId, block->globOptData.symToValueMap, block->globOptData.liveFields);
  5491. }
  5492. Value *
  5493. GlobOpt::FindObjectTypeValue(StackSym* typeSym, GlobHashTable *valueNumberMap)
  5494. {
  5495. return FindObjectTypeValue(typeSym->m_id, valueNumberMap);
  5496. }
  5497. Value *
  5498. GlobOpt::FindObjectTypeValue(SymID typeSymId, GlobHashTable *valueNumberMap)
  5499. {
  5500. return FindObjectTypeValue(typeSymId, valueNumberMap, this->blockData.liveFields);
  5501. }
  5502. Value *
  5503. GlobOpt::FindObjectTypeValue(StackSym* typeSym, GlobHashTable *valueNumberMap, BVSparse<JitArenaAllocator>* liveFields)
  5504. {
  5505. return FindObjectTypeValue(typeSym->m_id, valueNumberMap, liveFields);
  5506. }
  5507. Value *
  5508. GlobOpt::FindObjectTypeValue(SymID typeSymId, GlobHashTable *valueNumberMap, BVSparse<JitArenaAllocator>* liveFields)
  5509. {
  5510. Assert(this->func->m_symTable->Find(typeSymId)->IsStackSym());
  5511. if (!liveFields->Test(typeSymId))
  5512. {
  5513. return nullptr;
  5514. }
  5515. return FindObjectTypeValueNoLivenessCheck(typeSymId, valueNumberMap);
  5516. }
  5517. Value *
  5518. GlobOpt::FindObjectTypeValueNoLivenessCheck(StackSym* typeSym, BasicBlock* block)
  5519. {
  5520. return FindObjectTypeValueNoLivenessCheck(typeSym->m_id, block->globOptData.symToValueMap);
  5521. }
  5522. Value *
  5523. GlobOpt::FindObjectTypeValueNoLivenessCheck(SymID typeSymId, GlobHashTable *valueNumberMap)
  5524. {
  5525. Value* value = FindValueFromHashTable(valueNumberMap, typeSymId);
  5526. Assert(value == nullptr || value->GetValueInfo()->IsJsType());
  5527. return value;
  5528. }
// Attempts to find a value for a property sym that has no direct entry yet, by
// backtracking through copy-prop information: find the property object's value
// (possibly through its single-def transfer source), map it to a copy-prop sym,
// and look up the equivalent property sym on that copy-prop sym.
// Returns nullptr when no value can be recovered.
Value *
GlobOpt::FindFuturePropertyValue(PropertySym *const propertySym)
{
    Assert(propertySym);

    // Try a direct lookup based on this sym
    Value *const value = FindValue(propertySym);
    if(value)
    {
        return value;
    }

    if(PHASE_OFF(Js::CopyPropPhase, func))
    {
        // Need to use copy-prop info to backtrack
        return nullptr;
    }

    // Try to get the property object's value
    StackSym *const objectSym = propertySym->m_stackSym;
    Value *objectValue = FindValue(objectSym);
    if(!objectValue)
    {
        // No value for the object sym directly; only a single-def sym can be
        // tracked backwards through its defining instruction.
        if(!objectSym->IsSingleDef())
        {
            return nullptr;
        }

        switch(objectSym->m_instrDef->m_opcode)
        {
            case Js::OpCode::Ld_A:
            case Js::OpCode::LdSlotArr:
            case Js::OpCode::LdSlot:
                // Allow only these op-codes for tracking the object sym's value transfer backwards. Other transfer op-codes
                // could be included here if this function is used in scenarios that need them.
                break;

            default:
                return nullptr;
        }

        // Try to get the property object's value from the src of the definition
        IR::Opnd *const objectTransferSrc = objectSym->m_instrDef->GetSrc1();
        if(!objectTransferSrc)
        {
            return nullptr;
        }
        if(objectTransferSrc->IsRegOpnd())
        {
            objectValue = FindValue(objectTransferSrc->AsRegOpnd()->m_sym);
        }
        else if(objectTransferSrc->IsSymOpnd())
        {
            Sym *const objectTransferSrcSym = objectTransferSrc->AsSymOpnd()->m_sym;
            if(objectTransferSrcSym->IsStackSym())
            {
                objectValue = FindValue(objectTransferSrcSym);
            }
            else
            {
                // The transfer source is itself a property sym: recurse.
                // About to make a recursive call, so when jitting in the foreground, probe the stack
                if(!func->IsBackgroundJIT())
                {
                    PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
                }
                objectValue = FindFuturePropertyValue(objectTransferSrcSym->AsPropertySym());
            }
        }
        else
        {
            return nullptr;
        }
        if(!objectValue)
        {
            return nullptr;
        }
    }

    // Try to use the property object's copy-prop sym and the property ID to find a mapped property sym, and get its value
    StackSym *const objectCopyPropSym = GetCopyPropSym(nullptr, objectValue);
    if(!objectCopyPropSym)
    {
        return nullptr;
    }
    PropertySym *const propertyCopyPropSym = PropertySym::Find(objectCopyPropSym->m_id, propertySym->m_propertyId, func);
    if(!propertyCopyPropSym)
    {
        return nullptr;
    }
    return FindValue(propertyCopyPropSym);
}
  5613. Value *
  5614. GlobOpt::FindValueFromHashTable(GlobHashTable *valueNumberMap, SymID symId)
  5615. {
  5616. Value ** valuePtr = valueNumberMap->Get(symId);
  5617. if (valuePtr == nullptr)
  5618. {
  5619. return 0;
  5620. }
  5621. return (*valuePtr);
  5622. }
  5623. StackSym *
  5624. GlobOpt::GetCopyPropSym(Sym * sym, Value * value)
  5625. {
  5626. return GetCopyPropSym(this->currentBlock, sym, value);
  5627. }
  5628. StackSym *
  5629. GlobOpt::GetCopyPropSym(BasicBlock * block, Sym * sym, Value * value)
  5630. {
  5631. ValueInfo *valueInfo = value->GetValueInfo();
  5632. Sym * copySym = valueInfo->GetSymStore();
  5633. if (!copySym)
  5634. {
  5635. return nullptr;
  5636. }
  5637. // Only copy prop stackSym, as a propertySym wouldn't improve anything.
  5638. // SingleDef info isn't flow sensitive, so make sure the symbol is actually live.
  5639. if (copySym->IsStackSym() && copySym != sym)
  5640. {
  5641. Assert(!copySym->AsStackSym()->IsTypeSpec());
  5642. Value *copySymVal = this->FindValue(block->globOptData.symToValueMap, valueInfo->GetSymStore());
  5643. if (copySymVal && copySymVal->GetValueNumber() == value->GetValueNumber())
  5644. {
  5645. if (valueInfo->IsVarConstant() && !GlobOpt::IsLive(copySym, block))
  5646. {
  5647. // Because the addrConstantToValueMap isn't flow-based, the symStore of
  5648. // varConstants may not be live.
  5649. return nullptr;
  5650. }
  5651. return copySym->AsStackSym();
  5652. }
  5653. }
  5654. return nullptr;
  5655. }
// Constant prop if possible, otherwise if this value already resides in another
// symbol, reuse this previous symbol. This should help register allocation.
//
// opnd            - the source (or indir base/index) operand being considered
// instr           - the instruction that uses opnd
// val             - the value tracked for opnd's sym
// parentIndirOpnd - when opnd is the base/index of an indir, that indir; otherwise null
//
// Returns the (possibly replaced) operand, or nullptr when the operand was freed.
IR::Opnd *
GlobOpt::CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd)
{
    Assert(
        parentIndirOpnd
            ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
            : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());

    if (this->IsLoopPrePass())
    {
        // Transformations are not legal in prepass...
        return opnd;
    }

    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // Don't copy prop in generator functions because non-bytecode temps that span a yield
        // cannot be saved and restored by the current bail-out mechanics utilized by generator
        // yield/resume.
        // TODO[generators][ianhall]: Enable copy-prop at least for in between yields.
        return opnd;
    }

    if (instr->m_opcode == Js::OpCode::CheckFixedFld || instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType)
    {
        // Don't copy prop into CheckFixedFld or CheckPropertyGuardAndLoadType
        return opnd;
    }

    // Don't copy-prop link operands of ExtendedArgs
    if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc2())
    {
        return opnd;
    }

    // Don't copy-prop operand of SIMD instr with ExtendedArg operands. Each instr should have its exclusive EA sequence.
    if (
        Js::IsSimd128Opcode(instr->m_opcode) &&
        instr->GetSrc1() != nullptr &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc2() == nullptr
       )
    {
        StackSym *sym = instr->GetSrc1()->GetStackSym();
        if (sym && sym->IsSingleDef() && sym->GetInstrDef()->m_opcode == Js::OpCode::ExtendArg_A)
        {
            return opnd;
        }
    }

    ValueInfo *valueInfo = val->GetValueInfo();

    // Constant prop?
    int32 intConstantValue;
    if (valueInfo->TryGetIntConstantValue(&intConstantValue))
    {
        if (PHASE_OFF(Js::ConstPropPhase, this->func))
        {
            return opnd;
        }

        if ((
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC
            ) && instr->GetSrc1() == opnd)
        {
            // Disabling prop to src of native array store, because we were losing the chance to type specialize.
            // Is it possible to type specialize this src if we allow constants, etc., to be prop'd here?
            if (instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray())
            {
                return opnd;
            }
        }

        // Not src1/src2, so opnd must be the base or index of an indir operand here.
        if(opnd != instr->GetSrc1() && opnd != instr->GetSrc2())
        {
            if(PHASE_OFF(Js::IndirCopyPropPhase, instr->m_func))
            {
                return opnd;
            }

            // Const-prop an indir opnd's constant index into its offset
            IR::Opnd *srcs[] = { instr->GetSrc1(), instr->GetSrc2(), instr->GetDst() };
            for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]); ++i)
            {
                const auto src = srcs[i];
                if(!src || !src->IsIndirOpnd())
                {
                    continue;
                }

                const auto indir = src->AsIndirOpnd();
                if(opnd == indir->GetIndexOpnd())
                {
                    Assert(indir->GetScale() == 0);
                    GOPT_TRACE_OPND(opnd, _u("Constant prop indir index into offset (value: %d)\n"), intConstantValue);
                    this->CaptureByteCodeSymUses(instr);
                    indir->SetOffset(intConstantValue);
                    indir->SetIndexOpnd(nullptr);
                }
            }
            return opnd;
        }

        // The constant must fit in a tagged int to be materialized as a var address.
        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            return opnd;
        }

        IR::Opnd *constOpnd;
        if (opnd->IsVar())
        {
            IR::AddrOpnd *addrOpnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked((int)intConstantValue), IR::AddrOpndKindConstantVar, instr->m_func);
            GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), addrOpnd->m_address, intConstantValue);
            constOpnd = addrOpnd;
        }
        else
        {
            // Note: Jit loop body generates some i32 operands...
            Assert(opnd->IsInt32() || opnd->IsInt64() || opnd->IsUInt32());
            IRType opndType;
            IntConstType constVal;
            if (opnd->IsUInt32())
            {
                // avoid sign extension
                constVal = (uint32)intConstantValue;
                opndType = TyUint32;
            }
            else
            {
                constVal = intConstantValue;
                opndType = TyInt32;
            }
            IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(constVal, opndType, instr->m_func);
            GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), intOpnd->GetImmediateValue(instr->m_func), intConstantValue);
            constOpnd = intOpnd;
        }

#if ENABLE_DEBUG_CONFIG_OPTIONS
        //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
        if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
        {
            instr->DumpFieldCopyPropTestTrace();
        }
#endif

        this->CaptureByteCodeSymUses(instr);
        // After ReplaceSrc, 'opnd' refers to the newly created constant operand.
        opnd = instr->ReplaceSrc(opnd, constOpnd);

        switch (instr->m_opcode)
        {
        case Js::OpCode::LdSlot:
        case Js::OpCode::LdSlotArr:
        case Js::OpCode::LdFld:
        case Js::OpCode::LdFldForTypeOf:
        case Js::OpCode::LdRootFldForTypeOf:
        case Js::OpCode::LdFldForCallApplyTarget:
        case Js::OpCode::LdRootFld:
        case Js::OpCode::LdMethodFld:
        case Js::OpCode::LdRootMethodFld:
        case Js::OpCode::LdMethodFromFlags:
        case Js::OpCode::ScopedLdMethodFld:
            instr->m_opcode = Js::OpCode::Ld_A;
            // Intentional fall-through: the load was just rewritten into Ld_A above,
            // so it gets the same single-def int-const marking as a plain Ld_A.
        case Js::OpCode::Ld_A:
            {
                IR::Opnd * dst = instr->GetDst();
                if (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsSingleDef())
                {
                    dst->AsRegOpnd()->m_sym->SetIsIntConst((int)intConstantValue);
                }
                break;
            }
        case Js::OpCode::ArgOut_A:
        case Js::OpCode::ArgOut_A_Inline:
        case Js::OpCode::ArgOut_A_FixupForStackArgs:
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            if (instr->GetDst()->IsRegOpnd())
            {
                Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
                instr->GetDst()->AsRegOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
            }
            else
            {
                instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
            }
            break;

        case Js::OpCode::TypeofElem:
            instr->m_opcode = Js::OpCode::Typeof;
            break;

        case Js::OpCode::StSlotChkUndecl:
            if (instr->GetSrc2() == opnd)
            {
                // Src2 here should refer to the same location as the Dst operand, which we need to keep live
                // due to the implicit read for ChkUndecl.
                instr->m_opcode = Js::OpCode::StSlot;
                instr->FreeSrc2();
                opnd = nullptr;
            }
            break;
        }
        return opnd;
    }

    // Not an int constant: try copy prop through the value's symStore.
    Sym *opndSym = nullptr;
    if (opnd->IsRegOpnd())
    {
        IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
        opndSym = regOpnd->m_sym;
    }
    else if (opnd->IsSymOpnd())
    {
        IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
        opndSym = symOpnd->m_sym;
    }
    if (!opndSym)
    {
        return opnd;
    }

    if (PHASE_OFF(Js::CopyPropPhase, this->func))
    {
        // Even with copy prop off, record this sym as the value's home so later
        // uses of the value can be associated with it.
        this->SetSymStoreDirect(valueInfo, opndSym);
        return opnd;
    }

    // We should have dealt with field hoist already
    Assert(!GlobOpt::TransferSrcValue(instr) || !opndSym->IsPropertySym() ||
        !this->IsHoistedPropertySym(opndSym->AsPropertySym()));

    StackSym *copySym = this->GetCopyPropSym(opndSym, val);
    if (copySym != nullptr)
    {
        // Copy prop.
        return CopyPropReplaceOpnd(instr, opnd, copySym, parentIndirOpnd);
    }
    else
    {
        if (valueInfo->GetSymStore() && instr->m_opcode == Js::OpCode::Ld_A && instr->GetDst()->IsRegOpnd()
            && valueInfo->GetSymStore() == instr->GetDst()->AsRegOpnd()->m_sym)
        {
            // Avoid resetting symStore after fieldHoisting:
            //  t1 = LdFld field   <- set symStore to fieldHoistSym
            //  fieldHoistSym = Ld_A t1  <- we're looking at t1 now, but want to copy-prop fieldHoistSym forward
            return opnd;
        }
        this->SetSymStoreDirect(valueInfo, opndSym);
    }
    return opnd;
}
  5888. IR::Opnd *
  5889. GlobOpt::CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd)
  5890. {
  5891. Assert(
  5892. parentIndirOpnd
  5893. ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
  5894. : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
  5895. Assert(GlobOpt::IsLive(copySym, this->currentBlock));
  5896. IR::RegOpnd *regOpnd;
  5897. StackSym *newSym = copySym;
  5898. GOPT_TRACE_OPND(opnd, _u("Copy prop s%d\n"), newSym->m_id);
  5899. #if ENABLE_DEBUG_CONFIG_OPTIONS
  5900. //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
  5901. if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
  5902. {
  5903. instr->DumpFieldCopyPropTestTrace();
  5904. }
  5905. #endif
  5906. this->CaptureByteCodeSymUses(instr);
  5907. if (opnd->IsRegOpnd())
  5908. {
  5909. regOpnd = opnd->AsRegOpnd();
  5910. regOpnd->m_sym = newSym;
  5911. regOpnd->SetIsJITOptimizedReg(true);
  5912. // The dead bit on the opnd is specific to the sym it is referencing. Since we replaced the sym, the bit is reset.
  5913. regOpnd->SetIsDead(false);
  5914. if(parentIndirOpnd)
  5915. {
  5916. return regOpnd;
  5917. }
  5918. }
  5919. else
  5920. {
  5921. // If this is an object type specialized field load inside a loop, and it produces a type value which wasn't live
  5922. // before, make sure the type check is left in the loop, because it may be the last type check in the loop protecting
  5923. // other fields which are not hoistable and are lexically upstream in the loop. If the check is not ultimately
  5924. // needed, the dead store pass will remove it.
  5925. if (this->currentBlock->loop != nullptr && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  5926. {
  5927. IR::PropertySymOpnd* propertySymOpnd = opnd->AsPropertySymOpnd();
  5928. if (CheckIfPropOpEmitsTypeCheck(instr, propertySymOpnd))
  5929. {
  5930. // We only set guarded properties in the dead store pass, so they shouldn't be set here yet. If they were
  5931. // we would need to move them from this operand to the operand which is being copy propagated.
  5932. Assert(propertySymOpnd->GetGuardedPropOps() == nullptr);
  5933. // We're creating a copy of this operand to be reused in the same spot in the flow, so we can copy all
  5934. // flow sensitive fields. However, we will do only a type check here (no property access) and only for
  5935. // the sake of downstream instructions, so the flags pertaining to this property access are irrelevant.
  5936. IR::PropertySymOpnd* checkObjTypeOpnd = CreateOpndForTypeCheckOnly(propertySymOpnd, instr->m_func);
  5937. IR::Instr* checkObjTypeInstr = IR::Instr::New(Js::OpCode::CheckObjType, instr->m_func);
  5938. checkObjTypeInstr->SetSrc1(checkObjTypeOpnd);
  5939. checkObjTypeInstr->SetByteCodeOffset(instr);
  5940. instr->InsertBefore(checkObjTypeInstr);
  5941. // Since we inserted this instruction before the one that is being processed in natural flow, we must process
  5942. // it for object type spec explicitly here.
  5943. FinishOptPropOp(checkObjTypeInstr, checkObjTypeOpnd);
  5944. Assert(!propertySymOpnd->IsTypeChecked());
  5945. checkObjTypeInstr = this->SetTypeCheckBailOut(checkObjTypeOpnd, checkObjTypeInstr, nullptr);
  5946. Assert(checkObjTypeInstr->HasBailOutInfo());
  5947. if (this->currentBlock->loop && !this->IsLoopPrePass())
  5948. {
  5949. // Try hoisting this checkObjType.
  5950. // But since this isn't the current instr being optimized, we need to play tricks with
  5951. // the byteCodeUse fields...
  5952. BVSparse<JitArenaAllocator> *currentBytecodeUses = this->byteCodeUses;
  5953. PropertySym * currentPropertySymUse = this->propertySymUse;
  5954. PropertySym * tempPropertySymUse = NULL;
  5955. this->byteCodeUses = NULL;
  5956. BVSparse<JitArenaAllocator> *tempByteCodeUse = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  5957. #if DBG
  5958. BVSparse<JitArenaAllocator> *currentBytecodeUsesBeforeOpt = this->byteCodeUsesBeforeOpt;
  5959. this->byteCodeUsesBeforeOpt = tempByteCodeUse;
  5960. #endif
  5961. this->propertySymUse = NULL;
  5962. GlobOpt::TrackByteCodeSymUsed(checkObjTypeInstr, tempByteCodeUse, &tempPropertySymUse);
  5963. TryHoistInvariant(checkObjTypeInstr, this->currentBlock, NULL, this->FindValue(copySym), NULL, true);
  5964. this->byteCodeUses = currentBytecodeUses;
  5965. this->propertySymUse = currentPropertySymUse;
  5966. #if DBG
  5967. this->byteCodeUsesBeforeOpt = currentBytecodeUsesBeforeOpt;
  5968. #endif
  5969. }
  5970. }
  5971. }
  5972. if (opnd->IsSymOpnd() && opnd->GetIsDead())
  5973. {
  5974. // Take the property sym out of the live fields set
  5975. this->EndFieldLifetime(opnd->AsSymOpnd());
  5976. }
  5977. regOpnd = IR::RegOpnd::New(newSym, opnd->GetType(), instr->m_func);
  5978. regOpnd->SetIsJITOptimizedReg(true);
  5979. instr->ReplaceSrc(opnd, regOpnd);
  5980. }
  5981. switch (instr->m_opcode)
  5982. {
  5983. case Js::OpCode::Ld_A:
  5984. if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
  5985. instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
  5986. {
  5987. this->InsertByteCodeUses(instr, true);
  5988. instr->m_opcode = Js::OpCode::Nop;
  5989. }
  5990. break;
  5991. case Js::OpCode::LdSlot:
  5992. case Js::OpCode::LdSlotArr:
  5993. if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
  5994. instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
  5995. {
  5996. this->InsertByteCodeUses(instr, true);
  5997. instr->m_opcode = Js::OpCode::Nop;
  5998. }
  5999. else
  6000. {
  6001. instr->m_opcode = Js::OpCode::Ld_A;
  6002. }
  6003. break;
  6004. case Js::OpCode::StSlotChkUndecl:
  6005. if (instr->GetSrc2()->IsRegOpnd())
  6006. {
  6007. // Src2 here should refer to the same location as the Dst operand, which we need to keep live
  6008. // due to the implicit read for ChkUndecl.
  6009. instr->m_opcode = Js::OpCode::StSlot;
  6010. instr->FreeSrc2();
  6011. return nullptr;
  6012. }
  6013. break;
  6014. case Js::OpCode::LdFld:
  6015. case Js::OpCode::LdFldForTypeOf:
  6016. case Js::OpCode::LdRootFldForTypeOf:
  6017. case Js::OpCode::LdFldForCallApplyTarget:
  6018. case Js::OpCode::LdRootFld:
  6019. case Js::OpCode::LdMethodFld:
  6020. case Js::OpCode::LdRootMethodFld:
  6021. case Js::OpCode::ScopedLdMethodFld:
  6022. instr->m_opcode = Js::OpCode::Ld_A;
  6023. break;
  6024. case Js::OpCode::LdMethodFromFlags:
  6025. // The bailout is checked on the loop top and we don't need to check bailout again in loop.
  6026. instr->m_opcode = Js::OpCode::Ld_A;
  6027. instr->ClearBailOutInfo();
  6028. break;
  6029. case Js::OpCode::TypeofElem:
  6030. instr->m_opcode = Js::OpCode::Typeof;
  6031. break;
  6032. }
  6033. this->MarkTempLastUse(instr, regOpnd);
  6034. return regOpnd;
  6035. }
  6036. void
  6037. GlobOpt::MarkTempLastUse(IR::Instr *instr, IR::RegOpnd *regOpnd)
  6038. {
  6039. if (OpCodeAttr::NonTempNumberSources(instr->m_opcode))
  6040. {
  6041. // Turn off bit if opcode could cause the src to be aliased.
  6042. this->blockData.isTempSrc->Clear(regOpnd->m_sym->m_id);
  6043. }
  6044. else if (this->blockData.isTempSrc->Test(regOpnd->m_sym->m_id))
  6045. {
  6046. // We just mark things that are temp in the globopt phase.
  6047. // The backwards phase will turn this off if it is not the last use.
  6048. // The isTempSrc is freed at the end of each block, which is why the backwards phase can't
  6049. // just use it.
  6050. if (!PHASE_OFF(Js::BackwardPhase, this->func) && !this->IsLoopPrePass())
  6051. {
  6052. regOpnd->m_isTempLastUse = true;
  6053. }
  6054. }
  6055. }
  6056. ValueNumber
  6057. GlobOpt::NewValueNumber()
  6058. {
  6059. ValueNumber valueNumber = this->currentValue++;
  6060. if (valueNumber == 0)
  6061. {
  6062. Js::Throw::OutOfMemory();
  6063. }
  6064. return valueNumber;
  6065. }
  6066. Value *GlobOpt::NewValue(ValueInfo *const valueInfo)
  6067. {
  6068. return NewValue(NewValueNumber(), valueInfo);
  6069. }
  6070. Value *GlobOpt::NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo)
  6071. {
  6072. Assert(valueInfo);
  6073. return Value::New(alloc, valueNumber, valueInfo);
  6074. }
  6075. Value *GlobOpt::CopyValue(Value *const value)
  6076. {
  6077. return CopyValue(value, NewValueNumber());
  6078. }
  6079. Value *GlobOpt::CopyValue(Value *const value, const ValueNumber valueNumber)
  6080. {
  6081. Assert(value);
  6082. return value->Copy(alloc, valueNumber);
  6083. }
  6084. Value *
  6085. GlobOpt::NewGenericValue(const ValueType valueType)
  6086. {
  6087. return NewGenericValue(valueType, static_cast<IR::Opnd *>(nullptr));
  6088. }
  6089. Value *
  6090. GlobOpt::NewGenericValue(const ValueType valueType, IR::Opnd *const opnd)
  6091. {
  6092. // Shouldn't assign a likely-int value to something that is definitely not an int
  6093. Assert(!(valueType.IsLikelyInt() && opnd && opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->m_isNotInt));
  6094. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  6095. Value *val = NewValue(valueInfo);
  6096. TrackNewValueForKills(val);
  6097. this->InsertNewValue(val, opnd);
  6098. return val;
  6099. }
  6100. Value *
  6101. GlobOpt::NewGenericValue(const ValueType valueType, Sym *const sym)
  6102. {
  6103. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  6104. Value *val = NewValue(valueInfo);
  6105. TrackNewValueForKills(val);
  6106. this->SetValue(&this->blockData, val, sym);
  6107. return val;
  6108. }
Value *
GlobOpt::GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd)
{
    // Returns a value for the given int constant, reusing the value already
    // live in the current block when possible; otherwise creates a new one.
    Value *value = nullptr;
    Value *const cachedValue = this->intConstantToValueMap->Lookup(intConst, nullptr);

    if(cachedValue)
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
        if (symStore && IsLive(symStore, &blockData))
        {
            Value *const symStoreValue = FindValue(symStore);
            int32 symStoreIntConstantValue;
            // Reuse only if the sym store's value in this block still carries
            // the cached value number AND still denotes the same constant;
            // otherwise the cache entry is stale for this block.
            if (symStoreValue &&
                symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
                symStoreValue->GetValueInfo()->TryGetIntConstantValue(&symStoreIntConstantValue) &&
                symStoreIntConstantValue == intConst)
            {
                value = symStoreValue;
            }
        }
    }

    if (!value)
    {
        // No reusable cached value: create one. Only non-overflowing
        // (taggable) ints are candidates for the constant-load hoisting done
        // inside NewIntConstantValue.
        value = NewIntConstantValue(intConst, instr, !Js::TaggedInt::IsOverflow(intConst));
    }

    return this->InsertNewValue(value, opnd);
}
Value *
GlobOpt::NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable)
{
    // Creates a fresh value for an int constant and registers it in the
    // global int-constant cache. Taggable constants may additionally get
    // their load hoisted to the function entry so the value acquires a
    // sym store (which enables downstream CSE).
    Value * value = NewValue(IntConstantValueInfo::New(this->alloc, intConst));
    this->intConstantToValueMap->Item(intConst, value);

    if (isTaggable &&
        !PHASE_OFF(Js::HoistConstIntPhase, this->func))
    {
        // When creating a new int constant value, make sure it gets a symstore. If the int const doesn't have a symstore,
        // any downstream instruction using the same int will have to create a new value (object) for the int.
        // This gets in the way of CSE.
        value = HoistConstantLoadAndPropagateValueBackward(Js::TaggedInt::ToVarUnchecked(intConst), instr, value);

        // If hoisting didn't give the value a sym store (it bails out in the
        // loop prepass, for instance), fall back to using this instruction's
        // own dst sym as the store — but only for plain constant-load opcodes.
        if (!value->GetValueInfo()->GetSymStore() &&
            (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
        {
            StackSym * sym = instr->GetDst()->GetStackSym();
            Assert(sym);
            if (sym->IsTypeSpec())
            {
                Assert(sym->IsInt32());
                // Track the value under the var-equivalent sym and mark the
                // int32-specialized form live in this block.
                StackSym * varSym = sym->GetVarEquivSym(instr->m_func);
                SetValue(&this->currentBlock->globOptData, value, varSym);
                this->currentBlock->globOptData.liveInt32Syms->Set(varSym->m_id);
            }
            else
            {
                SetValue(&this->currentBlock->globOptData, value, sym);
                this->currentBlock->globOptData.liveVarSyms->Set(sym->m_id);
            }
        }
    }
    return value;
}
  6175. ValueInfo *
  6176. GlobOpt::NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout)
  6177. {
  6178. if (min == max)
  6179. {
  6180. // Since int constant values are const-propped, negative zero tracking does not track them, and so it's okay to ignore
  6181. // 'wasNegativeZeroPreventedByBailout'
  6182. return IntConstantValueInfo::New(this->alloc, max);
  6183. }
  6184. return IntRangeValueInfo::New(this->alloc, min, max, wasNegativeZeroPreventedByBailout);
  6185. }
  6186. ValueInfo *GlobOpt::NewIntRangeValueInfo(
  6187. const ValueInfo *const originalValueInfo,
  6188. const int32 min,
  6189. const int32 max) const
  6190. {
  6191. Assert(originalValueInfo);
  6192. ValueInfo *valueInfo;
  6193. if(min == max)
  6194. {
  6195. // Since int constant values are const-propped, negative zero tracking does not track them, and so it's okay to ignore
  6196. // 'wasNegativeZeroPreventedByBailout'
  6197. valueInfo = IntConstantValueInfo::New(alloc, min);
  6198. }
  6199. else
  6200. {
  6201. valueInfo =
  6202. IntRangeValueInfo::New(
  6203. alloc,
  6204. min,
  6205. max,
  6206. min <= 0 && max >= 0 && originalValueInfo->WasNegativeZeroPreventedByBailout());
  6207. }
  6208. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  6209. return valueInfo;
  6210. }
  6211. Value *
  6212. GlobOpt::NewIntRangeValue(
  6213. const int32 min,
  6214. const int32 max,
  6215. const bool wasNegativeZeroPreventedByBailout,
  6216. IR::Opnd *const opnd)
  6217. {
  6218. ValueInfo *valueInfo = this->NewIntRangeValueInfo(min, max, wasNegativeZeroPreventedByBailout);
  6219. Value *val = NewValue(valueInfo);
  6220. if (opnd)
  6221. {
  6222. GOPT_TRACE_OPND(opnd, _u("Range %d (0x%X) to %d (0x%X)\n"), min, min, max, max);
  6223. }
  6224. this->InsertNewValue(val, opnd);
  6225. return val;
  6226. }
  6227. IntBoundedValueInfo *GlobOpt::NewIntBoundedValueInfo(
  6228. const ValueInfo *const originalValueInfo,
  6229. const IntBounds *const bounds) const
  6230. {
  6231. Assert(originalValueInfo);
  6232. bounds->Verify();
  6233. IntBoundedValueInfo *const valueInfo =
  6234. IntBoundedValueInfo::New(
  6235. originalValueInfo->Type(),
  6236. bounds,
  6237. (
  6238. bounds->ConstantLowerBound() <= 0 &&
  6239. bounds->ConstantUpperBound() >= 0 &&
  6240. originalValueInfo->WasNegativeZeroPreventedByBailout()
  6241. ),
  6242. alloc);
  6243. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  6244. return valueInfo;
  6245. }
  6246. Value *GlobOpt::NewIntBoundedValue(
  6247. const ValueType valueType,
  6248. const IntBounds *const bounds,
  6249. const bool wasNegativeZeroPreventedByBailout,
  6250. IR::Opnd *const opnd)
  6251. {
  6252. Value *const value = NewValue(IntBoundedValueInfo::New(valueType, bounds, wasNegativeZeroPreventedByBailout, alloc));
  6253. InsertNewValue(value, opnd);
  6254. return value;
  6255. }
  6256. Value *
  6257. GlobOpt::NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd)
  6258. {
  6259. FloatConstantValueInfo *valueInfo = FloatConstantValueInfo::New(this->alloc, floatValue);
  6260. Value *val = NewValue(valueInfo);
  6261. this->InsertNewValue(val, opnd);
  6262. return val;
  6263. }
Value *
GlobOpt::GetVarConstantValue(IR::AddrOpnd *addrOpnd)
{
    // Returns a value for the given address constant, preferring a value
    // already live in the current block — matched by address, or (for string
    // constants) by string content — and creating a new var-constant value
    // otherwise. Also stamps the operand with the resulting value type.
    bool isVar = addrOpnd->IsVar();
    bool isString = isVar && addrOpnd->m_localAddress && JITJavascriptString::Is(addrOpnd->m_localAddress);
    Value *val = nullptr;
    Value *cachedValue;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && IsLive(symStore, &blockData))
        {
            Value *const symStoreValue = FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                // Reuse only if the live value still denotes this exact address.
                if(symStoreValueInfo->IsVarConstant() && symStoreValueInfo->AsVarConstant()->VarValue() == addrOpnd->m_address)
                {
                    val = symStoreValue;
                }
            }
        }
    }
    else if (isString)
    {
        // No address match: for strings, also try matching by content so that
        // distinct string instances with equal characters share one value.
        JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
        Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
        if (this->stringConstantToValueMap->TryGetValue(internalString, &cachedValue))
        {
            // Same staleness caveats as the address cache above: validate the
            // cached value against the sym store's live value in this block.
            Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
            if (symStore && IsLive(symStore, &blockData))
            {
                Value *const symStoreValue = FindValue(symStore);
                if (symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
                {
                    ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                    if (symStoreValueInfo->IsVarConstant())
                    {
                        // Compare the cached string's characters against ours
                        // before reusing the value.
                        JITJavascriptString * cachedString = JITJavascriptString::FromVar(symStoreValue->GetValueInfo()->AsVarConstant()->VarValue(true));
                        Js::InternalString cachedInternalString(cachedString->GetString(), cachedString->GetLength());
                        if (Js::InternalStringComparer::Equals(internalString, cachedInternalString))
                        {
                            val = symStoreValue;
                        }
                    }
                }
            }
        }
    }
    if(!val)
    {
        val = NewVarConstantValue(addrOpnd, isString);
    }
    addrOpnd->SetValueType(val->GetValueInfo()->Type());
    return val;
}
  6326. Value *
  6327. GlobOpt::NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString)
  6328. {
  6329. VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, addrOpnd->m_address, addrOpnd->GetValueType(), false, addrOpnd->m_localAddress);
  6330. Value * value = NewValue(valueInfo);
  6331. this->addrConstantToValueMap->Item(addrOpnd->m_address, value);
  6332. if (isString)
  6333. {
  6334. JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
  6335. Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
  6336. this->stringConstantToValueMap->Item(internalString, value);
  6337. }
  6338. return value;
  6339. }
Value *
GlobOpt::HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value)
{
    // Hoists a load of the constant to the top of the function so the value
    // acquires a single sym store, then propagates the value backward through
    // all blocks between entry and the current block so it is available on
    // merges. Skipped in the loop prepass, and when the original instruction
    // already sits in the entry block and transfers its src value.
    if (this->IsLoopPrePass() ||
        ((this->currentBlock == this->func->m_fg->blockList) &&
        TransferSrcValue(origInstr)))
    {
        return value;
    }

    // Only hoisting taggable int const loads for now. Could be extended to other constants (floats, strings, addr opnds) if we see some benefit.
    Assert(Js::TaggedInt::Is(varConst));

    // Insert a load of the constant at the top of the function
    StackSym * dstSym = StackSym::New(this->func);
    IR::RegOpnd * constRegOpnd = IR::RegOpnd::New(dstSym, TyVar, this->func);
    IR::Instr * loadInstr = IR::Instr::NewConstantLoad(constRegOpnd, (intptr_t)varConst, ValueType::GetInt(true), this->func);
    this->func->m_fg->blockList->GetFirstInstr()->InsertAfter(loadInstr);

    // Type-spec the load (Support for floats needs to be added when we start hoisting float constants).
    bool typeSpecedToInt = false;
    if (Js::TaggedInt::Is(varConst) && !IsTypeSpecPhaseOff(this->func))
    {
        typeSpecedToInt = true;
        loadInstr->m_opcode = Js::OpCode::Ld_I4;
        ToInt32Dst(loadInstr, loadInstr->GetDst()->AsRegOpnd(), this->currentBlock);
        loadInstr->GetDst()->GetStackSym()->SetIsConst();
    }
    else
    {
        this->currentBlock->globOptData.liveVarSyms->Set(dstSym->m_id);
    }

    // Add the value (object) to the current block's symToValueMap and propagate the value backward to all relevant blocks so it is available on merges.
    value = this->InsertNewValue(value, constRegOpnd);

    // Pointer-to-member selecting which liveness bit-vector to update in each
    // block, depending on whether the load was type-specialized to int32.
    BVSparse<JitArenaAllocator>* GlobOptBlockData::*bv;
    bv = typeSpecedToInt ? &GlobOptBlockData::liveInt32Syms : &GlobOptBlockData::liveVarSyms; // Will need to be expanded when we start hoisting float constants.

    if (this->currentBlock != this->func->m_fg->blockList)
    {
        // Walk backward from the current block toward the entry block, marking
        // the new sym live and installing a copy of the value (same value
        // number) in each visited block's data.
        for (InvariantBlockBackwardIterator it(this, this->currentBlock, this->func->m_fg->blockList, nullptr);
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock * block = it.Block();
            (block->globOptData.*bv)->Set(dstSym->m_id);
            Assert(!FindValue(block->globOptData.symToValueMap, dstSym));
            Value *const valueCopy = CopyValue(value, value->GetValueNumber());
            SetValue(&block->globOptData, valueCopy, dstSym);
        }
    }
    return value;
}
Value *
GlobOpt::NewFixedFunctionValue(Js::JavascriptFunction *function, IR::AddrOpnd *addrOpnd)
{
    // Returns a var-constant value marked as a (fixed) function for the given
    // address operand, reusing a live cached value for the same address when
    // possible; otherwise creates and caches a new one.
    Assert(function != nullptr);

    Value *val = nullptr;
    Value *cachedValue;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && IsLive(symStore, &blockData))
        {
            Value *const symStoreValue = FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                if(symStoreValueInfo->IsVarConstant())
                {
                    VarConstantValueInfo *const symStoreVarConstantValueInfo = symStoreValueInfo->AsVarConstant();
                    // Reuse only if the live value denotes this exact address
                    // and is already marked as a function value.
                    if(symStoreVarConstantValueInfo->VarValue() == addrOpnd->m_address &&
                        symStoreVarConstantValueInfo->IsFunction())
                    {
                        val = symStoreValue;
                    }
                }
            }
        }
    }

    if(!val)
    {
        // The 'true' argument marks this var-constant value as a function.
        VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, function, addrOpnd->GetValueType(), true, addrOpnd->m_localAddress);
        val = NewValue(valueInfo);
        this->addrConstantToValueMap->AddNew(addrOpnd->m_address, val);
    }

    this->InsertNewValue(val, addrOpnd);
    return val;
}
  6430. Value *
  6431. GlobOpt::InsertNewValue(Value *val, IR::Opnd *opnd)
  6432. {
  6433. return this->InsertNewValue(&this->blockData, val, opnd);
  6434. }
  6435. Value *
  6436. GlobOpt::InsertNewValue(GlobOptBlockData *blockData, Value *val, IR::Opnd *opnd)
  6437. {
  6438. return this->SetValue(blockData, val, opnd);
  6439. }
  6440. void
  6441. GlobOpt::SetValueToHashTable(GlobHashTable *valueNumberMap, Value *val, Sym *sym)
  6442. {
  6443. Value **pValue = valueNumberMap->FindOrInsertNew(sym);
  6444. *pValue = val;
  6445. }
  6446. StackSym *GlobOpt::GetTaggedIntConstantStackSym(const int32 intConstantValue) const
  6447. {
  6448. Assert(!Js::TaggedInt::IsOverflow(intConstantValue));
  6449. return intConstantToStackSymMap->Lookup(intConstantValue, nullptr);
  6450. }
  6451. StackSym *GlobOpt::GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const
  6452. {
  6453. StackSym *stackSym = GetTaggedIntConstantStackSym(intConstantValue);
  6454. if(stackSym)
  6455. {
  6456. return stackSym;
  6457. }
  6458. stackSym = StackSym::New(TyVar,func);
  6459. intConstantToStackSymMap->Add(intConstantValue, stackSym);
  6460. return stackSym;
  6461. }
  6462. Sym *
  6463. GlobOpt::SetSymStore(ValueInfo *valueInfo, Sym *sym)
  6464. {
  6465. if (sym->IsStackSym())
  6466. {
  6467. StackSym *stackSym = sym->AsStackSym();
  6468. if (stackSym->IsTypeSpec())
  6469. {
  6470. stackSym = stackSym->GetVarEquivSym(this->func);
  6471. sym = stackSym;
  6472. }
  6473. }
  6474. if (valueInfo->GetSymStore() == nullptr || valueInfo->GetSymStore()->IsPropertySym())
  6475. {
  6476. SetSymStoreDirect(valueInfo, sym);
  6477. }
  6478. return sym;
  6479. }
  6480. void
  6481. GlobOpt::SetSymStoreDirect(ValueInfo * valueInfo, Sym * sym)
  6482. {
  6483. Sym * prevSymStore = valueInfo->GetSymStore();
  6484. if (prevSymStore && prevSymStore->IsStackSym() &&
  6485. prevSymStore->AsStackSym()->HasByteCodeRegSlot())
  6486. {
  6487. this->SetChangedSym(prevSymStore->m_id);
  6488. }
  6489. valueInfo->SetSymStore(sym);
  6490. }
  6491. void
  6492. GlobOpt::SetChangedSym(SymID symId)
  6493. {
  6494. // this->currentBlock might not be the one which contain the changing symId,
  6495. // like hoisting invariant, but more changed symId is overly conservative and safe.
  6496. // symId in the hoisted to block is marked as JITOptimizedReg so it does't affect bailout.
  6497. GlobOptBlockData * globOptData = &this->currentBlock->globOptData;
  6498. if (globOptData->changedSyms)
  6499. {
  6500. globOptData = &this->currentBlock->globOptData;
  6501. globOptData->changedSyms->Set(symId);
  6502. if (globOptData->capturedValuesCandidate != nullptr)
  6503. {
  6504. this->changedSymsAfterIncBailoutCandidate->Set(symId);
  6505. }
  6506. }
  6507. // else could be hit only in MergeValues and it is handled by MergeCapturedValues
  6508. }
  6509. void
  6510. GlobOpt::SetValue(GlobOptBlockData *blockData, Value *val, Sym * sym)
  6511. {
  6512. ValueInfo *valueInfo = val->GetValueInfo();
  6513. sym = this->SetSymStore(valueInfo, sym);
  6514. bool isStackSym = sym->IsStackSym();
  6515. if (isStackSym && sym->AsStackSym()->IsFromByteCodeConstantTable())
  6516. {
  6517. // Put the constants in a global array. This will minimize the per-block info.
  6518. this->byteCodeConstantValueArray->Set(sym->m_id, val);
  6519. this->byteCodeConstantValueNumbersBv->Set(val->GetValueNumber());
  6520. }
  6521. else
  6522. {
  6523. SetValueToHashTable(blockData->symToValueMap, val, sym);
  6524. if (isStackSym && sym->AsStackSym()->HasByteCodeRegSlot())
  6525. {
  6526. this->SetChangedSym(sym->m_id);
  6527. }
  6528. }
  6529. }
  6530. Value *
  6531. GlobOpt::SetValue(GlobOptBlockData *blockData, Value *val, IR::Opnd *opnd)
  6532. {
  6533. if (opnd)
  6534. {
  6535. Sym *sym;
  6536. switch (opnd->GetKind())
  6537. {
  6538. case IR::OpndKindSym:
  6539. sym = opnd->AsSymOpnd()->m_sym;
  6540. break;
  6541. case IR::OpndKindReg:
  6542. sym = opnd->AsRegOpnd()->m_sym;
  6543. break;
  6544. default:
  6545. sym = nullptr;
  6546. }
  6547. if (sym)
  6548. {
  6549. SetValue(blockData, val, sym);
  6550. }
  6551. }
  6552. return val;
  6553. }
  6554. // Figure out the Value of this dst.
  6555. Value *
  6556. GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
  6557. {
  6558. IR::Instr *&instr = *pInstr;
  6559. IR::Opnd *dst = instr->GetDst();
  6560. Value *dstVal = nullptr;
  6561. Sym *sym;
  6562. if (instr->CallsSetter())
  6563. {
  6564. return nullptr;
  6565. }
  6566. if (dst == nullptr)
  6567. {
  6568. return nullptr;
  6569. }
  6570. switch (dst->GetKind())
  6571. {
  6572. case IR::OpndKindSym:
  6573. sym = dst->AsSymOpnd()->m_sym;
  6574. break;
  6575. case IR::OpndKindReg:
  6576. sym = dst->AsRegOpnd()->m_sym;
  6577. if (OpCodeAttr::TempNumberProducing(instr->m_opcode))
  6578. {
  6579. this->blockData.isTempSrc->Set(sym->m_id);
  6580. }
  6581. else if (OpCodeAttr::TempNumberTransfer(instr->m_opcode))
  6582. {
  6583. IR::Opnd *src1 = instr->GetSrc1();
  6584. if (src1->IsRegOpnd() && this->blockData.isTempSrc->Test(src1->AsRegOpnd()->m_sym->m_id))
  6585. {
  6586. StackSym *src1Sym = src1->AsRegOpnd()->m_sym;
  6587. // isTempSrc is used for marking isTempLastUse, which is used to generate AddLeftDead()
  6588. // calls instead of the normal Add helpers. It tells the runtime that concats can use string
  6589. // builders.
  6590. // We need to be careful in the case where src1 points to a string builder and is getting aliased.
  6591. // Clear the bit on src and dst of the transfer instr in this case, unless we can prove src1
  6592. // isn't pointing at a string builder, like if it is single def and the def instr is not an Add,
  6593. // but TempProducing.
  6594. if (src1Sym->IsSingleDef() && src1Sym->m_instrDef->m_opcode != Js::OpCode::Add_A
  6595. && OpCodeAttr::TempNumberProducing(src1Sym->m_instrDef->m_opcode))
  6596. {
  6597. this->blockData.isTempSrc->Set(sym->m_id);
  6598. }
  6599. else
  6600. {
  6601. this->blockData.isTempSrc->Clear(src1->AsRegOpnd()->m_sym->m_id);
  6602. this->blockData.isTempSrc->Clear(sym->m_id);
  6603. }
  6604. }
  6605. else
  6606. {
  6607. this->blockData.isTempSrc->Clear(sym->m_id);
  6608. }
  6609. }
  6610. else
  6611. {
  6612. this->blockData.isTempSrc->Clear(sym->m_id);
  6613. }
  6614. break;
  6615. case IR::OpndKindIndir:
  6616. return nullptr;
  6617. default:
  6618. return nullptr;
  6619. }
  6620. int32 min1, max1, min2, max2, newMin, newMax;
  6621. ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
  6622. ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
  6623. switch (instr->m_opcode)
  6624. {
  6625. case Js::OpCode::Conv_PrimStr:
  6626. AssertMsg(instr->GetDst()->GetValueType().IsString(),
  6627. "Creator of this instruction should have set the type");
  6628. if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsPrimitive())
  6629. {
  6630. break;
  6631. }
  6632. instr->m_opcode = Js::OpCode::Conv_Str;
  6633. // fall-through
  6634. case Js::OpCode::Conv_Str:
  6635. // This opcode is commented out since we don't track regex information in GlobOpt now.
  6636. //case Js::OpCode::Coerce_Regex:
  6637. case Js::OpCode::Coerce_Str:
  6638. AssertMsg(instr->GetDst()->GetValueType().IsString(),
  6639. "Creator of this instruction should have set the type");
  6640. // fall-through
  6641. case Js::OpCode::Coerce_StrOrRegex:
  6642. // We don't set the ValueType of src1 for Coerce_StrOrRegex, hence skip the ASSERT
  6643. if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsString())
  6644. {
  6645. break;
  6646. }
  6647. instr->m_opcode = Js::OpCode::Ld_A;
  6648. // fall-through
  6649. case Js::OpCode::BytecodeArgOutCapture:
  6650. case Js::OpCode::InitConst:
  6651. case Js::OpCode::LdAsmJsFunc:
  6652. case Js::OpCode::Ld_A:
  6653. case Js::OpCode::Ld_I4:
  6654. // Propagate sym attributes across the reg copy.
  6655. if (!this->IsLoopPrePass() && instr->GetSrc1()->IsRegOpnd())
  6656. {
  6657. if (dst->AsRegOpnd()->m_sym->IsSingleDef())
  6658. {
  6659. dst->AsRegOpnd()->m_sym->CopySymAttrs(instr->GetSrc1()->AsRegOpnd()->m_sym);
  6660. }
  6661. }
  6662. if (instr->IsProfiledInstr())
  6663. {
  6664. const ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
  6665. if(!(
  6666. profiledValueType.IsLikelyInt() &&
  6667. (
  6668. (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt) ||
  6669. (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
  6670. )
  6671. ))
  6672. {
  6673. if(!src1ValueInfo)
  6674. {
  6675. dstVal = this->NewGenericValue(profiledValueType, dst);
  6676. }
  6677. else if(src1ValueInfo->IsUninitialized())
  6678. {
  6679. if(IsLoopPrePass())
  6680. {
  6681. dstVal = this->NewGenericValue(profiledValueType, dst);
  6682. }
  6683. else
  6684. {
  6685. // Assuming the profile data gives more precise value types based on the path it took at runtime, we
  6686. // can improve the original value type.
  6687. src1ValueInfo->Type() = profiledValueType;
  6688. instr->GetSrc1()->SetValueType(profiledValueType);
  6689. }
  6690. }
  6691. }
  6692. }
  6693. if (dstVal == nullptr)
  6694. {
  6695. // Ld_A is just transferring the value
  6696. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  6697. }
  6698. break;
  6699. case Js::OpCode::ExtendArg_A:
  6700. {
  6701. // SIMD_JS
  6702. // We avoid transforming EAs to Lds to keep the IR shape consistent and avoid CSEing of EAs.
  6703. // CSEOptimize only assigns a Value to the EA dst, and doesn't turn it to a Ld. If this happened, we shouldn't assign a new Value here.
  6704. if (DoCSE())
  6705. {
  6706. IR::Opnd * currDst = instr->GetDst();
  6707. Value * currDstVal = this->FindValue(currDst->GetStackSym());
  6708. if (currDstVal != nullptr)
  6709. {
  6710. return currDstVal;
  6711. }
  6712. }
  6713. break;
  6714. }
  6715. case Js::OpCode::CheckFixedFld:
  6716. AssertMsg(false, "CheckFixedFld doesn't have a dst, so we should never get here");
  6717. break;
  6718. case Js::OpCode::LdSlot:
  6719. case Js::OpCode::LdSlotArr:
  6720. case Js::OpCode::LdFld:
  6721. case Js::OpCode::LdFldForTypeOf:
  6722. case Js::OpCode::LdFldForCallApplyTarget:
  6723. // Do not transfer value type on ldFldForTypeOf to prevent copy-prop to LdRootFld in case the field doesn't exist since LdRootFldForTypeOf does not throw
  6724. //case Js::OpCode::LdRootFldForTypeOf:
  6725. case Js::OpCode::LdRootFld:
  6726. case Js::OpCode::LdMethodFld:
  6727. case Js::OpCode::LdRootMethodFld:
  6728. case Js::OpCode::ScopedLdMethodFld:
  6729. case Js::OpCode::LdMethodFromFlags:
  6730. if (instr->IsProfiledInstr())
  6731. {
  6732. ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
  6733. if(!(profiledValueType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt))
  6734. {
  6735. if(!src1ValueInfo)
  6736. {
  6737. dstVal = this->NewGenericValue(profiledValueType, dst);
  6738. }
  6739. else if(src1ValueInfo->IsUninitialized())
  6740. {
  6741. if(IsLoopPrePass() && (!dst->IsRegOpnd() || !dst->AsRegOpnd()->m_sym->IsSingleDef() || DoFieldHoisting()))
  6742. {
  6743. dstVal = this->NewGenericValue(profiledValueType, dst);
  6744. }
  6745. else
  6746. {
  6747. // Assuming the profile data gives more precise value types based on the path it took at runtime, we
  6748. // can improve the original value type.
  6749. src1ValueInfo->Type() = profiledValueType;
  6750. instr->GetSrc1()->SetValueType(profiledValueType);
  6751. }
  6752. }
  6753. }
  6754. }
  6755. if (dstVal == nullptr)
  6756. {
  6757. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  6758. }
  6759. if(!this->IsLoopPrePass())
  6760. {
  6761. // We cannot transfer value if the field hasn't been copy prop'd because we don't generate
  6762. // an implicit call bailout between those values if we don't have "live fields" unless, we are hoisting the field.
  6763. PropertySym *propertySym = instr->GetSrc1()->AsSymOpnd()->m_sym->AsPropertySym();
  6764. StackSym * fieldHoistSym;
  6765. Loop * loop = this->FindFieldHoistStackSym(this->currentBlock->loop, propertySym->m_id, &fieldHoistSym, instr);
  6766. ValueInfo *dstValueInfo = (dstVal ? dstVal->GetValueInfo() : nullptr);
  6767. // Update symStore for field hoisting
  6768. if (loop != nullptr && (dstValueInfo != nullptr))
  6769. {
  6770. this->SetSymStoreDirect(dstValueInfo, fieldHoistSym);
  6771. }
  6772. // Update symStore if it isn't a stackSym
  6773. if (dstVal && (!dstValueInfo->GetSymStore() || !dstValueInfo->GetSymStore()->IsStackSym()))
  6774. {
  6775. Assert(dst->IsRegOpnd());
  6776. this->SetSymStoreDirect(dstValueInfo, dst->AsRegOpnd()->m_sym);
  6777. }
  6778. if (src1Val != dstVal)
  6779. {
  6780. this->SetValue(&this->blockData, dstVal, instr->GetSrc1());
  6781. }
  6782. }
  6783. break;
  6784. case Js::OpCode::LdC_A_R8:
  6785. case Js::OpCode::LdC_A_I4:
  6786. case Js::OpCode::ArgIn_A:
  6787. dstVal = src1Val;
  6788. break;
  6789. case Js::OpCode::LdStr:
  6790. if (src1Val == nullptr)
  6791. {
  6792. src1Val = NewGenericValue(ValueType::String, dst);
  6793. }
  6794. dstVal = src1Val;
  6795. break;
  6796. // LdElemUndef only assign undef if the field doesn't exist.
  6797. // So we don't actually know what the value is, so we can't really copy prop it.
  6798. //case Js::OpCode::LdElemUndef:
  6799. case Js::OpCode::StSlot:
  6800. case Js::OpCode::StSlotChkUndecl:
  6801. case Js::OpCode::StFld:
  6802. case Js::OpCode::StRootFld:
  6803. case Js::OpCode::StFldStrict:
  6804. case Js::OpCode::StRootFldStrict:
  6805. if (DoFieldCopyProp())
  6806. {
  6807. if (src1Val == nullptr)
  6808. {
  6809. // src1 may have no value if it's not a valid var, e.g., NULL for let/const initialization.
  6810. // Consider creating generic values for such things.
  6811. return nullptr;
  6812. }
  6813. AssertMsg(!src2Val, "Bad src Values...");
  6814. Assert(sym->IsPropertySym());
  6815. SymID symId = sym->m_id;
  6816. Assert(instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl || !this->blockData.liveFields->Test(symId));
  6817. if (IsHoistablePropertySym(symId))
  6818. {
  6819. // We have changed the value of a hoistable field, load afterwards shouldn't get hoisted,
  6820. // but we will still copy prop the pre-assign sym to it if we have a live value.
  6821. Assert((instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl) && this->blockData.liveFields->Test(symId));
  6822. this->blockData.hoistableFields->Clear(symId);
  6823. }
  6824. this->blockData.liveFields->Set(symId);
  6825. if (!this->IsLoopPrePass() && dst->GetIsDead())
  6826. {
  6827. // Take the property sym out of the live fields set (with special handling for loops).
  6828. this->EndFieldLifetime(dst->AsSymOpnd());
  6829. }
  6830. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  6831. }
  6832. else
  6833. {
  6834. return nullptr;
  6835. }
  6836. break;
  6837. case Js::OpCode::Conv_Num:
  6838. if(src1ValueInfo->IsNumber())
  6839. {
  6840. dstVal = ValueNumberTransferDst(instr, src1Val);
  6841. }
  6842. else
  6843. {
  6844. return NewGenericValue(src1ValueInfo->Type().ToDefiniteAnyNumber(), dst);
  6845. }
  6846. break;
  6847. case Js::OpCode::Not_A:
  6848. {
  6849. if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
  6850. {
  6851. min1 = INT32_MIN;
  6852. max1 = INT32_MAX;
  6853. }
  6854. this->PropagateIntRangeForNot(min1, max1, &newMin, &newMax);
  6855. return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  6856. }
  6857. case Js::OpCode::Xor_A:
  6858. case Js::OpCode::Or_A:
  6859. case Js::OpCode::And_A:
  6860. case Js::OpCode::Shl_A:
  6861. case Js::OpCode::Shr_A:
  6862. case Js::OpCode::ShrU_A:
  6863. {
  6864. if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
  6865. {
  6866. min1 = INT32_MIN;
  6867. max1 = INT32_MAX;
  6868. }
  6869. if (!src2Val || !src2ValueInfo->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec()))
  6870. {
  6871. min2 = INT32_MIN;
  6872. max2 = INT32_MAX;
  6873. }
  6874. if (instr->m_opcode == Js::OpCode::ShrU_A &&
  6875. min1 < 0 &&
  6876. IntConstantBounds(min2, max2).And_0x1f().Contains(0))
  6877. {
  6878. // Src1 may be too large to represent as a signed int32, and src2 may be zero.
  6879. // Since the result can therefore be too large to represent as a signed int32,
  6880. // include Number in the value type.
  6881. return CreateDstUntransferredValue(
  6882. ValueType::AnyNumber.SetCanBeTaggedValue(true), instr, src1Val, src2Val);
  6883. }
  6884. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  6885. return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  6886. }
  6887. case Js::OpCode::Incr_A:
  6888. case Js::OpCode::Decr_A:
  6889. {
  6890. ValueType valueType;
  6891. if(src1Val)
  6892. {
  6893. valueType = src1Val->GetValueInfo()->Type().ToDefiniteAnyNumber();
  6894. }
  6895. else
  6896. {
  6897. valueType = ValueType::Number;
  6898. }
  6899. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  6900. }
  6901. case Js::OpCode::Add_A:
  6902. {
  6903. ValueType valueType;
  6904. if (src1Val && src1ValueInfo->IsLikelyNumber() && src2Val && src2ValueInfo->IsLikelyNumber())
  6905. {
  6906. if(src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())
  6907. {
  6908. // When doing aggressiveIntType, just assume the result is likely going to be int
  6909. // if both input is int.
  6910. const bool isLikelyTagged = src1ValueInfo->IsLikelyTaggedInt() && src2ValueInfo->IsLikelyTaggedInt();
  6911. if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  6912. {
  6913. // If both of them are numbers then we can definitely say that the result is a number.
  6914. valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
  6915. }
  6916. else
  6917. {
  6918. // This is only likely going to be int but can be a string as well.
  6919. valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
  6920. }
  6921. }
  6922. else
  6923. {
  6924. // We can only be certain of any thing if both of them are numbers.
  6925. // Otherwise, the result could be string.
  6926. if (src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  6927. {
  6928. if (src1ValueInfo->IsFloat() || src2ValueInfo->IsFloat())
  6929. {
  6930. // If one of them is a float, the result probably is a float instead of just int
  6931. // but should always be a number.
  6932. valueType = ValueType::Float;
  6933. }
  6934. else
  6935. {
  6936. // Could be int, could be number
  6937. valueType = ValueType::Number;
  6938. }
  6939. }
  6940. else if (src1ValueInfo->IsLikelyFloat() || src2ValueInfo->IsLikelyFloat())
  6941. {
  6942. // Result is likely a float (but can be anything)
  6943. valueType = ValueType::Float.ToLikely();
  6944. }
  6945. else
  6946. {
  6947. // Otherwise it is a likely int or float (but can be anything)
  6948. valueType = ValueType::Number.ToLikely();
  6949. }
  6950. }
  6951. }
  6952. else if((src1Val && src1ValueInfo->IsString()) || (src2Val && src2ValueInfo->IsString()))
  6953. {
  6954. // String + anything should always result in a string
  6955. valueType = ValueType::String;
  6956. }
  6957. else if((src1Val && src1ValueInfo->IsNotString() && src1ValueInfo->IsPrimitive())
  6958. && (src2Val && src2ValueInfo->IsNotString() && src2ValueInfo->IsPrimitive()))
  6959. {
  6960. // If src1 and src2 are not strings and primitive, add should yield a number.
  6961. valueType = ValueType::Number;
  6962. }
  6963. else if((src1Val && src1ValueInfo->IsLikelyString()) || (src2Val && src2ValueInfo->IsLikelyString()))
  6964. {
  6965. // likelystring + anything should always result in a likelystring
  6966. valueType = ValueType::String.ToLikely();
  6967. }
  6968. else
  6969. {
  6970. // Number or string. Could make the value a merge of Number and String, but Uninitialized is more useful at the moment.
  6971. Assert(valueType.IsUninitialized());
  6972. }
  6973. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  6974. }
  6975. case Js::OpCode::Div_A:
  6976. {
  6977. ValueType divValueType = GetDivValueType(instr, src1Val, src2Val, false);
  6978. if (divValueType.IsLikelyInt() || divValueType.IsFloat())
  6979. {
  6980. return CreateDstUntransferredValue(divValueType, instr, src1Val, src2Val);
  6981. }
  6982. }
  6983. // fall-through
  6984. case Js::OpCode::Sub_A:
  6985. case Js::OpCode::Mul_A:
  6986. case Js::OpCode::Rem_A:
  6987. {
  6988. ValueType valueType;
  6989. if( src1Val &&
  6990. src1ValueInfo->IsLikelyInt() &&
  6991. src2Val &&
  6992. src2ValueInfo->IsLikelyInt() &&
  6993. instr->m_opcode != Js::OpCode::Div_A)
  6994. {
  6995. const bool isLikelyTagged =
  6996. src1ValueInfo->IsLikelyTaggedInt() && (src2ValueInfo->IsLikelyTaggedInt() || instr->m_opcode == Js::OpCode::Rem_A);
  6997. if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  6998. {
  6999. valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
  7000. }
  7001. else
  7002. {
  7003. valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
  7004. }
  7005. }
  7006. else if ((src1Val && src1ValueInfo->IsLikelyFloat()) || (src2Val && src2ValueInfo->IsLikelyFloat()))
  7007. {
  7008. // This should ideally be NewNumberAndLikelyFloatValue since we know the result is a number but not sure if it will
  7009. // be a float value. However, that Number/LikelyFloat value type doesn't exist currently and all the necessary
  7010. // checks are done for float values (tagged int checks, etc.) so it's sufficient to just create a float value here.
  7011. valueType = ValueType::Float;
  7012. }
  7013. else
  7014. {
  7015. valueType = ValueType::Number;
  7016. }
  7017. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  7018. }
  7019. case Js::OpCode::CallI:
  7020. Assert(dst->IsRegOpnd());
  7021. return NewGenericValue(dst->AsRegOpnd()->GetValueType(), dst);
  7022. case Js::OpCode::LdElemI_A:
  7023. {
  7024. dstVal = ValueNumberLdElemDst(pInstr, src1Val);
  7025. const ValueType baseValueType(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  7026. if( (
  7027. baseValueType.IsLikelyNativeArray() ||
  7028. #ifdef _M_IX86
  7029. (
  7030. !AutoSystemInfo::Data.SSE2Available() &&
  7031. baseValueType.IsLikelyObject() &&
  7032. (
  7033. baseValueType.GetObjectType() == ObjectType::Float32Array ||
  7034. baseValueType.GetObjectType() == ObjectType::Float64Array
  7035. )
  7036. )
  7037. #else
  7038. false
  7039. #endif
  7040. ) &&
  7041. instr->GetDst()->IsVar() &&
  7042. instr->HasBailOutInfo())
  7043. {
  7044. // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
  7045. // path. Note that the removed bailouts should not be necessary for correctness.
  7046. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  7047. if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  7048. {
  7049. bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
  7050. }
  7051. if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
  7052. {
  7053. bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
  7054. }
  7055. if(bailOutKind)
  7056. {
  7057. instr->SetBailOutKind(bailOutKind);
  7058. }
  7059. else
  7060. {
  7061. instr->ClearBailOutInfo();
  7062. }
  7063. }
  7064. return dstVal;
  7065. }
  7066. case Js::OpCode::LdMethodElem:
  7067. // Not worth profiling this, just assume it's likely object (should be likely function but ValueType does not track
  7068. // functions currently, so using ObjectType::Object instead)
  7069. dstVal = NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely(), dst);
  7070. if(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray() && instr->HasBailOutInfo())
  7071. {
  7072. // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
  7073. // path. Note that the removed bailouts should not be necessary for correctness.
  7074. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  7075. if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  7076. {
  7077. bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
  7078. }
  7079. if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
  7080. {
  7081. bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
  7082. }
  7083. if(bailOutKind)
  7084. {
  7085. instr->SetBailOutKind(bailOutKind);
  7086. }
  7087. else
  7088. {
  7089. instr->ClearBailOutInfo();
  7090. }
  7091. }
  7092. return dstVal;
  7093. case Js::OpCode::StElemI_A:
  7094. case Js::OpCode::StElemI_A_Strict:
  7095. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  7096. break;
  7097. case Js::OpCode::LdLen_A:
  7098. if (instr->IsProfiledInstr())
  7099. {
  7100. const ValueType profiledValueType(instr->AsProfiledInstr()->u.ldElemInfo->GetElementType());
  7101. if(!(profiledValueType.IsLikelyInt() && dst->AsRegOpnd()->m_sym->m_isNotInt))
  7102. {
  7103. return this->NewGenericValue(profiledValueType, dst);
  7104. }
  7105. }
  7106. break;
  7107. case Js::OpCode::BrOnEmpty:
  7108. case Js::OpCode::BrOnNotEmpty:
  7109. Assert(dst->IsRegOpnd());
  7110. Assert(dst->GetValueType().IsString());
  7111. return this->NewGenericValue(ValueType::String, dst);
  7112. case Js::OpCode::IsInst:
  7113. case Js::OpCode::LdTrue:
  7114. case Js::OpCode::LdFalse:
  7115. return this->NewGenericValue(ValueType::Boolean, dst);
  7116. case Js::OpCode::LdUndef:
  7117. return this->NewGenericValue(ValueType::Undefined, dst);
  7118. case Js::OpCode::LdC_A_Null:
  7119. return this->NewGenericValue(ValueType::Null, dst);
  7120. case Js::OpCode::LdThis:
  7121. if (!PHASE_OFF(Js::OptTagChecksPhase, this->func) &&
  7122. (src1ValueInfo == nullptr || src1ValueInfo->IsUninitialized()))
  7123. {
  7124. return this->NewGenericValue(ValueType::GetObject(ObjectType::Object), dst);
  7125. }
  7126. break;
  7127. case Js::OpCode::Typeof:
  7128. return this->NewGenericValue(ValueType::String, dst);
  7129. break;
  7130. }
  7131. #ifdef ENABLE_SIMDJS
  7132. // SIMD_JS
  7133. if (Js::IsSimd128Opcode(instr->m_opcode) && !func->GetJITFunctionBody()->IsAsmJsMode())
  7134. {
  7135. ThreadContext::SimdFuncSignature simdFuncSignature;
  7136. instr->m_func->GetScriptContext()->GetThreadContext()->GetSimdFuncSignatureFromOpcode(instr->m_opcode, simdFuncSignature);
  7137. return this->NewGenericValue(simdFuncSignature.returnType, dst);
  7138. }
  7139. #endif
  7140. if (dstVal == nullptr)
  7141. {
  7142. return this->NewGenericValue(dst->GetValueType(), dst);
  7143. }
  7144. return this->SetValue(&this->blockData, dstVal, dst);
  7145. }
Value *
GlobOpt::ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal)
{
    // Produces the value for the dst of an indexed load (LdElemI_A and friends),
    // and, when the base is a typed/native array eligible for specialization,
    // type-specializes the dst (int32 or float64) and attaches/merges the
    // appropriate array-access bailout on the instruction.
    // Note: pInstr is in/out — GenerateBailAtOperation may replace the instruction.
    IR::Instr *&instr = *pInstr;
    IR::Opnd *dst = instr->GetDst();
    Value *dstVal = nullptr;
    int32 newMin, newMax;
    ValueInfo *srcValueInfo = (srcVal ? srcVal->GetValueInfo() : nullptr);

    ValueType profiledElementType;
    if (instr->IsProfiledInstr())
    {
        profiledElementType = instr->AsProfiledInstr()->u.ldElemInfo->GetElementType();
        // Ignore a "likely int" profile when the dst sym is already known not to be
        // an int; otherwise use the profile to refine an uninitialized source value.
        if(!(profiledElementType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt) &&
            srcVal &&
            srcValueInfo->IsUninitialized())
        {
            if(IsLoopPrePass())
            {
                dstVal = NewGenericValue(profiledElementType, dst);
            }
            else
            {
                // Assuming the profile data gives more precise value types based on the path it took at runtime, we
                // can improve the original value type.
                srcValueInfo->Type() = profiledElementType;
                instr->GetSrc1()->SetValueType(profiledElementType);
            }
        }
    }

    IR::IndirOpnd *src = instr->GetSrc1()->AsIndirOpnd();
    const ValueType baseValueType(src->GetBaseOpnd()->GetValueType());
    // Bail out of specialization entirely when any of these hold; in that case the
    // dst just transfers the source value (or gets a generic profiled value).
    if (instr->DoStackArgsOpt(this->func) ||
        !(
            baseValueType.IsLikelyOptimizedTypedArray() ||
            (baseValueType.IsLikelyNativeArray() && instr->IsProfiledInstr()) // Specialized native array lowering for LdElem requires that it is profiled.
        ) ||
        (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
        // Don't do type spec on native array with a history of accessing gaps, as this is a bailout
        (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
        !ShouldExpectConventionalArrayIndexValue(src))
    {
        if(DoTypedArrayTypeSpec() && !IsLoopPrePass())
        {
            // Trace-only diagnostics explaining why the access wasn't specialized.
            GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access.\n"));
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because %s.\n"),
                    this->func->GetJITFunctionBody()->GetDisplayName(),
                    this->func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                    baseValueTypeStr,
                    instr->DoStackArgsOpt(this->func) ? _u("instruction uses the arguments object") :
                    baseValueType.IsLikelyOptimizedTypedArray() ? _u("index is negative or likely not int") : _u("of array type"));
                Output::Flush();
            }
        }

        if(!dstVal)
        {
            if(srcVal)
            {
                dstVal = this->ValueNumberTransferDst(instr, srcVal);
            }
            else
            {
                dstVal = NewGenericValue(profiledElementType, dst);
            }
        }
        return dstVal;
    }

    Assert(instr->GetSrc1()->IsIndirOpnd());

    IRType toType = TyVar;
    IR::BailOutKind bailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;

    // Select the element range / float-ness from the base array's object type.
    // The int-array cases set [newMin, newMax] and fall into IntArrayCommon;
    // native arrays (default case) re-enter via the Int32Array/Float64Array labels
    // after switching to the native-array bailout kind.
    switch(baseValueType.GetObjectType())
    {
        case ObjectType::Int8Array:
        case ObjectType::Int8VirtualArray:
        case ObjectType::Int8MixedArray:
            newMin = Int8ConstMin;
            newMax = Int8ConstMax;
            goto IntArrayCommon;

        case ObjectType::Uint8Array:
        case ObjectType::Uint8VirtualArray:
        case ObjectType::Uint8MixedArray:
        case ObjectType::Uint8ClampedArray:
        case ObjectType::Uint8ClampedVirtualArray:
        case ObjectType::Uint8ClampedMixedArray:
            newMin = Uint8ConstMin;
            newMax = Uint8ConstMax;
            goto IntArrayCommon;

        case ObjectType::Int16Array:
        case ObjectType::Int16VirtualArray:
        case ObjectType::Int16MixedArray:
            newMin = Int16ConstMin;
            newMax = Int16ConstMax;
            goto IntArrayCommon;

        case ObjectType::Uint16Array:
        case ObjectType::Uint16VirtualArray:
        case ObjectType::Uint16MixedArray:
            newMin = Uint16ConstMin;
            newMax = Uint16ConstMax;
            goto IntArrayCommon;

        case ObjectType::Int32Array:
        case ObjectType::Int32VirtualArray:
        case ObjectType::Int32MixedArray:
        case ObjectType::Uint32Array: // int-specialized loads from uint32 arrays will bail out on values that don't fit in an int32
        case ObjectType::Uint32VirtualArray:
        case ObjectType::Uint32MixedArray:
        Int32Array:
            newMin = Int32ConstMin;
            newMax = Int32ConstMax;
            goto IntArrayCommon;

        IntArrayCommon:
            Assert(dst->IsRegOpnd());

            // If int type spec is disabled, it is ok to load int values as they can help float type spec, and merging int32 with float64 => float64.
            // But if float type spec is also disabled, we'll have problems because float64 merged with var => float64...
            if (!this->DoAggressiveIntTypeSpec() && !this->DoFloatTypeSpec())
            {
                if (!dstVal)
                {
                    if (srcVal)
                    {
                        dstVal = this->ValueNumberTransferDst(instr, srcVal);
                    }
                    else
                    {
                        dstVal = NewGenericValue(profiledElementType, dst);
                    }
                }
                return dstVal;
            }
            TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, bailOutKind, newMin, newMax, &dstVal);
            toType = TyInt32;
            break;

        case ObjectType::Float32Array:
        case ObjectType::Float32VirtualArray:
        case ObjectType::Float32MixedArray:
        case ObjectType::Float64Array:
        case ObjectType::Float64VirtualArray:
        case ObjectType::Float64MixedArray:
        Float64Array:
            Assert(dst->IsRegOpnd());

            // If float type spec is disabled, don't load float64 values
            if (!this->DoFloatTypeSpec())
            {
                if (!dstVal)
                {
                    if (srcVal)
                    {
                        dstVal = this->ValueNumberTransferDst(instr, srcVal);
                    }
                    else
                    {
                        dstVal = NewGenericValue(profiledElementType, dst);
                    }
                }
                return dstVal;
            }
            TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, &dstVal);
            toType = TyFloat64;
            break;

        default:
            // Not a typed array: must be a native array (guaranteed by the guard
            // above). Use the native-array bailout and dispatch on element kind.
            Assert(baseValueType.IsLikelyNativeArray());
            bailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
            if(baseValueType.HasIntElements())
            {
                goto Int32Array;
            }
            Assert(baseValueType.HasFloatElements());
            goto Float64Array;
    }

    if(!dstVal)
    {
        dstVal = NewGenericValue(profiledElementType, dst);
    }
    Assert(toType != TyVar);

    GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
    if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        char dstValTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        dstVal->GetValueInfo()->Type().ToString(dstValTypeStr);
        Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s producing %S"),
            this->func->GetJITFunctionBody()->GetDisplayName(),
            this->func->GetDebugNumberSet(debugStringBuffer),
            Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
            baseValueTypeStr,
            toType == TyInt32 ? _u("int32") : _u("float64"),
            dstValTypeStr);
#if DBG_DUMP
        Output::Print(_u(" ("));
        dstVal->Dump();
        Output::Print(_u(").\n"));
#else
        Output::Print(_u(".\n"));
#endif
        Output::Flush();
    }

    if(!this->IsLoopPrePass())
    {
        if(instr->HasBailOutInfo())
        {
            const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
            // The only pre-existing bailouts allowed here are BailOutOnImplicitCallsPreOp
            // (main kind) and the ArrayAccessHelperCall/MarkTempObject bits.
            Assert(
                (
                    !(oldBailOutKind & ~IR::BailOutKindBits) ||
                    (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                ) &&
                !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
            if(bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
            {
                // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                // bails out for the right reason.
                instr->SetBailOutKind(
                    bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
            }
            else
            {
                // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                // calls to occur, so it must be merged in to eliminate generating the helper call
                Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                instr->SetBailOutKind(oldBailOutKind | bailOutKind);
            }
        }
        else
        {
            // No bailout yet on the instruction — attach one (may replace *pInstr).
            GenerateBailAtOperation(&instr, bailOutKind);
        }
    }

    return dstVal;
}
  7382. ValueType
  7383. GlobOpt::GetPrepassValueTypeForDst(
  7384. const ValueType desiredValueType,
  7385. IR::Instr *const instr,
  7386. Value *const src1Value,
  7387. Value *const src2Value,
  7388. bool *const isValueInfoPreciseRef) const
  7389. {
  7390. // Values with definite types can be created in the loop prepass only when it is guaranteed that the value type will be the
  7391. // same on any iteration of the loop. The heuristics currently used are:
  7392. // - If the source sym is not live on the back-edge, then it acquires a new value for each iteration of the loop, so
  7393. // that value type can be definite
  7394. // - Consider: A better solution for this is to track values that originate in this loop, which can have definite value
  7395. // types. That catches more cases, should look into that in the future.
  7396. // - If the source sym has a constant value that doesn't change for the duration of the function
  7397. // - The operation always results in a definite value type. For instance, signed bitwise operations always result in an
  7398. // int32, conv_num and ++ always result in a number, etc.
  7399. // - For operations that always result in an int32, the resulting int range is precise only if the source syms pass
  7400. // the above heuristics. Otherwise, the range must be expanded to the full int32 range.
  7401. Assert(IsLoopPrePass());
  7402. Assert(instr);
  7403. if(isValueInfoPreciseRef)
  7404. {
  7405. *isValueInfoPreciseRef = false;
  7406. }
  7407. if(!desiredValueType.IsDefinite())
  7408. {
  7409. return desiredValueType;
  7410. }
  7411. if((instr->GetSrc1() && !IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Value)) ||
  7412. (instr->GetSrc2() && !IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Value)))
  7413. {
  7414. // If the desired value type is not precise, the value type of the destination is derived from the value types of the
  7415. // sources. Since the value type of a source sym is not definite, the destination value type also cannot be definite.
  7416. if(desiredValueType.IsInt() && OpCodeAttr::IsInt32(instr->m_opcode))
  7417. {
  7418. // The op always produces an int32, but not always a tagged int
  7419. return ValueType::GetInt(desiredValueType.IsLikelyTaggedInt());
  7420. }
  7421. if(desiredValueType.IsNumber() && OpCodeAttr::ProducesNumber(instr->m_opcode))
  7422. {
  7423. // The op always produces a number, but not always an int
  7424. return desiredValueType.ToDefiniteAnyNumber();
  7425. }
  7426. return desiredValueType.ToLikely();
  7427. }
  7428. if(isValueInfoPreciseRef)
  7429. {
  7430. // The produced value info is derived from the sources, which have precise value infos
  7431. *isValueInfoPreciseRef = true;
  7432. }
  7433. return desiredValueType;
  7434. }
  7435. bool
  7436. GlobOpt::IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue) const
  7437. {
  7438. Assert(IsLoopPrePass());
  7439. Assert(src);
  7440. if(!src->IsRegOpnd() || !srcValue)
  7441. {
  7442. return false;
  7443. }
  7444. ValueInfo *const srcValueInfo = srcValue->GetValueInfo();
  7445. if(!srcValueInfo->IsDefinite())
  7446. {
  7447. return false;
  7448. }
  7449. StackSym *srcSym = src->AsRegOpnd()->m_sym;
  7450. Assert(!srcSym->IsTypeSpec());
  7451. int32 intConstantValue;
  7452. return
  7453. srcSym->IsFromByteCodeConstantTable() ||
  7454. (
  7455. srcValueInfo->TryGetIntConstantValue(&intConstantValue) &&
  7456. !Js::TaggedInt::IsOverflow(intConstantValue) &&
  7457. GetTaggedIntConstantStackSym(intConstantValue) == srcSym
  7458. ) ||
  7459. !currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(srcSym->m_id);
  7460. }
  7461. Value *GlobOpt::CreateDstUntransferredIntValue(
  7462. const int32 min,
  7463. const int32 max,
  7464. IR::Instr *const instr,
  7465. Value *const src1Value,
  7466. Value *const src2Value)
  7467. {
  7468. Assert(instr);
  7469. Assert(instr->GetDst());
  7470. Assert(OpCodeAttr::ProducesNumber(instr->m_opcode)
  7471. || (instr->m_opcode == Js::OpCode::Add_A && src1Value->GetValueInfo()->IsNumber()
  7472. && src2Value->GetValueInfo()->IsNumber()));
  7473. ValueType valueType(ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  7474. Assert(valueType.IsInt());
  7475. bool isValueInfoPrecise;
  7476. if(IsLoopPrePass())
  7477. {
  7478. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, &isValueInfoPrecise);
  7479. }
  7480. else
  7481. {
  7482. isValueInfoPrecise = true;
  7483. }
  7484. IR::Opnd *const dst = instr->GetDst();
  7485. if(isValueInfoPrecise)
  7486. {
  7487. Assert(valueType == ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  7488. Assert(!(dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsTypeSpec()));
  7489. return NewIntRangeValue(min, max, false, dst);
  7490. }
  7491. return NewGenericValue(valueType, dst);
  7492. }
  7493. Value *
  7494. GlobOpt::CreateDstUntransferredValue(
  7495. const ValueType desiredValueType,
  7496. IR::Instr *const instr,
  7497. Value *const src1Value,
  7498. Value *const src2Value)
  7499. {
  7500. Assert(instr);
  7501. Assert(instr->GetDst());
  7502. Assert(!desiredValueType.IsInt()); // use CreateDstUntransferredIntValue instead
  7503. ValueType valueType(desiredValueType);
  7504. if(IsLoopPrePass())
  7505. {
  7506. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value);
  7507. }
  7508. return NewGenericValue(valueType, instr->GetDst());
  7509. }
// Computes the dst value for a transfer-style instruction (e.g. Ld_A): outside
// a loop prepass the source value is transferred unchanged; inside a prepass a
// more conservative value is produced by ValueNumberTransferDstInPrepass.
Value *
GlobOpt::ValueNumberTransferDst(IR::Instr *const instr, Value * src1Val)
{
    Value *dstVal = this->IsLoopPrePass() ? this->ValueNumberTransferDstInPrepass(instr, src1Val) : src1Val;

    // Don't copy-prop a temp over a user symbol.  This is likely to extend the temp's lifetime, as the user symbol
    // is more likely to already have later references.
    // REVIEW: Enabling this does cause perf issues...
#if 0
    if (dstVal != src1Val)
    {
        return dstVal;
    }

    // (Disabled, see REVIEW above.) Would move the symStore from a bytecode temp
    // onto a user dst symbol so copy-prop prefers the user symbol.
    Sym *dstSym = dst->GetStackSym();

    if (dstVal && dstSym && dstSym->IsStackSym() && !dstSym->AsStackSym()->m_isBytecodeTmp)
    {
        Sym *dstValSym = dstVal->GetValueInfo()->GetSymStore();
        if (dstValSym && dstValSym->AsStackSym()->m_isBytecodeTmp /* src->GetIsDead()*/)
        {
            dstVal->GetValueInfo()->SetSymStore(dstSym);
        }
    }
#endif

    return dstVal;
}
  7534. bool
  7535. GlobOpt::IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue)
  7536. {
  7537. if (this->DoFieldHoisting())
  7538. {
  7539. return false;
  7540. }
  7541. if (src->IsRegOpnd())
  7542. {
  7543. StackSym *srcSym = src->AsRegOpnd()->m_sym;
  7544. if (srcSym->IsFromByteCodeConstantTable())
  7545. {
  7546. return true;
  7547. }
  7548. ValueInfo *srcValueInfo = srcValue->GetValueInfo();
  7549. int32 srcIntConstantValue;
  7550. if (srcValueInfo->TryGetIntConstantValue(&srcIntConstantValue) && !Js::TaggedInt::IsOverflow(srcIntConstantValue)
  7551. && GetTaggedIntConstantStackSym(srcIntConstantValue) == srcSym)
  7552. {
  7553. return true;
  7554. }
  7555. }
  7556. return false;
  7557. }
// Prepass counterpart of ValueNumberTransferDst. Produces the dst value for a
// transfer while inside a loop prepass, where src1's value may change on later
// iterations; the result is either a copy of src1's value (when the value info
// is provably precise) or a new generic value of the conservatively-widened type.
Value *
GlobOpt::ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val)
{
    Value *dstVal = nullptr;

    if (!src1Val)
    {
        return nullptr;
    }

    bool isValueInfoPrecise;
    ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();

    // TODO: This conflicts with new values created by the type specialization code
    // We should re-enable if we change that code to avoid the new values.
#if 0
    if (this->IsSafeToTransferInPrePass(instr->GetSrc1(), src1Val))
    {
        return src1Val;
    }

    // (Disabled.) PRE loads whose dst sym is a field-PRE sym-store in some
    // enclosing loop would also be safe to transfer directly.
    if (this->IsPREInstrCandidateLoad(instr->m_opcode) && instr->GetDst())
    {
        StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;

        for (Loop *curLoop = this->currentBlock->loop; curLoop; curLoop = curLoop->parent)
        {
            if (curLoop->fieldPRESymStore->Test(dstSym->m_id))
            {
                return src1Val;
            }
        }
    }

    if (!this->DoFieldHoisting())
    {
        if (instr->GetDst()->IsRegOpnd())
        {
            StackSym *stackSym = instr->GetDst()->AsRegOpnd()->m_sym;
            if (stackSym->IsSingleDef() || this->IsLive(stackSym, this->prePassLoop->landingPad))
            {
                IntConstantBounds src1IntConstantBounds;
                if (src1ValueInfo->TryGetIntConstantBounds(&src1IntConstantBounds) &&
                    !(
                        src1IntConstantBounds.LowerBound() == INT32_MIN &&
                        src1IntConstantBounds.UpperBound() == INT32_MAX
                        ))
                {
                    const ValueType valueType(
                        GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
                    if (isValueInfoPrecise)
                    {
                        return src1Val;
                    }
                }
                else
                {
                    return src1Val;
                }
            }
        }
    }
#endif

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration.  Since we don't iterate over loops "while (!changed)", go conservative on the
    // first pass when transferring a value that is live on the back-edge.
    // In prepass we are going to copy the value but with a different value number
    // for aggressive int type spec.
    const ValueType valueType(GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
    if(isValueInfoPrecise || (valueType == src1ValueInfo->Type() && src1ValueInfo->IsGeneric()))
    {
        // The type cannot change across iterations: copy the value (new value
        // number) so aggressive int type spec can still distinguish iterations.
        Assert(valueType == src1ValueInfo->Type());
        dstVal = CopyValue(src1Val);
        TrackCopiedValueForKills(dstVal);
    }
    else
    {
        // The value info may not hold on later iterations: create a fresh generic
        // value of the widened type, but keep the symStore for copy-prop.
        dstVal = NewGenericValue(valueType);
        dstVal->GetValueInfo()->SetSymStore(src1ValueInfo->GetSymStore());
    }

    return dstVal;
}
  7634. void
  7635. GlobOpt::PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32* pNewMax)
  7636. {
  7637. int32 tmp;
  7638. Int32Math::Not(minimum, pNewMin);
  7639. *pNewMax = *pNewMin;
  7640. Int32Math::Not(maximum, &tmp);
  7641. *pNewMin = min(*pNewMin, tmp);
  7642. *pNewMax = max(*pNewMax, tmp);
  7643. }
// Computes a conservative int32 range [*pNewMin, *pNewMax] for the result of a
// binary bitwise/shift operation, given the source ranges [min1, max1] and
// [min2, max2]. Opcodes not handled by the switch produce the full int32 range.
void
GlobOpt::PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
    int32 min2, int32 max2, int32 *pNewMin, int32* pNewMax)
{
    int32 min, max, tmp, tmp2;

    // Default: full int32 range (no information).
    min = INT32_MIN;
    max = INT32_MAX;

    switch (instr->m_opcode)
    {
    case Js::OpCode::Xor_A:
    case Js::OpCode::Or_A:
        // Find range with highest high order bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp > (uint32)tmp2)
        {
            max = tmp;
        }
        else
        {
            max = tmp2;
        }

        if (max < 0)
        {
            // A negative operand may be involved; give up on a tight range.
            min = INT32_MIN;  // REVIEW: conservative...
            max = INT32_MAX;
        }
        else
        {
            // OR/XOR can set any bit at or below the highest set bit.
            // Turn values like 0x1010 into 0x1111
            max = 1 << Math::Log2(max);
            max = (uint32)(max << 1) - 1;
            min = 0;
        }
        break;

    case Js::OpCode::And_A:
        if (min1 == INT32_MIN && min2 == INT32_MIN)
        {
            // Shortcut
            break;
        }

        // Find range with lowest higher bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp < (uint32)tmp2)
        {
            min = min1;
            max = max1;
        }
        else
        {
            min = min2;
            max = max2;
        }

        // To compute max, look if min has higher high bit
        if ((uint32)min > (uint32)max)
        {
            max = min;
        }

        // If max is negative, max let's assume it could be -1, so result in MAX_INT
        if (max < 0)
        {
            max = INT32_MAX;
        }

        // If min is positive, the resulting min is zero
        if (min >= 0)
        {
            min = 0;
        }
        else
        {
            min = INT32_MIN;
        }
        break;

    case Js::OpCode::Shl_A:
        {
            // Shift count: JS masks the count with 0x1F; if the range straddles
            // the mask boundary, assume any count [0, 31].
            if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
            {
                min2 = 0;
                max2 = 0x1F;
            }
            else
            {
                min2 &= 0x1F;
                max2 &= 0x1F;
            }

            // Number of leading zero bits available before a shift would reach the sign bit.
            int32 min1FreeTopBitCount = min1 ? (sizeof(int32) * 8) - (Math::Log2(min1) + 1) : (sizeof(int32) * 8);
            int32 max1FreeTopBitCount = max1 ? (sizeof(int32) * 8) - (Math::Log2(max1) + 1) : (sizeof(int32) * 8);
            if (min1FreeTopBitCount <= max2 || max1FreeTopBitCount <= max2)
            {
                // If the shift is going to touch the sign bit return the max range
                min = INT32_MIN;
                max = INT32_MAX;
            }
            else
            {
                // Compute max
                // Turn values like 0x1010 into 0x1111
                if (min1)
                {
                    min1 = 1 << Math::Log2(min1);
                    min1 = (min1 << 1) - 1;
                }
                if (max1)
                {
                    max1 = 1 << Math::Log2(max1);
                    max1 = (uint32)(max1 << 1) - 1;
                }

                if (max1 > 0)
                {
                    int32 nrTopBits = (sizeof(int32) * 8) - Math::Log2(max1);
                    if (nrTopBits < ::min(max2, 30))
                        max = INT32_MAX;
                    else
                        max = ::max((max1 << ::min(max2, 30)) & ~0x80000000, (min1 << min2) & ~0x80000000);
                }
                else
                {
                    max = (max1 << min2) & ~0x80000000;
                }

                // Compute min
                if (min1 < 0)
                {
                    min = ::min(min1 << max2, max1 << max2);
                }
                else
                {
                    min = ::min(min1 << min2, max1 << max2);
                }

                // Turn values like 0x1110 into 0x1000
                if (min)
                {
                    min = 1 << Math::Log2(min);
                }
            }
        }
        break;

    case Js::OpCode::Shr_A:
        // Shift count: same 0x1F masking rules as Shl_A above.
        if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
        {
            min2 = 0;
            max2 = 0x1F;
        }
        else
        {
            min2 &= 0x1F;
            max2 &= 0x1F;
        }

        // Compute max: arithmetic shift keeps the sign, so a negative max1 gets
        // closest to zero with the largest count, a positive one with the smallest.
        if (max1 < 0)
        {
            max = max1 >> max2;
        }
        else
        {
            max = max1 >> min2;
        }

        // Compute min (mirror reasoning of max above)
        if (min1 < 0)
        {
            min = min1 >> min2;
        }
        else
        {
            min = min1 >> max2;
        }
        break;

    case Js::OpCode::ShrU_A:

        // shift count is constant zero
        if ((min2 == max2) && (max2 & 0x1f) == 0)
        {
            // We can't encode uint32 result, so it has to be used as int32 only or the original value is positive.
            Assert(instr->ignoreIntOverflow || min1 >= 0);
            // We can transfer the signed int32 range.
            min = min1;
            max = max1;
            break;
        }

        const IntConstantBounds src2NewBounds = IntConstantBounds(min2, max2).And_0x1f();
        // Zero is only allowed if result is always a signed int32 or always used as a signed int32
        Assert(min1 >= 0 || instr->ignoreIntOverflow || !src2NewBounds.Contains(0));
        min2 = src2NewBounds.LowerBound();
        max2 = src2NewBounds.UpperBound();

        Assert(min2 <= max2);
        // zero shift count is only allowed if result is used as int32 and/or value is positive
        Assert(min2 > 0 || instr->ignoreIntOverflow || min1 >= 0);

        uint32 umin1 = (uint32)min1;
        uint32 umax1 = (uint32)max1;

        if (umin1 > umax1)
        {
            uint32 temp = umax1;
            umax1 = umin1;
            umin1 = temp;
        }

        Assert(min2 >= 0 && max2 < 32);

        // Compute max
        if (min1 < 0)
        {
            // Range crosses the sign bit, so the unsigned max is unbounded.
            umax1 = UINT32_MAX;
        }
        max = umax1 >> min2;

        // Compute min
        if (min1 <= 0 && max1 >=0)
        {
            // Zero is in the source range, so it is in the result range too.
            min = 0;
        }
        else
        {
            min = umin1 >> max2;
        }

        // We should be able to fit uint32 range as int32
        Assert(instr->ignoreIntOverflow || (min >= 0 && max >= 0) );
        if (min > max)
        {
            // can only happen if shift count can be zero
            Assert(min2 == 0 && (instr->ignoreIntOverflow || min1 >= 0));
            min = Int32ConstMin;
            max = Int32ConstMax;
        }

        break;
    }

    *pNewMin = min;
    *pNewMax = max;
}
// Main type-specialization driver for a single instruction. Attempts, in order:
// SIMD specialization, unary const folding / unary type spec, binary const
// folding, binary type spec (with a const-peep cleanup), branch const folding.
// If nothing specialized, converts all srcs/dst back to var. Returns the
// (possibly replaced) instruction; *redoTypeSpecRef is set when the caller
// must re-run type specialization for this instruction.
IR::Instr *
GlobOpt::TypeSpecialization(
    IR::Instr *instr,
    Value **pSrc1Val,
    Value **pSrc2Val,
    Value **pDstVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Value *&src1Val = *pSrc1Val;
    Value *&src2Val = *pSrc2Val;
    *redoTypeSpecRef = false;
    Assert(!*forceInvariantHoistingRef);

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    // - Int32 values that can't be tagged are created as float constant values instead because a JavascriptNumber var is needed
    //   for that value at runtime. For the purposes of type specialization, recover the int32 values so that they will be
    //   treated as ints.
    // - If int overflow does not matter for the instruction, we can additionally treat uint32 values as int32 values because
    //   the value resulting from the operation will eventually be converted to int32 anyway
    Value *const src1OriginalVal = src1Val;
    Value *const src2OriginalVal = src2Val;

#ifdef ENABLE_SIMDJS
    // SIMD_JS
    if (TypeSpecializeSimd128(instr, pSrc1Val, pSrc2Val, pDstVal))
    {
        return instr;
    }
#endif

    if(!instr->ShouldCheckForIntOverflow())
    {
        // Recover int32/uint32 values hiding inside float constants (see note above).
        if(src1Val && src1Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src1Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src1Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    // Value was uint32: valid only because overflow is ignored here.
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
        if(src2Val && src2Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src2Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src2Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
    }
    // Restore the original values on scope exit, so callers see the pre-recovery values.
    const AutoRestoreVal autoRestoreSrc1Val(src1OriginalVal, &src1Val);
    const AutoRestoreVal autoRestoreSrc2Val(src2OriginalVal, &src2Val);

    if (src1Val && instr->GetSrc2() == nullptr)
    {
        // Unary
        // Note make sure that native array StElemI gets to TypeSpecializeStElem. Do this for typed arrays, too?
        int32 intConstantValue;
        if (!this->IsLoopPrePass() &&
            !instr->IsBranchInstr() &&
            src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) &&
            !(
                // Nothing to fold for element stores. Go into type specialization to see if they can at least be specialized.
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC ||
                instr->m_opcode == Js::OpCode::MultiBr ||
                instr->m_opcode == Js::OpCode::InlineArrayPop
            ))
        {
            if (OptConstFoldUnary(&instr, intConstantValue, src1Val == src1OriginalVal, pDstVal))
            {
                return instr;
            }
        }
        else if (this->TypeSpecializeUnary(
                    &instr,
                    &src1Val,
                    pDstVal,
                    src1OriginalVal,
                    redoTypeSpecRef,
                    forceInvariantHoistingRef))
        {
            return instr;
        }
        else if(*redoTypeSpecRef)
        {
            return instr;
        }
    }
    else if (instr->GetSrc2() && !instr->IsBranchInstr())
    {
        // Binary
        if (!this->IsLoopPrePass())
        {
            // OptConstFoldBinary doesn't do type spec, so only deal with things we are sure are int (IntConstant and IntRange)
            // and not just likely ints  TypeSpecializeBinary will deal with type specializing them and fold them again
            IntConstantBounds src1IntConstantBounds, src2IntConstantBounds;
            if (src1Val && src1Val->GetValueInfo()->TryGetIntConstantBounds(&src1IntConstantBounds))
            {
                if (src2Val && src2Val->GetValueInfo()->TryGetIntConstantBounds(&src2IntConstantBounds))
                {
                    if (this->OptConstFoldBinary(&instr, src1IntConstantBounds, src2IntConstantBounds, pDstVal))
                    {
                        return instr;
                    }
                }
            }
        }
    }
    if (instr->GetSrc2() && this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2OriginalVal, redoTypeSpecRef))
    {
        if (!this->IsLoopPrePass() &&
            instr->m_opcode != Js::OpCode::Nop &&
            instr->m_opcode != Js::OpCode::Br &&    // We may have const fold a branch

            // Cannot const-peep if the result of the operation is required for a bailout check
            !(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnResultConditions))
        {
            // Try to simplify identity ops (x+0, x*1, ...) after specialization.
            if (src1Val && src1Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc1(), pDstVal, src1Val->GetValueInfo()))
                {
                    return instr;
                }
            }
            else if (src2Val && src2Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc2(), pDstVal, src2Val->GetValueInfo()))
                {
                    return instr;
                }
            }
        }
        return instr;
    }
    else if(*redoTypeSpecRef)
    {
        return instr;
    }

    if (instr->IsBranchInstr() && !this->IsLoopPrePass())
    {
        if (this->OptConstFoldBranch(instr, src1Val, src2Val, pDstVal))
        {
            return instr;
        }
    }
    // We didn't type specialize, make sure the srcs are unspecialized
    IR::Opnd *src1 = instr->GetSrc1();
    if (src1)
    {
        instr = this->ToVarUses(instr, src1, false, src1Val);

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            instr = this->ToVarUses(instr, src2, false, src2Val);
        }
    }

    IR::Opnd *dst = instr->GetDst();
    if (dst)
    {
        instr = this->ToVarUses(instr, dst, true, nullptr);

        // Handling for instructions other than built-ins that may require only dst type specialization
        // should be added here.
        if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode) && !GetIsAsmJSFunc()) // don't need to do typespec for asmjs
        {
            this->TypeSpecializeInlineBuiltInDst(&instr, pDstVal);
            return instr;
        }

        // Clear the int specialized bit on the dst.
        if (dst->IsRegOpnd())
        {
            IR::RegOpnd *dstRegOpnd = dst->AsRegOpnd();
            if (!dstRegOpnd->m_sym->IsTypeSpec())
            {
                this->ToVarRegOpnd(dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsInt32())
            {
                this->ToInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsUInt32() && GetIsAsmJSFunc())
            {
                this->ToUInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsFloat64())
            {
                this->ToFloat64Dst(instr, dstRegOpnd, this->currentBlock);
            }
        }
        else if (dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym())
        {
            this->ToVarStackSym(dst->AsSymOpnd()->m_sym->AsStackSym(), this->currentBlock);
        }
    }

    return instr;
}
// Peephole-folds an instruction where one source (constSrc) is a known integer
// constant that makes the operation an identity (x+0, x*1, x&-1, x|0, x>>0, ...).
// On success the instruction is rewritten to Ld_A of the surviving operand and
// true is returned; otherwise the instruction is untouched and false is returned.
bool
GlobOpt::OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *valuInfo)
{
    int32 value;
    IR::Opnd *src;
    IR::Opnd *nonConstSrc = (constSrc == instr->GetSrc1() ? instr->GetSrc2() : instr->GetSrc1());

    // Try to find the value from value info first
    if (valuInfo->TryGetIntConstantValue(&value))
    {
    }
    else if (constSrc->IsAddrOpnd())
    {
        IR::AddrOpnd *addrOpnd = constSrc->AsAddrOpnd();
#ifdef _M_X64
        Assert(addrOpnd->IsVar() || Math::FitsInDWord((size_t)addrOpnd->m_address));
#else
        Assert(sizeof(value) == sizeof(addrOpnd->m_address));
#endif

        if (addrOpnd->IsVar())
        {
            // Tagged-int var: extract the int32 payload.
            value = Js::TaggedInt::ToInt32(addrOpnd->m_address);
        }
        else
        {
            // We asserted that the address will fit in a DWORD above
            value = ::Math::PointerCastToIntegral<int32>(constSrc->AsAddrOpnd()->m_address);
        }
    }
    else if (constSrc->IsIntConstOpnd())
    {
        value = constSrc->AsIntConstOpnd()->AsInt32();
    }
    else
    {
        return false;
    }

    switch(instr->m_opcode)
    {
        // Can't do all Add_A because of string concats.
        // Sub_A cannot be transformed to a NEG_A because 0 - 0 != -0
    case Js::OpCode::Add_A:
        src = nonConstSrc;

        if (!src->GetValueType().IsInt())
        {
            // 0 + -0  != -0
            // "Foo" + 0 != "Foo
            return false;
        }
        // fall-through

    case Js::OpCode::Add_Ptr:
    case Js::OpCode::Add_I4:
        if (value != 0)
        {
            return false;
        }
        if (constSrc == instr->GetSrc1())
        {
            src = instr->GetSrc2();
        }
        else
        {
            src = instr->GetSrc1();
        }
        break;

    case Js::OpCode::Mul_A:
    case Js::OpCode::Mul_I4:
        if (value == 0)
        {
            // -0 * 0 != 0
            return false;
        }
        else if (value == 1)
        {
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Div_A:
        if (value == 1 && constSrc == instr->GetSrc2())
        {
            src = instr->GetSrc1();
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Or_I4:
        if (value == -1)
        {
            // x | -1 == -1: the constant is the result.
            src = constSrc;
        }
        else if (value == 0)
        {
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::And_I4:
        if (value == -1)
        {
            src = nonConstSrc;
        }
        else if (value == 0)
        {
            // x & 0 == 0: the constant is the result.
            src = constSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        // Only a zero shift count (on src2) is an identity.
        if (value != 0 || constSrc != instr->GetSrc2())
        {
            return false;
        }
        src = instr->GetSrc1();
        break;

    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    // Keep the surviving operand in src1 and turn the instruction into a plain move.
    if (src == instr->GetSrc1())
    {
        instr->FreeSrc2();
    }
    else
    {
        Assert(src == instr->GetSrc2());
        instr->ReplaceSrc1(instr->UnlinkSrc2());
    }

    instr->m_opcode = Js::OpCode::Ld_A;

    return true;
}
  8222. Js::Var // TODO: michhol OOP JIT, shouldn't play with Vars
  8223. GlobOpt::GetConstantVar(IR::Opnd *opnd, Value *val)
  8224. {
  8225. ValueInfo *valueInfo = val->GetValueInfo();
  8226. if (valueInfo->IsVarConstant() && valueInfo->IsPrimitive())
  8227. {
  8228. return valueInfo->AsVarConstant()->VarValue();
  8229. }
  8230. if (opnd->IsAddrOpnd())
  8231. {
  8232. IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
  8233. if (addrOpnd->IsVar())
  8234. {
  8235. return addrOpnd->m_address;
  8236. }
  8237. }
  8238. else if (opnd->IsIntConstOpnd())
  8239. {
  8240. if (!Js::TaggedInt::IsOverflow(opnd->AsIntConstOpnd()->AsInt32()))
  8241. {
  8242. return Js::TaggedInt::ToVarUnchecked(opnd->AsIntConstOpnd()->AsInt32());
  8243. }
  8244. }
  8245. else if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->IsSingleDef())
  8246. {
  8247. if (valueInfo->IsBoolean())
  8248. {
  8249. IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
  8250. if (defInstr->m_opcode != Js::OpCode::Ld_A || !defInstr->GetSrc1()->IsAddrOpnd())
  8251. {
  8252. return nullptr;
  8253. }
  8254. Assert(defInstr->GetSrc1()->AsAddrOpnd()->IsVar());
  8255. return defInstr->GetSrc1()->AsAddrOpnd()->m_address;
  8256. }
  8257. else if (valueInfo->IsUndefined())
  8258. {
  8259. return (Js::Var)this->func->GetScriptContextInfo()->GetUndefinedAddr();
  8260. }
  8261. else if (valueInfo->IsNull())
  8262. {
  8263. return (Js::Var)this->func->GetScriptContextInfo()->GetNullAddr();
  8264. }
  8265. }
  8266. return nullptr;
  8267. }
  8268. bool BoolAndIntStaticAndTypeMismatch(Value* src1Val, Value* src2Val, Js::Var src1Var, Js::Var src2Var)
  8269. {
  8270. ValueInfo *src1ValInfo = src1Val->GetValueInfo();
  8271. ValueInfo *src2ValInfo = src2Val->GetValueInfo();
  8272. return (src1ValInfo->IsNumber() && src1Var && src2ValInfo->IsBoolean() && src1Var != Js::TaggedInt::ToVarUnchecked(0) && src1Var != Js::TaggedInt::ToVarUnchecked(1)) ||
  8273. (src2ValInfo->IsNumber() && src2Var && src1ValInfo->IsBoolean() && src2Var != Js::TaggedInt::ToVarUnchecked(0) && src2Var != Js::TaggedInt::ToVarUnchecked(1));
  8274. }
// Attempts to fold a conditional branch whose outcome is statically known from
// the source values (constant vars, or definitely-mismatched types). On success
// the branch is resolved via OptConstFoldBr (or turned into a Nop for asm.js
// BrFalse_I4) and true is returned; otherwise false.
bool
GlobOpt::OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal)
{
    if (!src1Val)
    {
        return false;
    }

    Js::Var src1Var = this->GetConstantVar(instr->GetSrc1(), src1Val);

    Js::Var src2Var = nullptr;
    if (instr->GetSrc2())
    {
        if (!src2Val)
        {
            return false;
        }

        src2Var = this->GetConstantVar(instr->GetSrc2(), src2Val);
    }

    // Make sure GetConstantVar only returns primitives.
    // TODO: OOP JIT, enabled these asserts
    //Assert(!src1Var || !Js::JavascriptOperators::IsObject(src1Var));
    //Assert(!src2Var || !Js::JavascriptOperators::IsObject(src2Var));

    BOOL result;
    int32 constVal;
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        if (!src1Var || !src2Var)
        {
            // Without both constants, folding is only possible on a provable
            // number-vs-boolean mismatch (never loosely equal).
            if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
            {
                result = false;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::Equal(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        if (!src1Var || !src2Var)
        {
            if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
            {
                result = true;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::NotEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        if (!src1Var || !src2Var)
        {
            // Strict equality is false when one side's definite type can never
            // have been the other side's type (undefined/null/bool/number/string).
            ValueInfo *src1ValInfo = src1Val->GetValueInfo();
            ValueInfo *src2ValInfo = src2Val->GetValueInfo();
            if (
                (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
                (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
                (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
                (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
                (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||

                (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
                (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
                (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
                (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
                (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
               )
            {
                result = false;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::StrictEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        if (!src1Var || !src2Var)
        {
            // Same type-mismatch reasoning as BrSrEq_A, with the result inverted.
            ValueInfo *src1ValInfo = src1Val->GetValueInfo();
            ValueInfo *src2ValInfo = src2Val->GetValueInfo();
            if (
                (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
                (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
                (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
                (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
                (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||

                (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
                (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
                (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
                (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
                (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
               )
            {
                result = true;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::NotStrictEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
        if(src1ValueInfo->IsNull() || src1ValueInfo->IsUndefined())
        {
            // null/undefined are always falsy.
            result = instr->m_opcode == Js::OpCode::BrFalse_A;
            break;
        }
        if(src1ValueInfo->IsObject() && src1ValueInfo->GetObjectType() > ObjectType::Object)
        {
            // Specific object types that are tracked are equivalent to 'true'
            result = instr->m_opcode == Js::OpCode::BrTrue_A;
            break;
        }

        if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
        {
            // TODO: OOP JIT, const folding
            return false;
        }

        if (!src1Var)
        {
            return false;
        }
        result = Js::JavascriptConversion::ToBoolean(src1Var, this->func->GetScriptContext());
        if(instr->m_opcode == Js::OpCode::BrFalse_A)
        {
            result = !result;
        }
        break;
    }
    case Js::OpCode::BrFalse_I4:
        // this path would probably work outside of asm.js, but we should verify that if we ever hit this scenario
        Assert(GetIsAsmJSFunc());
        constVal = 0;
        if (src1Val->GetValueInfo()->TryGetIntConstantValue(&constVal) && constVal != 0)
        {
            // Condition is a known non-zero constant: the branch is never taken.
            instr->FreeSrc1();
            if (instr->GetSrc2())
            {
                instr->FreeSrc2();
            }
            instr->m_opcode = Js::OpCode::Nop;
            return true;
        }
        return false;

    default:
        return false;
    }

    this->OptConstFoldBr(!!result, instr);

    return true;
}
// Attempts to constant-fold a unary operation whose src1 is the integer constant
// intConstantValue. On success, the instruction is rewritten into a constant load
// (Ld_I4/LdC_A_I4 when the folded result is an int, LdC_F8_R8/LdC_A_R8 when it is a
// float), *pDstVal receives the constant's value (unless OptDst is left to copy
// src1Val), and true is returned. Returns false when folding is disabled, unsafe
// (would produce -0.0 or overflow int32), or the opcode is not handled.
bool
GlobOpt::OptConstFoldUnary(
    IR::Instr * *pInstr,
    const int32 intConstantValue,
    const bool isUsingOriginalSrc1Value,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value = 0;
    IR::Opnd *constOpnd;
    bool isInt = true;
    bool doSetDstVal = true;
    FloatConstType fValue = 0.0;

    if (!DoConstFold())
    {
        return false;
    }

    // Only fold when the result lands in a register (or there is no dst at all).
    if (instr->GetDst() && !instr->GetDst()->IsRegOpnd())
    {
        return false;
    }

    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_A:
        if (intConstantValue == 0)
        {
            // Could fold to -0.0
            return false;
        }

        if (Int32Math::Neg(intConstantValue, &value))
        {
            // Negation overflows int32 (INT32_MIN case); don't fold.
            return false;
        }
        break;

    case Js::OpCode::Not_A:
        // Bitwise-not cannot overflow; always foldable.
        Int32Math::Not(intConstantValue, &value);
        break;

    case Js::OpCode::Ld_A:
        if (instr->HasBailOutInfo())
        {
            //The profile data for switch expr can be string and in GlobOpt we realize it is an int.
            if(instr->GetBailOutKind() == IR::BailOutExpectingString)
            {
                throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
            }
            Assert(instr->GetBailOutKind() == IR::BailOutExpectingInteger);
            instr->ClearBailOutInfo();
        }
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Conv_Num:
    case Js::OpCode::LdC_A_I4:
        // Already a number/int constant; the fold is a plain transfer of the value.
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Incr_A:
        if (Int32Math::Inc(intConstantValue, &value))
        {
            // Increment overflows int32; don't fold.
            return false;
        }
        break;

    case Js::OpCode::Decr_A:
        if (Int32Math::Dec(intConstantValue, &value))
        {
            // Decrement overflows int32; don't fold.
            return false;
        }
        break;

    // The Math.* built-ins below fold to a double result; a post-pass below
    // demotes the result back to int when it is exactly representable.
    case Js::OpCode::InlineMathAcos:
        fValue = Js::Math::Acos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAsin:
        fValue = Js::Math::Asin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAtan:
        fValue = Js::Math::Atan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathCos:
        fValue = Js::Math::Cos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathExp:
        fValue = Js::Math::Exp((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathLog:
        fValue = Js::Math::Log((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSin:
        fValue = Js::Math::Sin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSqrt:
        fValue = ::sqrt((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathTan:
        fValue = ::tan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathFround:
        // Math.fround: round-trip through float32 precision.
        fValue = (double) (float) intConstantValue;
        isInt = false;
        break;

    case Js::OpCode::InlineMathAbs:
        if (intConstantValue == INT32_MIN)
        {
            if (instr->GetDst()->IsInt32())
            {
                // if dst is an int (e.g. in asm.js), we should coerce it, not convert to float
                value = static_cast<int32>(2147483648U);
            }
            else
            {
                // Rejit with AggressiveIntTypeSpecDisabled for Math.abs(INT32_MIN) because it causes dst
                // to be float type which could be different with previous type spec result in LoopPrePass
                throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
            }
        }
        else
        {
            value = ::abs(intConstantValue);
        }
        break;

    case Js::OpCode::InlineMathClz:
        // Math.clz32: count leading zeros. _BitScanReverse finds the highest set
        // bit; a zero input has no set bit, so clz32(0) == 32.
        DWORD clz;
        if (_BitScanReverse(&clz, intConstantValue))
        {
            value = 31 - clz;
        }
        else
        {
            value = 32;
        }
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::Ctz:
        // Count trailing zeros (Wasm only). ctz(0) == 32 by convention.
        Assert(func->GetJITFunctionBody()->IsWasmFunction());
        Assert(!instr->HasBailOutInfo());
        DWORD ctz;
        if (_BitScanForward(&ctz, intConstantValue))
        {
            value = ctz;
        }
        else
        {
            value = 32;
        }
        break;

    // floor/ceil/round of an int constant is the value itself.
    case Js::OpCode::InlineMathFloor:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathCeil:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathRound:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::ToVar:
        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            // Value can't be represented as a tagged int; leave the ToVar alone.
            return false;
        }
        else
        {
            value = intConstantValue;
            instr->ClearBailOutInfo();
            break;
        }

    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    Assert(!instr->HasBailOutInfo()); // If we are, in fact, successful in constant folding the instruction, there is no point in having the bailoutinfo around anymore.
                                      // Make sure that it is cleared if it was initially present.
    if (!isInt)
    {
        // Demote the float result back to int when it is exactly representable.
        value = (int32)fValue;
        if (fValue == (double)value)
        {
            isInt = true;
        }
    }

    if (isInt)
    {
        constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %d\n"), value);
    }
    else
    {
        constOpnd = IR::FloatConstOpnd::New(fValue, TyFloat64, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %f\n"), fValue);
    }
    instr->ReplaceSrc1(constOpnd);

    // Let the optimizer see (and value-track) the new constant source.
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (isInt)
    {
        if (dstSym->IsSingleDef())
        {
            dstSym->SetIsIntConst(value);
        }

        if (doSetDstVal)
        {
            *pDstVal = GetIntConstantValue(value, instr, dst);
        }

        if (IsTypeSpecPhaseOff(this->func))
        {
            // Type spec off: keep the dst as a var constant load.
            instr->m_opcode = Js::OpCode::LdC_A_I4;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            // Type spec on: load directly into an int32-specialized dst.
            instr->m_opcode = Js::OpCode::Ld_I4;
            this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);

            // ToInt32Dst may have switched to the type-specialized sym; mark it too.
            StackSym * currDstSym = instr->GetDst()->AsRegOpnd()->m_sym;
            if (currDstSym->IsSingleDef())
            {
                currDstSym->SetIsIntConst(value);
            }
        }
    }
    else
    {
        *pDstVal = NewFloatConstantValue(fValue, dst);

        if (IsTypeSpecPhaseOff(this->func))
        {
            instr->m_opcode = Js::OpCode::LdC_A_R8;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            instr->m_opcode = Js::OpCode::LdC_F8_R8;
            this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
        }
    }
    return true;
}
  8723. //------------------------------------------------------------------------------------------------------
  8724. // Type specialization
  8725. //------------------------------------------------------------------------------------------------------
  8726. bool
  8727. GlobOpt::IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val)
  8728. {
  8729. Assert(src);
  8730. Assert(val);
  8731. ValueInfo *valueInfo = val->GetValueInfo();
  8732. Assert(valueInfo->IsLikelyInt());
  8733. // If it is not known that the operand is definitely an int, the operand is not already type-specialized, and it's not live
  8734. // in the loop landing pad (if we're in a loop), it's probably not worth type-specializing this instruction. The common case
  8735. // where type-specializing this would be bad is where the operations are entirely on properties or array elements, where the
  8736. // ratio of FromVars and ToVars to the number of actual operations is high, and the conversions would dominate the time
  8737. // spent. On the other hand, if we're using a function formal parameter more than once, it would probably be worth
  8738. // type-specializing it, hence the IsDead check on the operands.
  8739. return
  8740. valueInfo->IsInt() ||
  8741. valueInfo->HasIntConstantValue(true) ||
  8742. !src->GetIsDead() ||
  8743. !src->IsRegOpnd() ||
  8744. this->IsInt32TypeSpecialized(src->AsRegOpnd()->m_sym, this->currentBlock) ||
  8745. (this->currentBlock->loop && this->IsLive(src->AsRegOpnd()->m_sym, this->currentBlock->loop->landingPad));
  8746. }
  8747. bool
  8748. GlobOpt::IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst)
  8749. {
  8750. Assert(dst);
  8751. const auto sym = dst->AsRegOpnd()->m_sym;
  8752. return
  8753. this->IsInt32TypeSpecialized(sym, this->currentBlock) ||
  8754. (this->currentBlock->loop && this->IsLive(sym, this->currentBlock->loop->landingPad));
  8755. }
  8756. bool
  8757. GlobOpt::IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val)
  8758. {
  8759. Assert(instr);
  8760. const auto src1 = instr->GetSrc1();
  8761. const auto src2 = instr->GetSrc2();
  8762. // In addition to checking each operand and the destination, if for any reason we only have to do a maximum of two
  8763. // conversions instead of the worst-case 3 conversions, it's probably worth specializing.
  8764. if (IsWorthSpecializingToInt32DueToSrc(src1, src1Val) ||
  8765. (src2Val && IsWorthSpecializingToInt32DueToSrc(src2, src2Val)))
  8766. {
  8767. return true;
  8768. }
  8769. IR::Opnd *dst = instr->GetDst();
  8770. if (!dst || IsWorthSpecializingToInt32DueToDst(dst))
  8771. {
  8772. return true;
  8773. }
  8774. if (dst->IsEqual(src1) || (src2Val && (dst->IsEqual(src2) || src1->IsEqual(src2))))
  8775. {
  8776. return true;
  8777. }
  8778. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  8779. // Skip useless Ld_A's
  8780. do
  8781. {
  8782. switch (instrNext->m_opcode)
  8783. {
  8784. case Js::OpCode::Ld_A:
  8785. if (!dst->IsEqual(instrNext->GetSrc1()))
  8786. {
  8787. goto done;
  8788. }
  8789. dst = instrNext->GetDst();
  8790. break;
  8791. case Js::OpCode::LdFld:
  8792. case Js::OpCode::LdRootFld:
  8793. case Js::OpCode::LdRootFldForTypeOf:
  8794. case Js::OpCode::LdFldForTypeOf:
  8795. case Js::OpCode::LdElemI_A:
  8796. case Js::OpCode::ByteCodeUses:
  8797. break;
  8798. default:
  8799. goto done;
  8800. }
  8801. instrNext = instrNext->GetNextRealInstrOrLabel();
  8802. } while (true);
  8803. done:
  8804. // If the next instr could also be type specialized, then it is probably worth it.
  8805. if ((instrNext->GetSrc1() && dst->IsEqual(instrNext->GetSrc1())) || (instrNext->GetSrc2() && dst->IsEqual(instrNext->GetSrc2())))
  8806. {
  8807. switch (instrNext->m_opcode)
  8808. {
  8809. case Js::OpCode::Add_A:
  8810. case Js::OpCode::Sub_A:
  8811. case Js::OpCode::Mul_A:
  8812. case Js::OpCode::Div_A:
  8813. case Js::OpCode::Rem_A:
  8814. case Js::OpCode::Xor_A:
  8815. case Js::OpCode::And_A:
  8816. case Js::OpCode::Or_A:
  8817. case Js::OpCode::Shl_A:
  8818. case Js::OpCode::Shr_A:
  8819. case Js::OpCode::Incr_A:
  8820. case Js::OpCode::Decr_A:
  8821. case Js::OpCode::Neg_A:
  8822. case Js::OpCode::Not_A:
  8823. case Js::OpCode::Conv_Num:
  8824. case Js::OpCode::BrEq_I4:
  8825. case Js::OpCode::BrTrue_I4:
  8826. case Js::OpCode::BrFalse_I4:
  8827. case Js::OpCode::BrGe_I4:
  8828. case Js::OpCode::BrGt_I4:
  8829. case Js::OpCode::BrLt_I4:
  8830. case Js::OpCode::BrLe_I4:
  8831. case Js::OpCode::BrNeq_I4:
  8832. return true;
  8833. }
  8834. }
  8835. return false;
  8836. }
  8837. bool
  8838. GlobOpt::TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal)
  8839. {
  8840. Assert(src1Val->GetValueInfo()->IsNumber());
  8841. if (this->IsLoopPrePass())
  8842. {
  8843. return false;
  8844. }
  8845. switch (instr->m_opcode)
  8846. {
  8847. case Js::OpCode::Conv_Num:
  8848. // Optimize Conv_Num away since we know this is a number
  8849. instr->m_opcode = Js::OpCode::Ld_A;
  8850. return false;
  8851. }
  8852. return false;
  8853. }
// Dispatches type specialization for a unary instruction. Order of attempts:
// inline built-ins (which dictate their own specialization), LdLen_A, int
// specialization when the source has a usable int range, then float/number
// fallbacks using the original (pre-tracking) source value.
// Returns true when the instruction was handled by one of the specialization
// paths; false when it is left unspecialized.
bool
GlobOpt::TypeSpecializeUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;
    Assert(src1Val);

    // We don't need to do typespec for asmjs
    if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
    {
        return false;
    }

    IR::Instr *&instr = *pInstr;
    int32 min, max;

    // Inline built-ins explicitly specify how srcs/dst must be specialized.
    if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        TypeSpecializeInlineBuiltInUnary(pInstr, &src1Val, pDstVal, src1OriginalVal, redoTypeSpecRef);
        return true;
    }

    // Consider: If type spec wasn't completely done, make sure that we don't type-spec the dst 2nd time.
    if(instr->m_opcode == Js::OpCode::LdLen_A && TypeSpecializeLdLen(&instr, &src1Val, pDstVal, forceInvariantHoistingRef))
    {
        return true;
    }

    if (!src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, this->DoAggressiveIntTypeSpec()))
    {
        // No usable int range: fall back to the original source value and try
        // float (or generic number) specialization instead.
        src1Val = src1OriginalVal;
        if (src1Val->GetValueInfo()->IsLikelyFloat())
        {
            // Try to type specialize to float
            return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
        }
        else if (src1Val->GetValueInfo()->IsNumber())
        {
            return TypeSpecializeNumberUnary(instr, src1Val, pDstVal);
        }
        // Last resort: platform-dependent helper may still float-specialize.
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    return this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, min, max, src1OriginalVal, redoTypeSpecRef);
}
// Type-specializes the sources and dst of an inline built-in unary instruction
// according to the built-in's declared requirements (the caller treats the
// instruction as fully handled after this returns). Handles: the all-float math
// built-ins, Math.abs (int with INT_MIN bailout, else float), floor/ceil/round
// (float src, int dst), Array.prototype.pop, and Math.clz32.
void
GlobOpt::TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode);   // From actual instr, not profile based.
    Assert(builtInId != Js::BuiltinFunction::None);

    // Consider using different bailout for float/int FromVars, so that when the arg cannot be converted to number we don't disable
    // type spec for other parts of the big function but rather just don't inline that built-in instr.
    // E.g. could do that if the value is not likelyInt/likelyFloat.

    Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);

    bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_Args) == Js::BuiltInFlags::BIF_TypeSpecUnaryToFloat;
    if (areAllArgsAlwaysFloat)
    {
        // InlineMathAcos, InlineMathAsin, InlineMathAtan, InlineMathCos, InlineMathExp, InlineMathLog, InlineMathSin, InlineMathSqrt, InlineMathTan.
        Assert(this->DoFloatTypeSpec());

        // Type-spec the src.
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline built-ins the args have to be type-specialized to float, but something failed during the process.");

        // Type-spec the dst.
        this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathAbs)
    {
        // Consider the case when the value is unknown - because of bailout in abs we may disable type spec for the whole function which is too much.
        // First, try int.
        int minVal, maxVal;
        bool shouldTypeSpecToInt = src1Val->GetValueInfo()->GetIntValMinMax(&minVal, &maxVal, /* doAggressiveIntTypeSpec = */ true);
        if (shouldTypeSpecToInt)
        {
            Assert(this->DoAggressiveIntTypeSpec());
            bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, minVal, maxVal, src1OriginalVal, redoTypeSpecRef, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (int), but something failed during the process.");

            if (!this->IsLoopPrePass())
            {
                // Create bailout for INT_MIN which does not have corresponding int value on the positive side.
                // Check int range: if we know the range is out of overflow, we do not need the bail out at all.
                if (minVal == INT32_MIN)
                {
                    GenerateBailAtOperation(&instr, IR::BailOnIntMin);
                }
            }

            // Account for ::abs(INT_MIN) == INT_MIN (which is less than 0).
            maxVal = ::max(
                ::abs(Int32Math::NearestInRangeTo(minVal, INT_MIN + 1, INT_MAX)),
                ::abs(Int32Math::NearestInRangeTo(maxVal, INT_MIN + 1, INT_MAX)));
            minVal = minVal >= 0 ? minVal : 0;
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, nullptr, IR::BailOutInvalid, minVal, maxVal, pDstVal);
        }
        else
        {
            // If we couldn't do int, do float.
            Assert(this->DoFloatTypeSpec());
            src1Val = src1OriginalVal;
            bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (float), but something failed during the process.");

            this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::InlineMathRound)
    {
        // Type specialize src to float
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline Math.floor and Math.ceil the src has to be type-specialized to float, but something failed during the process.");

        // Type specialize dst to int (full int32 range: the rounded result's range is unknown).
        this->TypeSpecializeIntDst(
            instr,
            instr->m_opcode,
            nullptr,
            src1Val,
            nullptr,
            IR::BailOutInvalid,
            INT32_MIN,
            INT32_MAX,
            pDstVal);
    }
    else if(instr->m_opcode == Js::OpCode::InlineArrayPop)
    {
        IR::Opnd *const thisOpnd = instr->GetSrc1();

        Assert(thisOpnd);

        // Ensure src1 (Array) is a var
        this->ToVarUses(instr, thisOpnd, false, src1Val);

        if(!this->IsLoopPrePass() && thisOpnd->GetValueType().IsLikelyNativeArray())
        {
            // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during the run time.
            GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
        }

        if(!instr->GetDst())
        {
            // Popped value is unused; nothing to specialize.
            return;
        }

        // Try Type Specializing the element (return item from Pop) based on the array's profile data.
        if(thisOpnd->GetValueType().IsLikelyNativeIntArray())
        {
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
        }
        else if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
        {
            this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
        }
        else
        {
            // We reached here so the Element is not yet type specialized. Ensure element is a var
            if(instr->GetDst()->IsRegOpnd())
            {
                this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
            }
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathClz)
    {
        Assert(this->DoAggressiveIntTypeSpec());
        Assert(this->DoLossyIntTypeSpec());
        // Type specialize to int (clz32 coerces its argument with a lossy ToInt32).
        bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, INT32_MIN, INT32_MAX, src1OriginalVal, redoTypeSpecRef);
        AssertMsg(retVal, "For clz32, the arg has to be type-specialized to int.");
    }
    else
    {
        AssertMsg(FALSE, "Unsupported built-in!");
    }
}
// Type-specializes the sources and dst of an inline built-in binary instruction
// according to the built-in's requirements. Handles: Math.atan2 (all-float),
// Math.pow (int^int when profitable on non-ARM, else float), Math.imul (int),
// Math.min/max (int when both srcs are likely int, else float), and
// Array.prototype.push (element specialized per the array's profiled type).
void
GlobOpt::TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal)
{
    IR::Instr *&instr = *pInstr;
    Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    switch(instr->m_opcode)
    {
        case Js::OpCode::InlineMathAtan2:
        {
            Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode);   // From actual instr, not profile based.
            Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);

            bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecAllToFloat) != 0;
            Assert(areAllArgsAlwaysFloat);
            Assert(this->DoFloatTypeSpec());

            // Type-spec the src1, src2 and dst.
            src1Val = src1OriginalVal;
            src2Val = src2OriginalVal;
            bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
            AssertMsg(retVal, "For pow and atnan2 the args have to be type-specialized to float, but something failed during the process.");

            break;
        }

        case Js::OpCode::InlineMathPow:
        {
#ifndef _M_ARM
            // On non-ARM: when the exponent is likely int, try int^int specialization
            // with a non-negative exponent; otherwise base goes to float.
            if (src2Val->GetValueInfo()->IsLikelyInt())
            {
                bool lossy = false;

                this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, lossy);

                IR::Opnd* src1 = instr->GetSrc1();
                int32 valueMin, valueMax;
                if (src1Val->GetValueInfo()->IsLikelyInt() &&
                    this->DoPowIntIntTypeSpec() &&
                    src2Val->GetValueInfo()->GetIntValMinMax(&valueMin, &valueMax, this->DoAggressiveIntTypeSpec()) &&
                    valueMin >= 0)

                {
                    this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
                    this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);

                    if(!this->IsLoopPrePass())
                    {
                        // int^int can overflow int32; bail out to the generic path when it does.
                        GenerateBailAtOperation(&instr, IR::BailOutOnPowIntIntOverflow);
                    }
                }
                else
                {
                    this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);
                    TypeSpecializeFloatDst(instr, nullptr, src1Val, src2Val, pDstVal);
                }
            }
            else
            {
#endif
                this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
#ifndef _M_ARM
            }
#endif
            break;
        }

        case Js::OpCode::InlineMathImul:
        {
            Assert(this->DoAggressiveIntTypeSpec());
            Assert(this->DoLossyIntTypeSpec());

            // Type specialize to int (imul coerces both args with a lossy ToInt32).
            bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, false /* skipDst */);

            AssertMsg(retVal, "For imul, the args have to be type-specialized to int but something failed during the process.");
            break;
        }

        case Js::OpCode::InlineMathMin:
        case Js::OpCode::InlineMathMax:
        {
            if(src1Val->GetValueInfo()->IsLikelyInt() && src2Val->GetValueInfo()->IsLikelyInt())
            {
                // Compute resulting range info
                int32 min1, max1, min2, max2, newMin, newMax;

                Assert(this->DoAggressiveIntTypeSpec());
                src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
                src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
                if (instr->m_opcode == Js::OpCode::InlineMathMin)
                {
                    newMin = min(min1, min2);
                    newMax = min(max1, max2);
                }
                else
                {
                    Assert(instr->m_opcode == Js::OpCode::InlineMathMax);
                    newMin = max(min1, min2);
                    newMax = max(max1, max2);
                }
                // Type specialize to int
                bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, newMin, newMax, false /* skipDst */);
                AssertMsg(retVal, "For min and max, the args have to be type-specialized to int if any one of the sources is an int, but something failed during the process.");
            }

            // Couldn't type specialize to int, type specialize to float
            else
            {
                Assert(this->DoFloatTypeSpec());
                src1Val = src1OriginalVal;
                src2Val = src2OriginalVal;
                bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
                AssertMsg(retVal, "For min and max, the args have to be type-specialized to float if any one of the sources is a float, but something failed during the process.");
            }
            break;
        }
        case Js::OpCode::InlineArrayPush:
        {
            IR::Opnd *const thisOpnd = instr->GetSrc1();

            Assert(thisOpnd);

            if(instr->GetDst() && instr->GetDst()->IsRegOpnd())
            {
                // Set the dst as live here, as the built-ins return early from the TypeSpecialization functions - before the dst is marked as live.
                // Also, we are not specializing the dst separately and we are skipping the dst to be handled when we specialize the instruction above.
                this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
            }

            // Ensure src1 (Array) is a var
            this->ToVarUses(instr, thisOpnd, false, src1Val);

            if(!this->IsLoopPrePass())
            {
                if(thisOpnd->GetValueType().IsLikelyNativeArray())
                {
                    // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during run time.
                    GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
                }
                else
                {
                    GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
                }
            }

            // Try Type Specializing the element based on the array's profile data.
            if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
            {
                // Float arrays want the original (pre-tracking) values for the element.
                src1Val = src1OriginalVal;
                src2Val = src2OriginalVal;
            }
            if((thisOpnd->GetValueType().IsLikelyNativeIntArray() && this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, true))
                || (thisOpnd->GetValueType().IsLikelyNativeFloatArray() && this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal)))
            {
                break;
            }

            // The Element is not yet type specialized. Ensure element is a var
            this->ToVarUses(instr, instr->GetSrc2(), false, src2Val);
            break;
        }
    }
}
  9171. void
  9172. GlobOpt::TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal)
  9173. {
  9174. IR::Instr *&instr = *pInstr;
  9175. Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
  9176. if (instr->m_opcode == Js::OpCode::InlineMathRandom)
  9177. {
  9178. Assert(this->DoFloatTypeSpec());
  9179. // Type specialize dst to float
  9180. this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
  9181. }
  9182. }
// Fallback used after int specialization has been ruled out: decides whether to
// attempt float specialization or leave the instruction unspecialized.
// Only ever float-specializes on !INT32VAR (32-bit tagged-int) builds; on
// INT32VAR builds it always returns false.
bool
GlobOpt::TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal)
{
    // It has been determined that this instruction cannot be int-specialized. We need to determine whether to attempt to
    // float-specialize the instruction, or leave it unspecialized.
#if !INT32VAR
    Value*& src1Val = *pSrc1Val;
    if(src1Val->GetValueInfo()->IsLikelyUntaggedInt())
    {
        // An input range is completely outside the range of an int31. Even if the operation may overflow, it is
        // unlikely to overflow on these operations, so we leave it unspecialized on 64-bit platforms. However, on
        // 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is significantly slower to
        // use in an unspecialized operation compared to a tagged int. So, try to float-specialize the instruction.
        src1Val = src1OriginalVal;
        return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
    }
#endif
    return false;
}
// Int-type-specializes a binary instruction's sources (and optionally its dst,
// with value range [min, max]). Currently only supports inline built-ins; any
// other opcode asserts and returns false. For InlineArrayPush only the element
// (src2) is specialized; for the rest both sources are converted to int32.
// Returns true when specialization was performed.
bool
GlobOpt::TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst /* = false */)
{
    // Consider moving the code for int type spec-ing binary functions here.
    IR::Instr *&instr = *pInstr;
    bool lossy = false;

    if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        if(instr->m_opcode == Js::OpCode::InlineArrayPush)
        {
            int32 intConstantValue;
            bool isIntConstMissingItem = src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue);

            if(isIntConstMissingItem)
            {
                // A constant equal to the native-int-array "missing item" sentinel
                // cannot be stored in a specialized int array.
                isIntConstMissingItem = Js::SparseArraySegment<int>::IsMissingItem(&intConstantValue);
            }

            // Don't specialize if the element is not likelyInt or an IntConst which is a missing item value.
            if(!(src2Val->GetValueInfo()->IsLikelyInt()) || isIntConstMissingItem)
            {
                return false;
            }
            // We don't want to specialize both the source operands, though it is a binary instr.
            IR::Opnd * elementOpnd = instr->GetSrc2();
            this->ToInt32(instr, elementOpnd, this->currentBlock, src2Val, nullptr, lossy);
        }
        else
        {
            IR::Opnd *src1 = instr->GetSrc1();
            this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);

            IR::Opnd *src2 = instr->GetSrc2();
            this->ToInt32(instr, src2, this->currentBlock, src2Val, nullptr, lossy);
        }

        if(!skipDst)
        {
            IR::Opnd *dst = instr->GetDst();
            if (dst)
            {
                TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, min, max, pDstVal);
            }
        }
        return true;
    }
    else
    {
        AssertMsg(false, "Yet to move code for other binary functions here");
        return false;
    }
}
// Attempts to type-specialize a unary (or unary-shaped) instruction to int32.
//
// Parameters:
//   pInstr          - in/out: the instruction; may be replaced (e.g. by CSE or bailout generation).
//   pSrc1Val        - in/out: the tracked value of src1 (required).
//   pDstVal         - out: receives the new dst value when specialization succeeds.
//   min, max        - the known int32 range of src1's value.
//   src1OriginalVal - src1's value before any overflow-ignoring adjustment; used for lossy
//                     conversions and for float fallback.
//   redoTypeSpecRef - out: set to true when CSE rewrote the expression and type spec must be redone.
//   skipDst         - when true, do not create/specialize the dst (caller handles it).
//
// Returns true when the instruction was handled (specialized, const-folded, or intentionally kept
// as int work); false when the caller should fall back to other specialization paths.
bool
GlobOpt::TypeSpecializeIntUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    int32 min,
    int32 max,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    bool isTransfer = false;
    Js::OpCode opcode;
    int32 newMin, newMax;
    bool lossy = false;
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
    bool ignoredNegativeZero = false;
    bool checkTypeSpecWorth = false;

    // If the source sym is known not to be an int, don't bother with int spec; try float instead.
    if(instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    AddSubConstantInfo addSubConstantInfo;

    switch(instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (this->IsInt32TypeSpecialized(sym, this->currentBlock) == false)
            {
                // Type specializing an Ld_A isn't worth it, unless the src
                // is already type specialized.
                return false;
            }
        }
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::Conv_Num:
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::LdC_A_I4:
        // Constant load: the resulting range is exactly the constant.
        newMin = newMax = instr->GetSrc1()->AsIntConstOpnd()->AsInt32();
        opcode = Js::OpCode::Ld_I4;
        break;

    case Js::OpCode::Neg_A:
        // Negating a value whose range includes 0 can produce -0, which is not an int.
        if (min <= 0 && max >= 0)
        {
            if(instr->ShouldCheckForNegativeZero())
            {
                // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
                if(!DoAggressiveIntTypeSpec())
                {
                    // May result in -0
                    // Consider adding a dynamic check for src1 == 0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == 0 && max == 0)
                {
                    // Always results in -0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnNegativeZero;
            }
            else
            {
                ignoredNegativeZero = true;
            }
        }
        // -min gives the new max; only INT32_MIN overflows on negation.
        if (Int32Math::Neg(min, &newMax))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == max)
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMax = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        // -max gives the new min.
        if (Int32Math::Neg(max, &newMin))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMin = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if(!instr->ShouldCheckForIntOverflow() && newMin > newMax)
        {
            // When ignoring overflow, the range needs to account for overflow. Since MIN_INT is the only int32 value that
            // overflows on Neg, and the value resulting from overflow is also MIN_INT, if calculating only the new min or new
            // max overflowed but not both, then the new min will be greater than the new max. In that case we need to consider
            // the full range of int32s as possible resulting values.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        opcode = Js::OpCode::Neg_I4;
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Not_A:
        if(!DoLossyIntTypeSpec())
        {
            return false;
        }
        // Bitwise-not coerces via ToInt32, so the conversion is lossy by definition.
        this->PropagateIntRangeForNot(min, max, &newMin, &newMax);
        opcode = Js::OpCode::Not_I4;
        lossy = true;
        break;

    case Js::OpCode::Incr_A:
        do // while(false)
        {
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Inc(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
                    // we use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Inc(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMax = INT32_MAX;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        // Record (sym + 1) so induction-variable / relative-bound tracking can use it.
        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, 1);
        }

        opcode = Js::OpCode::Add_I4;
        if (!this->IsLoopPrePass())
        {
            // Lower Incr to (src1 + 1); the const operand is only materialized outside the prepass.
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Decr_A:
        do // while(false)
        {
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Dec(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints, we
                    // use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Dec(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMin = INT32_MIN;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        // Record (sym - 1) for relative-bound tracking, mirroring the Incr_A case.
        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, -1);
        }

        opcode = Js::OpCode::Sub_I4;
        if (!this->IsLoopPrePass())
        {
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        // Try to fold the branch entirely when the source range decides it.
        if(DoConstFold() && !IsLoopPrePass() && TryOptConstFoldBrFalse(instr, src1Val, min, max))
        {
            return true;
        }
        bool specialize = true;
        if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (this->IsInt32TypeSpecialized(sym, this->currentBlock) == false)
            {
                // Type specializing a BrTrue_A/BrFalse_A isn't worth it, unless the src
                // is already type specialized
                specialize = false;
            }
        }
        // Update bound info on the taken/not-taken paths even if we end up not specializing.
        if(instr->m_opcode == Js::OpCode::BrTrue_A)
        {
            UpdateIntBoundsForNotEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrTrue_I4;
        }
        else
        {
            UpdateIntBoundsForEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrFalse_I4;
        }
        if(!specialize)
        {
            return false;
        }

        newMin = 2; newMax = 1;  // We'll assert if we make a range where min > max
        break;
    }

    case Js::OpCode::MultiBr:
        newMin = min;
        newMax = max;
        opcode = instr->m_opcode;
        break;

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::StElemC:
        // Stores into native-float arrays must use the original (possibly float) source value.
        if(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyAnyArrayWithNativeFloatValues())
        {
            src1Val = src1OriginalVal;
        }
        return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
#if !FLOATVAR
    case Js::OpCode::StSlotBoxTemp:
#endif
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StFldStrict:
    case Js::OpCode::StRootFldStrict:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    // For this one we need to implement type specialization
    //case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::Ret:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
        // These opcodes consume a var; no int specialization is performed here.
        return false;

    default:
        if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
        {
            newMin = min;
            newMax = max;
            opcode = instr->m_opcode;
            break; // Note: we must keep checkTypeSpecWorth = false to make sure we never return false from this function.
        }
        return false;
    }

    // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it (won't
    // leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value needs to be
    // guaranteed to be an int
    if(checkTypeSpecWorth &&
        !ignoredIntOverflow &&
        !ignoredNegativeZero &&
        instr->ShouldCheckForIntOverflow() &&
        !IsWorthSpecializingToInt32(instr, src1Val))
    {
        // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
        // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
        // the following cases.
        // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to be
        //   an int, but since we're not going to specialize this instruction, there won't be a bailout check.
        // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
        //   that case, especially if the dst sym is live on the back-edge.
        if(bailOutKind == IR::BailOutInvalid &&
            instr->GetDst() &&
            (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
        {
            *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, nullptr);
        }

        // Undo the src2 that Incr_A/Decr_A may have materialized above.
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }

        return false;
    }

    this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
    this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;

    {
        // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
        Value *src1IndirIndexVal = nullptr;
        Value *src2Val = nullptr;
        if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
        {
            *redoTypeSpecRef = true;
            return false;
        }
    }

    const Js::OpCode originalOpCode = instr->m_opcode;
    if (!this->IsLoopPrePass())
    {
        // No re-write on prepass
        instr->m_opcode = opcode;
    }

    Value *src1ValueToSpecialize = src1Val;
    if(lossy)
    {
        // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
        // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
        // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
        // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
        // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
        // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
        src1ValueToSpecialize = src1OriginalVal;
    }

    // Make sure the srcs are specialized
    IR::Opnd *src1 = instr->GetSrc1();
    this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, lossy);

    if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, bailOutKind);
    }

    if (!skipDst)
    {
        IR::Opnd *dst = instr->GetDst();
        if (dst)
        {
            AssertMsg(!(isTransfer && !this->IsLoopPrePass()) || min == newMin && max == newMax, "If this is just a copy, old/new min/max should be the same");
            TypeSpecializeIntDst(
                instr,
                originalOpCode,
                isTransfer ? src1Val : nullptr,
                src1Val,
                nullptr,
                bailOutKind,
                newMin,
                newMax,
                pDstVal,
                addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
        }
    }

    if(bailOutKind == IR::BailOutInvalid)
    {
        GOPT_TRACE(_u("Type specialized to INT\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
        {
            Output::Print(_u("Type specialized to INT: "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
    }
    else
    {
        GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
        if(bailOutKind & IR::BailOutOnOverflow)
        {
            GOPT_TRACE(_u("    Overflow\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
        if(bailOutKind & IR::BailOutOnNegativeZero)
        {
            GOPT_TRACE(_u("    Zero\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
    }

    return true;
}
  9734. void
  9735. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  9736. {
  9737. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, ValueType::GetInt(IntConstantBounds(newMin, newMax).IsLikelyTaggable()), newMin, newMax, pDstVal, addSubConstantInfo);
  9738. }
  9739. void
  9740. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  9741. {
  9742. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, valueType, 0, 0, pDstVal, addSubConstantInfo);
  9743. }
// Creates/updates the tracked value for the dst of an int-specialized instruction and marks the
// dst sym as int32-specialized in the current block.
//
// Parameters:
//   instr              - the (already opcode-rewritten) instruction whose dst is being specialized.
//   originalOpCode     - the pre-rewrite opcode; used to decide -0 propagation rules.
//   valToTransfer      - non-null for pure copies (Ld_A-style); must equal src1Value.
//   src1Value/src2Value- source values, used for prepass precision and -0 tracking.
//   bailOutKind        - bailout checks attached to the instruction (e.g. BailOutOnNegativeZero).
//   valueType          - the dst's value type; may be widened on the prepass.
//   newMin/newMax      - the dst's int range (ignored when the value info is imprecise).
//   pDstVal            - out: receives the dst's new value.
//   addSubConstantInfo - non-null for (sym +/- constant) patterns; enables relative bound tracking.
void
GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
{
    Assert(valueType.IsInt() || (valueType.IsNumber() && valueType.IsLikelyInt() && newMin == 0 && newMax == 0));
    Assert(!valToTransfer || valToTransfer == src1Value);
    Assert(!addSubConstantInfo || addSubConstantInfo->HasInfo());

    IR::Opnd *dst = instr->GetDst();
    Assert(dst);

    bool isValueInfoPrecise;
    if(IsLoopPrePass())
    {
        // On the prepass the value may change on later iterations, so the type may be widened
        // and the range information may not be trustworthy.
        valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, &isValueInfoPrecise);
    }
    else
    {
        isValueInfoPrecise = true;
    }

    // If dst has a circular reference in a loop, it probably won't get specialized. Don't mark the dst as type-specialized on
    // the pre-pass. With aggressive int spec though, it will take care of bailing out if necessary so there's no need to assume
    // that the dst will be a var even if it's live on the back-edge. Also if the op always produces an int32, then there's no
    // ambiguity in the dst's value type even in the prepass.
    if (!DoAggressiveIntTypeSpec() && this->IsLoopPrePass() && !valueType.IsInt())
    {
        if (dst->IsRegOpnd())
        {
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        return;
    }

    const IntBounds *dstBounds = nullptr;
    if(addSubConstantInfo && !addSubConstantInfo->SrcValueIsLikelyConstant() && DoTrackRelativeIntBounds())
    {
        Assert(!ignoredIntOverflowForCurrentInstr);

        // Track bounds for add or sub with a constant. For instance, consider (b = a + 2). The value of 'b' should track that
        // it is equal to (the value of 'a') + 2. Additionally, the value of 'b' should inherit the bounds of 'a', offset by
        // the constant value.
        if(!valueType.IsInt() || !isValueInfoPrecise)
        {
            // Imprecise range: fall back to the full int32 range for the constant bounds.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        dstBounds =
            IntBounds::Add(
                addSubConstantInfo->SrcValue(),
                addSubConstantInfo->Offset(),
                isValueInfoPrecise,
                IntConstantBounds(newMin, newMax),
                alloc);
    }

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // pre-pass.
    if (valToTransfer)
    {
        // If this is just a copy, no need for creating a new value.
        Assert(!addSubConstantInfo);
        *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
        this->InsertNewValue(*pDstVal, dst);
    }
    else if (valueType.IsInt() && isValueInfoPrecise)
    {
        // Determine whether a -0 result was already ruled out, either by the sources' history or
        // by a negative-zero bailout on this instruction; only relevant when 0 is in range.
        bool wasNegativeZeroPreventedByBailout = false;
        if(newMin <= 0 && newMax >= 0)
        {
            switch(originalOpCode)
            {
                case Js::OpCode::Add_A:
                    // -0 + -0 == -0
                    Assert(src1Value);
                    Assert(src2Value);
                    wasNegativeZeroPreventedByBailout =
                        src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout() &&
                        src2Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Sub_A:
                    // -0 - 0 == -0
                    Assert(src1Value);
                    wasNegativeZeroPreventedByBailout = src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Neg_A:
                case Js::OpCode::Mul_A:
                case Js::OpCode::Div_A:
                case Js::OpCode::Rem_A:
                    wasNegativeZeroPreventedByBailout = !!(bailOutKind & IR::BailOutOnNegativeZero);
                    break;
            }
        }

        *pDstVal =
            dstBounds
                ? NewIntBoundedValue(valueType, dstBounds, wasNegativeZeroPreventedByBailout, nullptr)
                : NewIntRangeValue(newMin, newMax, wasNegativeZeroPreventedByBailout, nullptr);
    }
    else
    {
        *pDstVal = dstBounds ? NewIntBoundedValue(valueType, dstBounds, false, nullptr) : NewGenericValue(valueType);
    }

    if(addSubConstantInfo || updateInductionVariableValueNumber)
    {
        TrackIntSpecializedAddSubConstant(instr, addSubConstantInfo, *pDstVal, !!dstBounds);
    }

    SetValue(&blockData, *pDstVal, dst);

    AssertMsg(dst->IsRegOpnd(), "What else?");
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
}
  9848. bool
  9849. GlobOpt::TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef)
  9850. {
  9851. IR::Instr *&instr = *pInstr;
  9852. int32 min1 = INT32_MIN, max1 = INT32_MAX, min2 = INT32_MIN, max2 = INT32_MAX, newMin, newMax, tmp;
  9853. Js::OpCode opcode;
  9854. IR::Opnd *src1, *src2;
  9855. Value *&src1Val = *pSrc1Val;
  9856. Value *&src2Val = *pSrc2Val;
  9857. // We don't need to do typespec for asmjs
  9858. if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
  9859. {
  9860. return false;
  9861. }
  9862. if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  9863. {
  9864. this->TypeSpecializeInlineBuiltInBinary(pInstr, src1Val, src2Val, pDstVal, src1OriginalVal, src2OriginalVal);
  9865. return true;
  9866. }
  9867. if (src1Val)
  9868. {
  9869. src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
  9870. }
  9871. if (src2Val)
  9872. {
  9873. src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
  9874. }
  9875. // Type specialize binary operators to int32
  9876. bool src1Lossy = true;
  9877. bool src2Lossy = true;
  9878. IR::BailOutKind bailOutKind = IR::BailOutInvalid;
  9879. bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
  9880. bool ignoredNegativeZero = false;
  9881. bool skipSrc2 = false;
  9882. bool skipDst = false;
  9883. bool needsBoolConv = false;
  9884. AddSubConstantInfo addSubConstantInfo;
  9885. switch (instr->m_opcode)
  9886. {
  9887. case Js::OpCode::Or_A:
  9888. if (!DoLossyIntTypeSpec())
  9889. {
  9890. return false;
  9891. }
  9892. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9893. opcode = Js::OpCode::Or_I4;
  9894. break;
  9895. case Js::OpCode::And_A:
  9896. if (!DoLossyIntTypeSpec())
  9897. {
  9898. return false;
  9899. }
  9900. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9901. opcode = Js::OpCode::And_I4;
  9902. break;
  9903. case Js::OpCode::Xor_A:
  9904. if (!DoLossyIntTypeSpec())
  9905. {
  9906. return false;
  9907. }
  9908. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9909. opcode = Js::OpCode::Xor_I4;
  9910. break;
  9911. case Js::OpCode::Shl_A:
  9912. if (!DoLossyIntTypeSpec())
  9913. {
  9914. return false;
  9915. }
  9916. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9917. opcode = Js::OpCode::Shl_I4;
  9918. break;
  9919. case Js::OpCode::Shr_A:
  9920. if (!DoLossyIntTypeSpec())
  9921. {
  9922. return false;
  9923. }
  9924. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9925. opcode = Js::OpCode::Shr_I4;
  9926. break;
  9927. case Js::OpCode::ShrU_A:
  9928. if (!DoLossyIntTypeSpec())
  9929. {
  9930. return false;
  9931. }
  9932. if (min1 < 0 && IntConstantBounds(min2, max2).And_0x1f().Contains(0))
  9933. {
  9934. // Src1 may be too large to represent as a signed int32, and src2 may be zero. Unless the resulting value is only
  9935. // used as a signed int32 (hence allowing us to ignore the result's sign), don't specialize the instruction.
  9936. if (!instr->ignoreIntOverflow)
  9937. return false;
  9938. ignoredIntOverflow = true;
  9939. }
  9940. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9941. opcode = Js::OpCode::ShrU_I4;
  9942. break;
  9943. case Js::OpCode::BrUnLe_A:
  9944. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  9945. // int already, so require that both sources are likely int for folding.
  9946. if (DoConstFold() &&
  9947. !IsLoopPrePass() &&
  9948. TryOptConstFoldBrUnsignedGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  9949. {
  9950. return true;
  9951. }
  9952. if (min1 >= 0 && min2 >= 0)
  9953. {
  9954. // Only handle positive values since this is unsigned...
  9955. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  9956. // (INT32_MIN, INT32_MAX), so we're good.
  9957. Assert(src1Val);
  9958. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  9959. Assert(src2Val);
  9960. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  9961. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  9962. }
  9963. if (!DoLossyIntTypeSpec())
  9964. {
  9965. return false;
  9966. }
  9967. newMin = newMax = 0;
  9968. opcode = Js::OpCode::BrUnLe_I4;
  9969. break;
  9970. case Js::OpCode::BrUnLt_A:
  9971. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  9972. // int already, so require that both sources are likely int for folding.
  9973. if (DoConstFold() &&
  9974. !IsLoopPrePass() &&
  9975. TryOptConstFoldBrUnsignedLessThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  9976. {
  9977. return true;
  9978. }
  9979. if (min1 >= 0 && min2 >= 0)
  9980. {
  9981. // Only handle positive values since this is unsigned...
  9982. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  9983. // (INT32_MIN, INT32_MAX), so we're good.
  9984. Assert(src1Val);
  9985. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  9986. Assert(src2Val);
  9987. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  9988. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  9989. }
  9990. if (!DoLossyIntTypeSpec())
  9991. {
  9992. return false;
  9993. }
  9994. newMin = newMax = 0;
  9995. opcode = Js::OpCode::BrUnLt_I4;
  9996. break;
  9997. case Js::OpCode::BrUnGe_A:
  9998. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  9999. // int already, so require that both sources are likely int for folding.
  10000. if (DoConstFold() &&
  10001. !IsLoopPrePass() &&
  10002. TryOptConstFoldBrUnsignedLessThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  10003. {
  10004. return true;
  10005. }
  10006. if (min1 >= 0 && min2 >= 0)
  10007. {
  10008. // Only handle positive values since this is unsigned...
  10009. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  10010. // (INT32_MIN, INT32_MAX), so we're good.
  10011. Assert(src1Val);
  10012. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  10013. Assert(src2Val);
  10014. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  10015. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  10016. }
  10017. if (!DoLossyIntTypeSpec())
  10018. {
  10019. return false;
  10020. }
  10021. newMin = newMax = 0;
  10022. opcode = Js::OpCode::BrUnGe_I4;
  10023. break;
  10024. case Js::OpCode::BrUnGt_A:
  10025. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  10026. // int already, so require that both sources are likely int for folding.
  10027. if (DoConstFold() &&
  10028. !IsLoopPrePass() &&
  10029. TryOptConstFoldBrUnsignedGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10030. {
  10031. return true;
  10032. }
  10033. if (min1 >= 0 && min2 >= 0)
  10034. {
  10035. // Only handle positive values since this is unsigned...
  10036. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  10037. // (INT32_MIN, INT32_MAX), so we're good.
  10038. Assert(src1Val);
  10039. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  10040. Assert(src2Val);
  10041. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  10042. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  10043. }
  10044. if (!DoLossyIntTypeSpec())
  10045. {
  10046. return false;
  10047. }
  10048. newMin = newMax = 0;
  10049. opcode = Js::OpCode::BrUnGt_I4;
  10050. break;
  10051. case Js::OpCode::CmUnLe_A:
  10052. if (!DoLossyIntTypeSpec())
  10053. {
  10054. return false;
  10055. }
  10056. newMin = 0;
  10057. newMax = 1;
  10058. opcode = Js::OpCode::CmUnLe_I4;
  10059. needsBoolConv = true;
  10060. break;
  10061. case Js::OpCode::CmUnLt_A:
  10062. if (!DoLossyIntTypeSpec())
  10063. {
  10064. return false;
  10065. }
  10066. newMin = 0;
  10067. newMax = 1;
  10068. opcode = Js::OpCode::CmUnLt_I4;
  10069. needsBoolConv = true;
  10070. break;
  10071. case Js::OpCode::CmUnGe_A:
  10072. if (!DoLossyIntTypeSpec())
  10073. {
  10074. return false;
  10075. }
  10076. newMin = 0;
  10077. newMax = 1;
  10078. opcode = Js::OpCode::CmUnGe_I4;
  10079. needsBoolConv = true;
  10080. break;
  10081. case Js::OpCode::CmUnGt_A:
  10082. if (!DoLossyIntTypeSpec())
  10083. {
  10084. return false;
  10085. }
  10086. newMin = 0;
  10087. newMax = 1;
  10088. opcode = Js::OpCode::CmUnGt_I4;
  10089. needsBoolConv = true;
  10090. break;
  10091. case Js::OpCode::Expo_A:
  10092. {
  10093. src1Val = src1OriginalVal;
  10094. src2Val = src2OriginalVal;
  10095. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10096. }
  10097. case Js::OpCode::Div_A:
  10098. {
  10099. ValueType specializedValueType = GetDivValueType(instr, src1Val, src2Val, true);
  10100. if (specializedValueType.IsFloat())
  10101. {
  10102. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  10103. // Note: We should really constant fold cst1%cst2...
  10104. src1Val = src1OriginalVal;
  10105. src2Val = src2OriginalVal;
  10106. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10107. }
  10108. #ifdef _M_ARM
  10109. if (!AutoSystemInfo::Data.ArmDivAvailable())
  10110. {
  10111. return false;
  10112. }
  10113. #endif
  10114. if (specializedValueType.IsInt())
  10115. {
  10116. if (max2 == 0x80000000 || (min2 == 0 && max2 == 00))
  10117. {
  10118. return false;
  10119. }
  10120. if (min1 == 0x80000000 && min2 <= -1 && max2 >= -1)
  10121. {
  10122. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  10123. // Or we know we are dividing by zero (which is weird to have because the profile data
  10124. // say we got an int)
  10125. bailOutKind = IR::BailOutOnDivOfMinInt;
  10126. }
  10127. src1Lossy = false; // Detect -0 on the sources
  10128. src2Lossy = false;
  10129. opcode = Js::OpCode::Div_I4;
  10130. bailOutKind |= IR::BailOnDivResultNotInt;
  10131. if (max2 >= 0 && min2 <= 0)
  10132. {
  10133. // Need to check for divide by zero if the denominator range includes 0
  10134. bailOutKind |= IR::BailOutOnDivByZero;
  10135. }
  10136. if (max1 >= 0 && min1 <= 0)
  10137. {
  10138. // Numerator contains 0 so the result contains 0
  10139. newMin = 0;
  10140. newMax = 0;
  10141. if (min2 < 0)
  10142. {
  10143. // Denominator may be negative, so the result could be negative 0
  10144. if (instr->ShouldCheckForNegativeZero())
  10145. {
  10146. bailOutKind |= IR::BailOutOnNegativeZero;
  10147. }
  10148. else
  10149. {
  10150. ignoredNegativeZero = true;
  10151. }
  10152. }
  10153. }
  10154. else
  10155. {
  10156. // Initialize to invalid value, one of the conditions below will update it correctly
  10157. newMin = INT_MAX;
  10158. newMax = INT_MIN;
  10159. }
  10160. // Deal with the positive and negative range separately for both the numerator and the denominator,
  10161. // and integrate to the overall min and max.
  10162. // If the result is positive (positive/positive or negative/negative):
  10163. // The min should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  10164. // divided by ---------------------------------------------------------------
  10165. // largest magnitude denominator (positive_Max2 | negative_Min2)
  10166. //
  10167. // The max should be the largest magnitude numerator (positive_Max1 | negative_Max1)
  10168. // divided by ---------------------------------------------------------------
  10169. // smallest magnitude denominator (positive_Min2 | negative_Max2)
  10170. // If the result is negative (positive/negative or negative/positive):
  10171. // The min should be the largest magnitude numerator (positive_Max1 | negative_Min1)
  10172. // divided by ---------------------------------------------------------------
  10173. // smallest magnitude denominator (negative_Max2 | positive_Min2)
  10174. //
  10175. // The max should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  10176. // divided by ---------------------------------------------------------------
  10177. // largest magnitude denominator (negative_Min2 | positive_Max2)
  10178. // Consider: The range can be slightly more precise if we take care of the rounding
  10179. if (max1 > 0)
  10180. {
  10181. // Take only the positive numerator range
  10182. int32 positive_Min1 = max(1, min1);
  10183. int32 positive_Max1 = max1;
  10184. if (max2 > 0)
  10185. {
  10186. // Take only the positive denominator range
  10187. int32 positive_Min2 = max(1, min2);
  10188. int32 positive_Max2 = max2;
  10189. // Positive / Positive
  10190. int32 quadrant1_Min = positive_Min1 <= positive_Max2? 1 : positive_Min1 / positive_Max2;
  10191. int32 quadrant1_Max = positive_Max1 <= positive_Min2? 1 : positive_Max1 / positive_Min2;
  10192. Assert(1 <= quadrant1_Min && quadrant1_Min <= quadrant1_Max);
  10193. // The result should be positive
  10194. newMin = min(newMin, quadrant1_Min);
  10195. newMax = max(newMax, quadrant1_Max);
  10196. }
  10197. if (min2 < 0)
  10198. {
  10199. // Take only the negative denominator range
  10200. int32 negative_Min2 = min2;
  10201. int32 negative_Max2 = min(-1, max2);
  10202. // Positive / Negative
  10203. int32 quadrant2_Min = -positive_Max1 >= negative_Max2? -1 : positive_Max1 / negative_Max2;
  10204. int32 quadrant2_Max = -positive_Min1 >= negative_Min2? -1 : positive_Min1 / negative_Min2;
  10205. // The result should be negative
  10206. Assert(quadrant2_Min <= quadrant2_Max && quadrant2_Max <= -1);
  10207. newMin = min(newMin, quadrant2_Min);
  10208. newMax = max(newMax, quadrant2_Max);
  10209. }
  10210. }
  10211. if (min1 < 0)
  10212. {
  10213. // Take only the negative numerator range
  10214. int32 negative_Min1 = min1;
  10215. int32 negative_Max1 = min(-1, max1);
  10216. if (max2 > 0)
  10217. {
  10218. // Take only the positive denominator range
  10219. int32 positive_Min2 = max(1, min2);
  10220. int32 positive_Max2 = max2;
  10221. // Negative / Positive
  10222. int32 quadrant4_Min = negative_Min1 >= -positive_Min2? -1 : negative_Min1 / positive_Min2;
  10223. int32 quadrant4_Max = negative_Max1 >= -positive_Max2? -1 : negative_Max1 / positive_Max2;
  10224. // The result should be negative
  10225. Assert(quadrant4_Min <= quadrant4_Max && quadrant4_Max <= -1);
  10226. newMin = min(newMin, quadrant4_Min);
  10227. newMax = max(newMax, quadrant4_Max);
  10228. }
  10229. if (min2 < 0)
  10230. {
  10231. // Take only the negative denominator range
  10232. int32 negative_Min2 = min2;
  10233. int32 negative_Max2 = min(-1, max2);
  10234. int32 quadrant3_Min;
  10235. int32 quadrant3_Max;
  10236. // Negative / Negative
  10237. if (negative_Max1 == 0x80000000 && negative_Min2 == -1)
  10238. {
  10239. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : (negative_Max1+1) / negative_Min2;
  10240. }
  10241. else
  10242. {
  10243. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : negative_Max1 / negative_Min2;
  10244. }
  10245. if (negative_Min1 == 0x80000000 && negative_Max2 == -1)
  10246. {
  10247. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : (negative_Min1+1) / negative_Max2;
  10248. }
  10249. else
  10250. {
  10251. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : negative_Min1 / negative_Max2;
  10252. }
  10253. // The result should be positive
  10254. Assert(1 <= quadrant3_Min && quadrant3_Min <= quadrant3_Max);
  10255. newMin = min(newMin, quadrant3_Min);
  10256. newMax = max(newMax, quadrant3_Max);
  10257. }
  10258. }
  10259. Assert(newMin <= newMax);
  10260. // Continue to int type spec
  10261. break;
  10262. }
  10263. }
  10264. // fall-through
  10265. default:
  10266. {
  10267. const bool involesLargeInt32 =
  10268. (src1Val && src1Val->GetValueInfo()->IsLikelyUntaggedInt()) ||
  10269. (src2Val && src2Val->GetValueInfo()->IsLikelyUntaggedInt());
  10270. const auto trySpecializeToFloat =
  10271. [&](const bool mayOverflow) -> bool
  10272. {
  10273. // It has been determined that this instruction cannot be int-specialized. Need to determine whether to attempt
  10274. // to float-specialize the instruction, or leave it unspecialized.
  10275. if((involesLargeInt32
  10276. #if INT32VAR
  10277. && mayOverflow
  10278. #endif
  10279. ) || (instr->m_opcode == Js::OpCode::Mul_A && !this->DoAggressiveMulIntTypeSpec())
  10280. )
  10281. {
  10282. // An input range is completely outside the range of an int31 and the operation is likely to overflow.
  10283. // Additionally, on 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is
  10284. // significantly slower to use in an unspecialized operation compared to a tagged int. So, try to
  10285. // float-specialize the instruction.
  10286. src1Val = src1OriginalVal;
  10287. src2Val = src2OriginalVal;
  10288. return TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10289. }
  10290. return false;
  10291. };
  10292. if (instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
  10293. {
  10294. if ((src1Val && src1Val->GetValueInfo()->IsLikelyFloat()) || (src2Val && src2Val->GetValueInfo()->IsLikelyFloat()))
  10295. {
  10296. // Try to type specialize to float
  10297. src1Val = src1OriginalVal;
  10298. src2Val = src2OriginalVal;
  10299. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10300. }
  10301. if (src1Val == nullptr ||
  10302. src2Val == nullptr ||
  10303. !src1Val->GetValueInfo()->IsLikelyInt() ||
  10304. !src2Val->GetValueInfo()->IsLikelyInt() ||
  10305. (
  10306. !DoAggressiveIntTypeSpec() &&
  10307. (
  10308. !(src1Val->GetValueInfo()->IsInt() || IsSwitchInt32TypeSpecialized(instr, currentBlock)) ||
  10309. !src2Val->GetValueInfo()->IsInt()
  10310. )
  10311. ) ||
  10312. (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt) ||
  10313. (instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->m_isNotInt))
  10314. {
  10315. return trySpecializeToFloat(true);
  10316. }
  10317. }
  10318. // Try to type specialize to int32
  10319. // If one of the values is a float constant with a value that fits in a uint32 but not an int32,
  10320. // and the instruction can ignore int overflow, the source value for the purposes of int specialization
  10321. // would have been changed to an int constant value by ignoring overflow. But, the conversion is still lossy.
  10322. if (!(src1OriginalVal && src1OriginalVal->GetValueInfo()->IsFloatConstant() && src1Val && src1Val->GetValueInfo()->HasIntConstantValue()))
  10323. {
  10324. src1Lossy = false;
  10325. }
  10326. if (!(src2OriginalVal && src2OriginalVal->GetValueInfo()->IsFloatConstant() && src2Val && src2Val->GetValueInfo()->HasIntConstantValue()))
  10327. {
  10328. src2Lossy = false;
  10329. }
  10330. switch(instr->m_opcode)
  10331. {
  10332. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  10333. // If the src is already type-specialized, if we don't type-specialize ArgOut_A_InlineBuiltIn instr, we'll get additional ToVar.
  10334. // So, to avoid that, type-specialize the ArgOut_A_InlineBuiltIn instr.
  10335. // Else we don't need to type-specialize the instr, we are fine with src being Var.
  10336. if (instr->GetSrc1()->IsRegOpnd())
  10337. {
  10338. StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
  10339. if (this->IsInt32TypeSpecialized(sym, this->currentBlock))
  10340. {
  10341. opcode = instr->m_opcode;
  10342. skipDst = true; // We should keep dst as is, otherwise the link opnd for next ArgOut/InlineBuiltInStart would be broken.
  10343. skipSrc2 = true; // src2 is linkOpnd. We don't need to type-specialize it.
  10344. newMin = min1; newMax = max1; // Values don't matter, these are unused.
  10345. goto LOutsideSwitch; // Continue to int-type-specialize.
  10346. }
  10347. else if (this->IsFloat64TypeSpecialized(sym, this->currentBlock))
  10348. {
  10349. src1Val = src1OriginalVal;
  10350. src2Val = src2OriginalVal;
  10351. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10352. }
  10353. else if (this->IsSimd128F4TypeSpecialized(sym, this->currentBlock))
  10354. {
  10355. // SIMD_JS
  10356. // We should be already using the SIMD type-spec sym. See TypeSpecializeSimd128.
  10357. Assert(IRType_IsSimd128(instr->GetSrc1()->GetType()));
  10358. }
  10359. }
  10360. return false;
  10361. case Js::OpCode::Add_A:
  10362. do // while(false)
  10363. {
  10364. const auto CannotOverflowBasedOnRelativeBounds = [&](int32 *const constantValueRef)
  10365. {
  10366. Assert(constantValueRef);
  10367. if(min2 == max2 &&
  10368. src1Val->GetValueInfo()->IsIntBounded() &&
  10369. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min2))
  10370. {
  10371. *constantValueRef = min2;
  10372. return true;
  10373. }
  10374. else if(
  10375. min1 == max1 &&
  10376. src2Val->GetValueInfo()->IsIntBounded() &&
  10377. src2Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min1))
  10378. {
  10379. *constantValueRef = min1;
  10380. return true;
  10381. }
  10382. return false;
  10383. };
  10384. if (Int32Math::Add(min1, min2, &newMin))
  10385. {
  10386. int32 constantSrcValue;
  10387. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  10388. {
  10389. newMin = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  10390. }
  10391. else if(instr->ShouldCheckForIntOverflow())
  10392. {
  10393. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10394. {
  10395. // May overflow
  10396. return trySpecializeToFloat(true);
  10397. }
  10398. bailOutKind |= IR::BailOutOnOverflow;
  10399. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  10400. }
  10401. else
  10402. {
  10403. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since
  10404. // overflow causes the value to wrap around, and we don't have a way to specify a lower and upper
  10405. // range of ints, we use the full range of int32s.
  10406. ignoredIntOverflow = true;
  10407. newMin = INT32_MIN;
  10408. newMax = INT32_MAX;
  10409. break;
  10410. }
  10411. }
  10412. if (Int32Math::Add(max1, max2, &newMax))
  10413. {
  10414. int32 constantSrcValue;
  10415. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  10416. {
  10417. newMax = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  10418. }
  10419. else if(instr->ShouldCheckForIntOverflow())
  10420. {
  10421. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10422. {
  10423. // May overflow
  10424. return trySpecializeToFloat(true);
  10425. }
  10426. bailOutKind |= IR::BailOutOnOverflow;
  10427. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  10428. }
  10429. else
  10430. {
  10431. // See comment about ignoring overflow above
  10432. ignoredIntOverflow = true;
  10433. newMin = INT32_MIN;
  10434. newMax = INT32_MAX;
  10435. break;
  10436. }
  10437. }
  10438. if(bailOutKind & IR::BailOutOnOverflow)
  10439. {
  10440. Assert(bailOutKind == IR::BailOutOnOverflow);
  10441. Assert(instr->ShouldCheckForIntOverflow());
  10442. int32 temp;
  10443. if(Int32Math::Add(
  10444. Int32Math::NearestInRangeTo(0, min1, max1),
  10445. Int32Math::NearestInRangeTo(0, min2, max2),
  10446. &temp))
  10447. {
  10448. // Always overflows
  10449. return trySpecializeToFloat(true);
  10450. }
  10451. }
  10452. } while(false);
  10453. if (!this->IsLoopPrePass() && newMin == newMax && bailOutKind == IR::BailOutInvalid)
  10454. {
  10455. // Take care of Add with zero here, since we know we're dealing with 2 numbers.
  10456. this->CaptureByteCodeSymUses(instr);
  10457. IR::Opnd *src;
  10458. bool isAddZero = true;
  10459. int32 intConstantValue;
  10460. if (src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  10461. {
  10462. src = instr->UnlinkSrc2();
  10463. instr->FreeSrc1();
  10464. }
  10465. else if (src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  10466. {
  10467. src = instr->UnlinkSrc1();
  10468. instr->FreeSrc2();
  10469. }
  10470. else
  10471. {
  10472. // This should have been handled by const folding, unless:
  10473. // - A source's value was substituted with a different value here, which is after const folding happened
  10474. // - A value is not definitely int, but once converted to definite int, it would be zero due to a
  10475. // condition in the source code such as if(a === 0). Ideally, we would specialize the sources and
  10476. // remove the add, but doesn't seem too important for now.
  10477. Assert(
  10478. !DoConstFold() ||
  10479. src1Val != src1OriginalVal ||
  10480. src2Val != src2OriginalVal ||
  10481. !src1Val->GetValueInfo()->IsInt() ||
  10482. !src2Val->GetValueInfo()->IsInt());
  10483. isAddZero = false;
  10484. src = nullptr;
  10485. }
  10486. if (isAddZero)
  10487. {
  10488. IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Ld_A, instr->UnlinkDst(), src, instr->m_func);
  10489. newInstr->SetByteCodeOffset(instr);
  10490. instr->m_opcode = Js::OpCode::Nop;
  10491. this->currentBlock->InsertInstrAfter(newInstr, instr);
  10492. return true;
  10493. }
  10494. }
  10495. if(!ignoredIntOverflow)
  10496. {
  10497. if(min2 == max2 &&
  10498. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  10499. instr->GetSrc1()->IsRegOpnd())
  10500. {
  10501. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, min2);
  10502. }
  10503. else if(
  10504. min1 == max1 &&
  10505. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Val)) &&
  10506. instr->GetSrc2()->IsRegOpnd())
  10507. {
  10508. addSubConstantInfo.Set(instr->GetSrc2()->AsRegOpnd()->m_sym, src2Val, min2 == max2, min1);
  10509. }
  10510. }
  10511. opcode = Js::OpCode::Add_I4;
  10512. break;
  10513. case Js::OpCode::Sub_A:
  10514. do // while(false)
  10515. {
  10516. const auto CannotOverflowBasedOnRelativeBounds = [&]()
  10517. {
  10518. return
  10519. min2 == max2 &&
  10520. src1Val->GetValueInfo()->IsIntBounded() &&
  10521. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(min2);
  10522. };
  10523. if (Int32Math::Sub(min1, max2, &newMin))
  10524. {
  10525. if(CannotOverflowBasedOnRelativeBounds())
  10526. {
  10527. Assert(min2 == max2);
  10528. newMin = min2 >= 0 ? INT32_MIN : INT32_MAX;
  10529. }
  10530. else if(instr->ShouldCheckForIntOverflow())
  10531. {
  10532. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10533. {
  10534. // May overflow
  10535. return trySpecializeToFloat(true);
  10536. }
  10537. bailOutKind |= IR::BailOutOnOverflow;
  10538. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  10539. }
  10540. else
  10541. {
  10542. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
  10543. // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
  10544. // we use the full range of int32s.
  10545. ignoredIntOverflow = true;
  10546. newMin = INT32_MIN;
  10547. newMax = INT32_MAX;
  10548. break;
  10549. }
  10550. }
  10551. if (Int32Math::Sub(max1, min2, &newMax))
  10552. {
  10553. if(CannotOverflowBasedOnRelativeBounds())
  10554. {
  10555. Assert(min2 == max2);
  10556. newMax = min2 >= 0 ? INT32_MIN: INT32_MAX;
  10557. }
  10558. else if(instr->ShouldCheckForIntOverflow())
  10559. {
  10560. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10561. {
  10562. // May overflow
  10563. return trySpecializeToFloat(true);
  10564. }
  10565. bailOutKind |= IR::BailOutOnOverflow;
  10566. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  10567. }
  10568. else
  10569. {
  10570. // See comment about ignoring overflow above
  10571. ignoredIntOverflow = true;
  10572. newMin = INT32_MIN;
  10573. newMax = INT32_MAX;
  10574. break;
  10575. }
  10576. }
  10577. if(bailOutKind & IR::BailOutOnOverflow)
  10578. {
  10579. Assert(bailOutKind == IR::BailOutOnOverflow);
  10580. Assert(instr->ShouldCheckForIntOverflow());
  10581. int32 temp;
  10582. if(Int32Math::Sub(
  10583. Int32Math::NearestInRangeTo(-1, min1, max1),
  10584. Int32Math::NearestInRangeTo(0, min2, max2),
  10585. &temp))
  10586. {
  10587. // Always overflows
  10588. return trySpecializeToFloat(true);
  10589. }
  10590. }
  10591. } while(false);
  10592. if(!ignoredIntOverflow &&
  10593. min2 == max2 &&
  10594. min2 != INT32_MIN &&
  10595. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  10596. instr->GetSrc1()->IsRegOpnd())
  10597. {
  10598. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, -min2);
  10599. }
  10600. opcode = Js::OpCode::Sub_I4;
  10601. break;
  10602. case Js::OpCode::Mul_A:
  10603. {
  10604. if (Int32Math::Mul(min1, min2, &newMin))
  10605. {
  10606. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10607. {
  10608. // May overflow
  10609. return trySpecializeToFloat(true);
  10610. }
  10611. bailOutKind |= IR::BailOutOnMulOverflow;
  10612. newMin = (min1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  10613. }
  10614. newMax = newMin;
  10615. if (Int32Math::Mul(max1, max2, &tmp))
  10616. {
  10617. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10618. {
  10619. // May overflow
  10620. return trySpecializeToFloat(true);
  10621. }
  10622. bailOutKind |= IR::BailOutOnMulOverflow;
  10623. tmp = (max1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  10624. }
  10625. newMin = min(newMin, tmp);
  10626. newMax = max(newMax, tmp);
  10627. if (Int32Math::Mul(min1, max2, &tmp))
  10628. {
  10629. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10630. {
  10631. // May overflow
  10632. return trySpecializeToFloat(true);
  10633. }
  10634. bailOutKind |= IR::BailOutOnMulOverflow;
  10635. tmp = (min1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  10636. }
  10637. newMin = min(newMin, tmp);
  10638. newMax = max(newMax, tmp);
  10639. if (Int32Math::Mul(max1, min2, &tmp))
  10640. {
  10641. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10642. {
  10643. // May overflow
  10644. return trySpecializeToFloat(true);
  10645. }
  10646. bailOutKind |= IR::BailOutOnMulOverflow;
  10647. tmp = (max1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  10648. }
  10649. newMin = min(newMin, tmp);
  10650. newMax = max(newMax, tmp);
  10651. if (bailOutKind & IR::BailOutOnMulOverflow)
  10652. {
  10653. // CSE only if two MULs have the same overflow check behavior.
  10654. // Currently this is set to be ignore int32 overflow, but not 53-bit, or int32 overflow matters.
  10655. if (!instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
  10656. {
  10657. // If we allow int to overflow then there can be anything in the resulting int
  10658. newMin = IntConstMin;
  10659. newMax = IntConstMax;
  10660. ignoredIntOverflow = true;
  10661. }
  10662. int32 temp, overflowValue;
  10663. if (Int32Math::Mul(
  10664. Int32Math::NearestInRangeTo(0, min1, max1),
  10665. Int32Math::NearestInRangeTo(0, min2, max2),
  10666. &temp,
  10667. &overflowValue))
  10668. {
  10669. Assert(instr->ignoreOverflowBitCount >= 32);
  10670. int overflowMatters = 64 - instr->ignoreOverflowBitCount;
  10671. if (!ignoredIntOverflow ||
  10672. // Use shift to check high bits in case it's negative
  10673. ((overflowValue << overflowMatters) >> overflowMatters) != overflowValue
  10674. )
  10675. {
  10676. // Always overflows
  10677. return trySpecializeToFloat(true);
  10678. }
  10679. }
  10680. }
  10681. if (newMin <= 0 && newMax >= 0 && // New range crosses zero
  10682. (min1 < 0 || min2 < 0) && // An operand's range contains a negative integer
  10683. !(min1 > 0 || min2 > 0) && // Neither operand's range contains only positive integers
  10684. !instr->GetSrc1()->IsEqual(instr->GetSrc2())) // The operands don't have the same value
  10685. {
  10686. if (instr->ShouldCheckForNegativeZero())
  10687. {
  10688. // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
  10689. if (!DoAggressiveIntTypeSpec())
  10690. {
  10691. // May result in -0
  10692. return trySpecializeToFloat(false);
  10693. }
  10694. if (((min1 == 0 && max1 == 0) || (min2 == 0 && max2 == 0)) && (max1 < 0 || max2 < 0))
  10695. {
  10696. // Always results in -0
  10697. return trySpecializeToFloat(false);
  10698. }
  10699. bailOutKind |= IR::BailOutOnNegativeZero;
  10700. }
  10701. else
  10702. {
  10703. ignoredNegativeZero = true;
  10704. }
  10705. }
  10706. opcode = Js::OpCode::Mul_I4;
  10707. break;
  10708. }
  10709. case Js::OpCode::Rem_A:
  10710. {
  10711. src2 = instr->GetSrc2();
  10712. if (!this->IsLoopPrePass() && min2 == max2 && min1 >= 0)
  10713. {
  10714. int32 value = min2;
  10715. if (value == (1 << Math::Log2(value)) && src2->IsAddrOpnd())
  10716. {
  10717. Assert(src2->AsAddrOpnd()->IsVar());
  10718. instr->m_opcode = Js::OpCode::And_A;
  10719. src2->AsAddrOpnd()->SetAddress(Js::TaggedInt::ToVarUnchecked(value - 1),
  10720. IR::AddrOpndKindConstantVar);
  10721. *pSrc2Val = GetIntConstantValue(value - 1, instr);
  10722. src2Val = *pSrc2Val;
  10723. return this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2Val, redoTypeSpecRef);
  10724. }
  10725. }
  10726. #ifdef _M_ARM
  10727. if (!AutoSystemInfo::Data.ArmDivAvailable())
  10728. {
  10729. return false;
  10730. }
  10731. #endif
  10732. if (min1 < 0)
  10733. {
  10734. // The most negative it can be is min1, unless limited by min2/max2
  10735. int32 negMaxAbs2;
  10736. if (min2 == INT32_MIN)
  10737. {
  10738. negMaxAbs2 = INT32_MIN;
  10739. }
  10740. else
  10741. {
  10742. negMaxAbs2 = -max(abs(min2), abs(max2)) + 1;
  10743. }
  10744. newMin = max(min1, negMaxAbs2);
  10745. }
  10746. else
  10747. {
  10748. newMin = 0;
  10749. }
  10750. bool isModByPowerOf2 = (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo() &&
  10751. instr->m_func->GetReadOnlyProfileInfo()->IsModulusOpByPowerOf2(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId)));
  10752. if(isModByPowerOf2)
  10753. {
  10754. Assert(bailOutKind == IR::BailOutInvalid);
  10755. bailOutKind = IR::BailOnModByPowerOf2;
  10756. newMin = 0;
  10757. }
  10758. else
  10759. {
  10760. if (min2 <= 0 && max2 >= 0)
  10761. {
  10762. // Consider: We could handle the zero case with a check and bailout...
  10763. return false;
  10764. }
  10765. if (min1 == 0x80000000 && (min2 <= -1 && max2 >= -1))
  10766. {
  10767. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  10768. return false;
  10769. }
  10770. if (min1 < 0)
  10771. {
  10772. if(instr->ShouldCheckForNegativeZero())
  10773. {
  10774. if (!DoAggressiveIntTypeSpec())
  10775. {
  10776. return false;
  10777. }
  10778. bailOutKind |= IR::BailOutOnNegativeZero;
  10779. }
  10780. else
  10781. {
  10782. ignoredNegativeZero = true;
  10783. }
  10784. }
  10785. }
  10786. {
  10787. int32 absMax2;
  10788. if (min2 == INT32_MIN)
  10789. {
  10790. // abs(INT32_MIN) == INT32_MAX because of overflow
  10791. absMax2 = INT32_MAX;
  10792. }
  10793. else
  10794. {
  10795. absMax2 = max(abs(min2), abs(max2)) - 1;
  10796. }
  10797. newMax = min(absMax2, max(max1, 0));
  10798. newMax = max(newMin, newMax);
  10799. }
  10800. opcode = Js::OpCode::Rem_I4;
  10801. break;
  10802. }
  10803. case Js::OpCode::CmEq_A:
  10804. case Js::OpCode::CmSrEq_A:
  10805. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10806. {
  10807. return false;
  10808. }
  10809. newMin = 0;
  10810. newMax = 1;
  10811. opcode = Js::OpCode::CmEq_I4;
  10812. needsBoolConv = true;
  10813. break;
  10814. case Js::OpCode::CmNeq_A:
  10815. case Js::OpCode::CmSrNeq_A:
  10816. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10817. {
  10818. return false;
  10819. }
  10820. newMin = 0;
  10821. newMax = 1;
  10822. opcode = Js::OpCode::CmNeq_I4;
  10823. needsBoolConv = true;
  10824. break;
  10825. case Js::OpCode::CmLe_A:
  10826. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10827. {
  10828. return false;
  10829. }
  10830. newMin = 0;
  10831. newMax = 1;
  10832. opcode = Js::OpCode::CmLe_I4;
  10833. needsBoolConv = true;
  10834. break;
  10835. case Js::OpCode::CmLt_A:
  10836. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10837. {
  10838. return false;
  10839. }
  10840. newMin = 0;
  10841. newMax = 1;
  10842. opcode = Js::OpCode::CmLt_I4;
  10843. needsBoolConv = true;
  10844. break;
  10845. case Js::OpCode::CmGe_A:
  10846. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10847. {
  10848. return false;
  10849. }
  10850. newMin = 0;
  10851. newMax = 1;
  10852. opcode = Js::OpCode::CmGe_I4;
  10853. needsBoolConv = true;
  10854. break;
  10855. case Js::OpCode::CmGt_A:
  10856. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10857. {
  10858. return false;
  10859. }
  10860. newMin = 0;
  10861. newMax = 1;
  10862. opcode = Js::OpCode::CmGt_I4;
  10863. needsBoolConv = true;
  10864. break;
  10865. case Js::OpCode::BrSrEq_A:
  10866. case Js::OpCode::BrEq_A:
  10867. case Js::OpCode::BrNotNeq_A:
  10868. case Js::OpCode::BrSrNotNeq_A:
  10869. {
  10870. if(DoConstFold() &&
  10871. !IsLoopPrePass() &&
  10872. TryOptConstFoldBrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10873. {
  10874. return true;
  10875. }
  10876. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10877. UpdateIntBoundsForEqualBranch(src1Val, src2Val);
  10878. if(!specialize)
  10879. {
  10880. return false;
  10881. }
  10882. opcode = Js::OpCode::BrEq_I4;
  10883. // We'll get a warning if we don't assign a value to these...
  10884. // We'll assert if we use them and make a range where min > max
  10885. newMin = 2; newMax = 1;
  10886. break;
  10887. }
  10888. case Js::OpCode::BrSrNeq_A:
  10889. case Js::OpCode::BrNeq_A:
  10890. case Js::OpCode::BrSrNotEq_A:
  10891. case Js::OpCode::BrNotEq_A:
  10892. {
  10893. if(DoConstFold() &&
  10894. !IsLoopPrePass() &&
  10895. TryOptConstFoldBrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  10896. {
  10897. return true;
  10898. }
  10899. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10900. UpdateIntBoundsForNotEqualBranch(src1Val, src2Val);
  10901. if(!specialize)
  10902. {
  10903. return false;
  10904. }
  10905. opcode = Js::OpCode::BrNeq_I4;
  10906. // We'll get a warning if we don't assign a value to these...
  10907. // We'll assert if we use them and make a range where min > max
  10908. newMin = 2; newMax = 1;
  10909. break;
  10910. }
  10911. case Js::OpCode::BrGt_A:
  10912. case Js::OpCode::BrNotLe_A:
  10913. {
  10914. if(DoConstFold() &&
  10915. !IsLoopPrePass() &&
  10916. TryOptConstFoldBrGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10917. {
  10918. return true;
  10919. }
  10920. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10921. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  10922. if(!specialize)
  10923. {
  10924. return false;
  10925. }
  10926. opcode = Js::OpCode::BrGt_I4;
  10927. // We'll get a warning if we don't assign a value to these...
  10928. // We'll assert if we use them and make a range where min > max
  10929. newMin = 2; newMax = 1;
  10930. break;
  10931. }
  10932. case Js::OpCode::BrGe_A:
  10933. case Js::OpCode::BrNotLt_A:
  10934. {
  10935. if(DoConstFold() &&
  10936. !IsLoopPrePass() &&
  10937. TryOptConstFoldBrGreaterThanOrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10938. {
  10939. return true;
  10940. }
  10941. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10942. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  10943. if(!specialize)
  10944. {
  10945. return false;
  10946. }
  10947. opcode = Js::OpCode::BrGe_I4;
  10948. // We'll get a warning if we don't assign a value to these...
  10949. // We'll assert if we use them and make a range where min > max
  10950. newMin = 2; newMax = 1;
  10951. break;
  10952. }
  10953. case Js::OpCode::BrLt_A:
  10954. case Js::OpCode::BrNotGe_A:
  10955. {
  10956. if(DoConstFold() &&
  10957. !IsLoopPrePass() &&
  10958. TryOptConstFoldBrGreaterThanOrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  10959. {
  10960. return true;
  10961. }
  10962. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10963. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  10964. if(!specialize)
  10965. {
  10966. return false;
  10967. }
  10968. opcode = Js::OpCode::BrLt_I4;
  10969. // We'll get a warning if we don't assign a value to these...
  10970. // We'll assert if we use them and make a range where min > max
  10971. newMin = 2; newMax = 1;
  10972. break;
  10973. }
  10974. case Js::OpCode::BrLe_A:
  10975. case Js::OpCode::BrNotGt_A:
  10976. {
  10977. if(DoConstFold() &&
  10978. !IsLoopPrePass() &&
  10979. TryOptConstFoldBrGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  10980. {
  10981. return true;
  10982. }
  10983. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10984. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  10985. if(!specialize)
  10986. {
  10987. return false;
  10988. }
  10989. opcode = Js::OpCode::BrLe_I4;
  10990. // We'll get a warning if we don't assign a value to these...
  10991. // We'll assert if we use them and make a range where min > max
  10992. newMin = 2; newMax = 1;
  10993. break;
  10994. }
  10995. default:
  10996. return false;
  10997. }
  10998. // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it
  10999. // (won't leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value
  11000. // needs to be guaranteed to be an int
  11001. if(!ignoredIntOverflow &&
  11002. !ignoredNegativeZero &&
  11003. !needsBoolConv &&
  11004. instr->ShouldCheckForIntOverflow() &&
  11005. !IsWorthSpecializingToInt32(instr, src1Val, src2Val))
  11006. {
  11007. // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
  11008. // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
  11009. // the following cases.
  11010. // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to
  11011. // be an int, but since we're not going to specialize this instruction, there won't be a bailout check.
  11012. // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
  11013. // that case, especially if the dst sym is live on the back-edge.
  11014. if(bailOutKind == IR::BailOutInvalid &&
  11015. instr->GetDst() &&
  11016. src1Val->GetValueInfo()->IsInt() &&
  11017. src2Val->GetValueInfo()->IsInt() &&
  11018. (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
  11019. {
  11020. *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  11021. }
  11022. return false;
  11023. }
  11024. } // case default
  11025. } // switch
  11026. LOutsideSwitch:
  11027. this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
  11028. this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;
  11029. {
  11030. // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
  11031. Value *src1IndirIndexVal = nullptr;
  11032. if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
  11033. {
  11034. *redoTypeSpecRef = true;
  11035. return false;
  11036. }
  11037. }
  11038. const Js::OpCode originalOpCode = instr->m_opcode;
  11039. if (!this->IsLoopPrePass())
  11040. {
  11041. // No re-write on prepass
  11042. instr->m_opcode = opcode;
  11043. }
  11044. Value *src1ValueToSpecialize = src1Val, *src2ValueToSpecialize = src2Val;
  11045. // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
  11046. // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
  11047. // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
  11048. // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
  11049. // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
  11050. // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
  11051. if(src1Lossy)
  11052. {
  11053. src1ValueToSpecialize = src1OriginalVal;
  11054. }
  11055. if (src2Lossy)
  11056. {
  11057. src2ValueToSpecialize = src2OriginalVal;
  11058. }
  11059. // Make sure the srcs are specialized
  11060. src1 = instr->GetSrc1();
  11061. this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, src1Lossy);
  11062. if (!skipSrc2)
  11063. {
  11064. src2 = instr->GetSrc2();
  11065. this->ToInt32(instr, src2, this->currentBlock, src2ValueToSpecialize, nullptr, src2Lossy);
  11066. }
  11067. if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
  11068. {
  11069. GenerateBailAtOperation(&instr, bailOutKind);
  11070. }
  11071. if (!skipDst && instr->GetDst())
  11072. {
  11073. if (needsBoolConv)
  11074. {
  11075. IR::RegOpnd *varDst;
  11076. if (this->IsLoopPrePass())
  11077. {
  11078. varDst = instr->GetDst()->AsRegOpnd();
  11079. this->ToVarRegOpnd(varDst, this->currentBlock);
  11080. }
  11081. else
  11082. {
  11083. // Generate:
  11084. // t1.i = CmCC t2.i, t3.i
  11085. // t1.v = Conv_bool t1.i
  11086. //
  11087. // If the only uses of t1 are ints, the conv_bool will get dead-stored
  11088. TypeSpecializeIntDst(instr, originalOpCode, nullptr, src1Val, src2Val, bailOutKind, newMin, newMax, pDstVal);
  11089. IR::RegOpnd *intDst = instr->GetDst()->AsRegOpnd();
  11090. intDst->SetIsJITOptimizedReg(true);
  11091. varDst = IR::RegOpnd::New(intDst->m_sym->GetVarEquivSym(this->func), TyVar, this->func);
  11092. IR::Instr *convBoolInstr = IR::Instr::New(Js::OpCode::Conv_Bool, varDst, intDst, this->func);
  11093. // In some cases (e.g. unsigned compare peep code), a comparison will use variables
  11094. // other than the ones initially intended for it, if we can determine that we would
  11095. // arrive at the same result. This means that we get a ByteCodeUses operation after
  11096. // the actual comparison. Since Inserting the Conv_bool just after the compare, and
  11097. // just before the ByteCodeUses, would cause issues later on with register lifetime
  11098. // calculation, we want to insert the Conv_bool after the whole compare instruction
  11099. // block.
  11100. IR::Instr *putAfter = instr;
  11101. while (putAfter->m_next && putAfter->m_next->m_opcode == Js::OpCode::ByteCodeUses)
  11102. {
  11103. putAfter = putAfter->m_next;
  11104. }
  11105. putAfter->InsertAfter(convBoolInstr);
  11106. convBoolInstr->SetByteCodeOffset(instr);
  11107. this->ToVarRegOpnd(varDst, this->currentBlock);
  11108. this->blockData.liveInt32Syms->Set(varDst->m_sym->m_id);
  11109. this->blockData.liveLossyInt32Syms->Set(varDst->m_sym->m_id);
  11110. }
  11111. *pDstVal = this->NewGenericValue(ValueType::Boolean, varDst);
  11112. }
  11113. else
  11114. {
  11115. TypeSpecializeIntDst(
  11116. instr,
  11117. originalOpCode,
  11118. nullptr,
  11119. src1Val,
  11120. src2Val,
  11121. bailOutKind,
  11122. newMin,
  11123. newMax,
  11124. pDstVal,
  11125. addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
  11126. }
  11127. }
  11128. if(bailOutKind == IR::BailOutInvalid)
  11129. {
  11130. GOPT_TRACE(_u("Type specialized to INT\n"));
  11131. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11132. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  11133. {
  11134. Output::Print(_u("Type specialized to INT: "));
  11135. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11136. }
  11137. #endif
  11138. }
  11139. else
  11140. {
  11141. GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
  11142. if(bailOutKind & (IR::BailOutOnOverflow | IR::BailOutOnMulOverflow) )
  11143. {
  11144. GOPT_TRACE(_u(" Overflow\n"));
  11145. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11146. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  11147. {
  11148. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
  11149. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11150. }
  11151. #endif
  11152. }
  11153. if(bailOutKind & IR::BailOutOnNegativeZero)
  11154. {
  11155. GOPT_TRACE(_u(" Zero\n"));
  11156. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11157. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  11158. {
  11159. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
  11160. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11161. }
  11162. #endif
  11163. }
  11164. }
  11165. return true;
  11166. }
  11167. bool
  11168. GlobOpt::IsWorthSpecializingToInt32Branch(IR::Instr * instr, Value * src1Val, Value * src2Val)
  11169. {
  11170. if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
  11171. {
  11172. StackSym *sym1 = instr->GetSrc1()->AsRegOpnd()->m_sym;
  11173. if (this->IsInt32TypeSpecialized(sym1, this->currentBlock) == false)
  11174. {
  11175. if (!src2Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc2()->IsRegOpnd())
  11176. {
  11177. StackSym *sym2 = instr->GetSrc2()->AsRegOpnd()->m_sym;
  11178. if (this->IsInt32TypeSpecialized(sym2, this->currentBlock) == false)
  11179. {
  11180. // Type specializing a Br itself isn't worth it, unless one src
  11181. // is already type specialized
  11182. return false;
  11183. }
  11184. }
  11185. }
  11186. }
  11187. return true;
  11188. }
  11189. bool
  11190. GlobOpt::TryOptConstFoldBrFalse(
  11191. IR::Instr *const instr,
  11192. Value *const srcValue,
  11193. const int32 min,
  11194. const int32 max)
  11195. {
  11196. Assert(instr);
  11197. Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
  11198. Assert(srcValue);
  11199. if(!(DoAggressiveIntTypeSpec() ? srcValue->GetValueInfo()->IsLikelyInt() : srcValue->GetValueInfo()->IsInt()))
  11200. {
  11201. return false;
  11202. }
  11203. if(ValueInfo::IsEqualTo(srcValue, min, max, nullptr, 0, 0))
  11204. {
  11205. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrFalse_A, instr, srcValue);
  11206. return true;
  11207. }
  11208. if(ValueInfo::IsNotEqualTo(srcValue, min, max, nullptr, 0, 0))
  11209. {
  11210. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrTrue_A, instr, srcValue);
  11211. return true;
  11212. }
  11213. return false;
  11214. }
  11215. bool
  11216. GlobOpt::TryOptConstFoldBrEqual(
  11217. IR::Instr *const instr,
  11218. const bool branchOnEqual,
  11219. Value *const src1Value,
  11220. const int32 min1,
  11221. const int32 max1,
  11222. Value *const src2Value,
  11223. const int32 min2,
  11224. const int32 max2)
  11225. {
  11226. Assert(instr);
  11227. Assert(src1Value);
  11228. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  11229. Assert(src2Value);
  11230. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  11231. if(ValueInfo::IsEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11232. {
  11233. OptConstFoldBr(branchOnEqual, instr, src1Value, src2Value);
  11234. return true;
  11235. }
  11236. if(ValueInfo::IsNotEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11237. {
  11238. OptConstFoldBr(!branchOnEqual, instr, src1Value, src2Value);
  11239. return true;
  11240. }
  11241. return false;
  11242. }
  11243. bool
  11244. GlobOpt::TryOptConstFoldBrGreaterThan(
  11245. IR::Instr *const instr,
  11246. const bool branchOnGreaterThan,
  11247. Value *const src1Value,
  11248. const int32 min1,
  11249. const int32 max1,
  11250. Value *const src2Value,
  11251. const int32 min2,
  11252. const int32 max2)
  11253. {
  11254. Assert(instr);
  11255. Assert(src1Value);
  11256. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  11257. Assert(src2Value);
  11258. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  11259. if(ValueInfo::IsGreaterThan(src1Value, min1, max1, src2Value, min2, max2))
  11260. {
  11261. OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
  11262. return true;
  11263. }
  11264. if(ValueInfo::IsLessThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11265. {
  11266. OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
  11267. return true;
  11268. }
  11269. return false;
  11270. }
  11271. bool
  11272. GlobOpt::TryOptConstFoldBrGreaterThanOrEqual(
  11273. IR::Instr *const instr,
  11274. const bool branchOnGreaterThanOrEqual,
  11275. Value *const src1Value,
  11276. const int32 min1,
  11277. const int32 max1,
  11278. Value *const src2Value,
  11279. const int32 min2,
  11280. const int32 max2)
  11281. {
  11282. Assert(instr);
  11283. Assert(src1Value);
  11284. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  11285. Assert(src2Value);
  11286. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  11287. if(ValueInfo::IsGreaterThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11288. {
  11289. OptConstFoldBr(branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  11290. return true;
  11291. }
  11292. if(ValueInfo::IsLessThan(src1Value, min1, max1, src2Value, min2, max2))
  11293. {
  11294. OptConstFoldBr(!branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  11295. return true;
  11296. }
  11297. return false;
  11298. }
  11299. bool
  11300. GlobOpt::TryOptConstFoldBrUnsignedLessThan(
  11301. IR::Instr *const instr,
  11302. const bool branchOnLessThan,
  11303. Value *const src1Value,
  11304. const int32 min1,
  11305. const int32 max1,
  11306. Value *const src2Value,
  11307. const int32 min2,
  11308. const int32 max2)
  11309. {
  11310. Assert(DoConstFold());
  11311. Assert(!IsLoopPrePass());
  11312. if(!src1Value ||
  11313. !src2Value ||
  11314. !(
  11315. DoAggressiveIntTypeSpec()
  11316. ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
  11317. : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
  11318. ))
  11319. {
  11320. return false;
  11321. }
  11322. uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
  11323. uint uMax1 = max((uint)min1, (uint)max1);
  11324. uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
  11325. uint uMax2 = max((uint)min2, (uint)max2);
  11326. if (uMax1 < uMin2)
  11327. {
  11328. // Range 1 is always lesser than Range 2
  11329. OptConstFoldBr(branchOnLessThan, instr, src1Value, src2Value);
  11330. return true;
  11331. }
  11332. if (uMin1 >= uMax2)
  11333. {
  11334. // Range 2 is always lesser than Range 1
  11335. OptConstFoldBr(!branchOnLessThan, instr, src1Value, src2Value);
  11336. return true;
  11337. }
  11338. return false;
  11339. }
  11340. bool
  11341. GlobOpt::TryOptConstFoldBrUnsignedGreaterThan(
  11342. IR::Instr *const instr,
  11343. const bool branchOnGreaterThan,
  11344. Value *const src1Value,
  11345. const int32 min1,
  11346. const int32 max1,
  11347. Value *const src2Value,
  11348. const int32 min2,
  11349. const int32 max2)
  11350. {
  11351. Assert(DoConstFold());
  11352. Assert(!IsLoopPrePass());
  11353. if(!src1Value ||
  11354. !src2Value ||
  11355. !(
  11356. DoAggressiveIntTypeSpec()
  11357. ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
  11358. : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
  11359. ))
  11360. {
  11361. return false;
  11362. }
  11363. uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
  11364. uint uMax1 = max((uint)min1, (uint)max1);
  11365. uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
  11366. uint uMax2 = max((uint)min2, (uint)max2);
  11367. if (uMin1 > uMax2)
  11368. {
  11369. // Range 1 is always greater than Range 2
  11370. OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
  11371. return true;
  11372. }
  11373. if (uMax1 <= uMin2)
  11374. {
  11375. // Range 2 is always greater than Range 1
  11376. OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
  11377. return true;
  11378. }
  11379. return false;
  11380. }
// Attaches path-dependent value info to one of the two outgoing edges of the
// current block: the branch-taken edge when conditionToBranch is true, or the
// fall-through edge when it is false.
void
GlobOpt::SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info)
{
    // The current block must end in a two-way conditional branch.
    Assert(this->currentBlock->GetSuccList()->Count() == 2);

    // The fall-through successor is identified by the first instruction of the
    // lexically-next block.
    IR::Instr * fallthrough = this->currentBlock->GetNext()->GetFirstInstr();
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, this->currentBlock->GetSuccList())
    {
        // A successor whose first instruction is not the fall-through's first
        // instruction is the branch-taken edge.
        if (conditionToBranch == (edge->GetSucc()->GetFirstInstr() != fallthrough))
        {
            edge->SetPathDependentInfo(info, alloc);
            return;
        }
    }
    NEXT_SLISTBASECOUNTED_ENTRY;

    // Unreachable: one of the two edges must have matched.
    Assert(false);
}
// Applies the relationship recorded in a path-dependent info (e.g. left < right
// on this edge) to the int bounds of the left and right values, and returns the
// previous ValueInfos so the caller can undo the changes with
// RestorePathDependentInfo after processing the path.
PathDependentInfoToRestore
GlobOpt::UpdatePathDependentInfo(PathDependentInfo *const info)
{
    Assert(info);

    if(!info->HasInfo())
    {
        // Nothing recorded for this edge; nothing to restore later.
        return PathDependentInfoToRestore();
    }

    // Select the bounds-update member functions for each side. The right side
    // gets the mirrored relationship (left > right implies right < left).
    decltype(&GlobOpt::UpdateIntBoundsForEqual) UpdateIntBoundsForLeftValue, UpdateIntBoundsForRightValue;
    switch(info->Relationship())
    {
        case PathDependentRelationship::Equal:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForEqual;
            break;

        case PathDependentRelationship::NotEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            break;

        case PathDependentRelationship::GreaterThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            break;

        case PathDependentRelationship::GreaterThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThan;
            break;

        case PathDependentRelationship::LessThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            break;

        case PathDependentRelationship::LessThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            break;

        default:
            Assert(false);
            __assume(false);
    }

    // Capture the current value infos and their int constant bounds. The right
    // side may be a constant rather than a value; synthesize its bounds then.
    ValueInfo *leftValueInfo = info->LeftValue()->GetValueInfo();
    IntConstantBounds leftConstantBounds;
    AssertVerify(leftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));

    ValueInfo *rightValueInfo;
    IntConstantBounds rightConstantBounds;
    if(info->RightValue())
    {
        rightValueInfo = info->RightValue()->GetValueInfo();
        AssertVerify(rightValueInfo->TryGetIntConstantBounds(&rightConstantBounds, true));
    }
    else
    {
        rightValueInfo = nullptr;
        rightConstantBounds = IntConstantBounds(info->RightConstantValue(), info->RightConstantValue());
    }

    // Tighten the left side first.
    ValueInfo *const newLeftValueInfo =
        (this->*UpdateIntBoundsForLeftValue)(
            info->LeftValue(),
            leftConstantBounds,
            info->RightValue(),
            rightConstantBounds,
            true);
    if(newLeftValueInfo)
    {
        ChangeValueInfo(nullptr, info->LeftValue(), newLeftValueInfo);
        // Refresh the left bounds so the right-side update below sees the
        // tightened range.
        AssertVerify(newLeftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));
    }
    else
    {
        // Left side unchanged; a null entry tells RestorePathDependentInfo that
        // there is nothing to restore for it.
        leftValueInfo = nullptr;
    }

    // Then tighten the right side using the (possibly updated) left bounds.
    ValueInfo *const newRightValueInfo =
        (this->*UpdateIntBoundsForRightValue)(
            info->RightValue(),
            rightConstantBounds,
            info->LeftValue(),
            leftConstantBounds,
            true);
    if(newRightValueInfo)
    {
        ChangeValueInfo(nullptr, info->RightValue(), newRightValueInfo);
    }
    else
    {
        rightValueInfo = nullptr;
    }

    // Return the pre-update infos (null where no change was made).
    return PathDependentInfoToRestore(leftValueInfo, rightValueInfo);
}
  11484. void
  11485. GlobOpt::RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore)
  11486. {
  11487. Assert(info);
  11488. if(infoToRestore.LeftValueInfo())
  11489. {
  11490. Assert(info->LeftValue());
  11491. ChangeValueInfo(nullptr, info->LeftValue(), infoToRestore.LeftValueInfo());
  11492. }
  11493. if(infoToRestore.RightValueInfo())
  11494. {
  11495. Assert(info->RightValue());
  11496. ChangeValueInfo(nullptr, info->RightValue(), infoToRestore.RightValueInfo());
  11497. }
  11498. }
// Attempts to float64-specialize a unary (or transfer-like) instruction.
// Returns true if the instruction was specialized, false to leave it as-is.
// *pDstVal receives the dst's value when a dst is produced; skipDst suppresses
// dst specialization (also forced for ArgOut_A_InlineBuiltIn).
bool
GlobOpt::TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *src1;
    IR::Opnd *dst;
    Js::OpCode opcode = instr->m_opcode;
    Value *valueToTransfer = nullptr;

    Assert(src1Val && src1Val->GetValueInfo()->IsLikelyNumber() || OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    if (!this->DoFloatTypeSpec())
    {
        return false;
    }

    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (opcode)
        {
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Only the source is specialized for the argout; no float dst.
            skipDst = true;
            // fall-through

        case Js::OpCode::Ld_A:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if (this->IsFloat64TypeSpecialized(sym, this->currentBlock) == false)
                {
                    // Type specializing an Ld_A isn't worth it, unless the src
                    // is already type specialized
                    return false;
                }
            }
            if (instr->m_opcode == Js::OpCode::Ld_A)
            {
                // Pure transfer: the dst takes the src's value number.
                valueToTransfer = src1Val;
            }
            break;

        case Js::OpCode::Neg_A:
            break;

        case Js::OpCode::Conv_Num:
            // Once specialized, the conversion degenerates into a move.
            Assert(src1Val);
            opcode = Js::OpCode::Ld_A;
            valueToTransfer = src1Val;
            if (!src1Val->GetValueInfo()->IsNumber())
            {
                // Source not proven to be a number: the dst gets a fresh Float
                // value instead of transferring the source's value.
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                valueToTransfer = NewGenericValue(ValueType::Float, instr->GetDst()->GetStackSym());
                if (this->IsFloat64TypeSpecialized(sym, this->currentBlock) == false)
                {
                    // Set the dst as a nonDeadStore. We want to keep the Ld_A to prevent the FromVar from
                    // being dead-stored, as it could cause implicit calls.
                    dst = instr->GetDst();
                    dst->AsRegOpnd()->m_dontDeadStore = true;
                }
            }
            break;

        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        case Js::OpCode::StElemC:
            // Element stores have their own specialization path.
            return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

        default:
            return false;
        }
    }

    // Make sure the srcs are specialized
    src1 = instr->GetSrc1();

    // Use original val when calling toFloat64 as this is what we'll use to try hoisting the fromVar if we're in a loop.
    this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);

    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            this->TypeSpecializeFloatDst(instr, valueToTransfer, src1Val, nullptr, pDstVal);
            if (!this->IsLoopPrePass())
            {
                // No opcode rewrite on prepass.
                instr->m_opcode = opcode;
            }
        }
    }

    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif

    return true;
}
  11591. // Unconditionally type-spec dst to float.
  11592. void
  11593. GlobOpt::TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal)
  11594. {
  11595. IR::Opnd* dst = instr->GetDst();
  11596. Assert(dst);
  11597. AssertMsg(dst->IsRegOpnd(), "What else?");
  11598. this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
  11599. if(valToTransfer)
  11600. {
  11601. *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
  11602. InsertNewValue(*pDstVal, dst);
  11603. }
  11604. else
  11605. {
  11606. *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Value, src2Value);
  11607. }
  11608. }
  11609. void
  11610. GlobOpt::TypeSpecializeSimd128Dst(IRType type, IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value **pDstVal)
  11611. {
  11612. IR::Opnd* dst = instr->GetDst();
  11613. Assert(dst);
  11614. AssertMsg(dst->IsRegOpnd(), "What else?");
  11615. this->ToSimd128Dst(type, instr, dst->AsRegOpnd(), this->currentBlock);
  11616. if (valToTransfer)
  11617. {
  11618. *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
  11619. InsertNewValue(*pDstVal, dst);
  11620. }
  11621. else
  11622. {
  11623. *pDstVal = NewGenericValue(GetValueTypeFromIRType(type), instr->GetDst());
  11624. }
  11625. }
// Attempts to int-specialize an LdLen_A. When a hoisted array length sym is
// available, the LdLen is rewritten to a Ld_I4 of that sym and its value is
// transferred; otherwise the dst is int-specialized with an irregular-length
// bailout. Returns true if specialized. On success, *dstValueRef receives the
// dst value and *forceInvariantHoistingRef may be set to request hoisting.
bool
GlobOpt::TypeSpecializeLdLen(
    IR::Instr * *const instrRef,
    Value * *const src1ValueRef,
    Value * *const dstValueRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(instrRef);
    IR::Instr *&instr = *instrRef;
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(src1ValueRef);
    Value *&src1Value = *src1ValueRef;
    Assert(dstValueRef);
    Value *&dstValue = *dstValueRef;
    Assert(forceInvariantHoistingRef);
    bool &forceInvariantHoisting = *forceInvariantHoistingRef;

    if(!DoLdLenIntSpec(instr, instr->GetSrc1()->GetValueType()))
    {
        return false;
    }

    IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
    if(!IsLoopPrePass())
    {
        IR::RegOpnd *const baseOpnd = instr->GetSrc1()->AsRegOpnd();
        if(baseOpnd->IsArrayRegOpnd())
        {
            // The array's length has been tracked; reuse the hoisted length sym
            // instead of reloading the length (no bailout needed).
            StackSym *const lengthSym = baseOpnd->AsArrayRegOpnd()->LengthSym();
            if(lengthSym)
            {
                CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Ld_I4;
                instr->ReplaceSrc1(IR::RegOpnd::New(lengthSym, lengthSym->GetType(), func));
                instr->ClearBailOutInfo();

                // Find the hoisted length value
                Value *const lengthValue = FindValue(lengthSym);
                Assert(lengthValue);
                src1Value = lengthValue;
                ValueInfo *const lengthValueInfo = lengthValue->GetValueInfo();
                Assert(lengthValueInfo->GetSymStore() != lengthSym);
                IntConstantBounds lengthConstantBounds;
                AssertVerify(lengthValueInfo->TryGetIntConstantBounds(&lengthConstantBounds));
                // Array lengths are non-negative.
                Assert(lengthConstantBounds.LowerBound() >= 0);

                // Int-specialize, and transfer the value to the dst
                TypeSpecializeIntDst(
                    instr,
                    Js::OpCode::LdLen_A,
                    src1Value,
                    src1Value,
                    nullptr,
                    bailOutKind,
                    lengthConstantBounds.LowerBound(),
                    lengthConstantBounds.UpperBound(),
                    &dstValue);

                // Try to force hoisting the Ld_I4 so that the length will have an invariant sym store that can be
                // copy-propped. Invariant hoisting does not automatically hoist Ld_I4.
                forceInvariantHoisting = true;
                return true;
            }
        }

        if (instr->HasBailOutInfo())
        {
            // Merge the irregular-length bailout into the existing mark-temp-object bailout.
            Assert(instr->GetBailOutKind() == IR::BailOutMarkTempObject);
            bailOutKind = IR::BailOutOnIrregularLength | IR::BailOutMarkTempObject;
            instr->SetBailOutKind(bailOutKind);
        }
        else
        {
            Assert(bailOutKind == IR::BailOutOnIrregularLength);
            GenerateBailAtOperation(&instr, bailOutKind);
        }
    }

    // No hoisted length sym (or loop prepass): the length is only known to be a
    // non-negative int32.
    TypeSpecializeIntDst(
        instr,
        Js::OpCode::LdLen_A,
        nullptr,
        nullptr,
        nullptr,
        bailOutKind,
        0,
        INT32_MAX,
        &dstValue);
    return true;
}
  11710. bool
  11711. GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal)
  11712. {
  11713. IR::Opnd *src1;
  11714. IR::Opnd *src2;
  11715. IR::Opnd *dst;
  11716. bool allowUndefinedOrNullSrc1 = true;
  11717. bool allowUndefinedOrNullSrc2 = true;
  11718. bool skipSrc1 = false;
  11719. bool skipSrc2 = false;
  11720. bool skipDst = false;
  11721. if (!this->DoFloatTypeSpec())
  11722. {
  11723. return false;
  11724. }
  11725. // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
  11726. if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  11727. {
  11728. switch (instr->m_opcode)
  11729. {
  11730. case Js::OpCode::Sub_A:
  11731. case Js::OpCode::Mul_A:
  11732. case Js::OpCode::Div_A:
  11733. case Js::OpCode::Expo_A:
  11734. // Avoid if one source is known not to be a number.
  11735. if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
  11736. {
  11737. return false;
  11738. }
  11739. break;
  11740. case Js::OpCode::BrSrEq_A:
  11741. case Js::OpCode::BrSrNeq_A:
  11742. case Js::OpCode::BrEq_A:
  11743. case Js::OpCode::BrNeq_A:
  11744. case Js::OpCode::BrSrNotEq_A:
  11745. case Js::OpCode::BrNotEq_A:
  11746. case Js::OpCode::BrSrNotNeq_A:
  11747. case Js::OpCode::BrNotNeq_A:
  11748. // Avoid if one source is known not to be a number.
  11749. if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
  11750. {
  11751. return false;
  11752. }
  11753. // Undef == Undef, but +Undef != +Undef
  11754. // 0.0 != null, but 0.0 == +null
  11755. //
  11756. // So Bailout on anything but numbers for both src1 and src2
  11757. allowUndefinedOrNullSrc1 = false;
  11758. allowUndefinedOrNullSrc2 = false;
  11759. break;
  11760. case Js::OpCode::BrGt_A:
  11761. case Js::OpCode::BrGe_A:
  11762. case Js::OpCode::BrLt_A:
  11763. case Js::OpCode::BrLe_A:
  11764. case Js::OpCode::BrNotGt_A:
  11765. case Js::OpCode::BrNotGe_A:
  11766. case Js::OpCode::BrNotLt_A:
  11767. case Js::OpCode::BrNotLe_A:
  11768. // Avoid if one source is known not to be a number.
  11769. if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
  11770. {
  11771. return false;
  11772. }
  11773. break;
  11774. case Js::OpCode::Add_A:
  11775. // For Add, we need both sources to be Numbers, otherwise it could be a string concat
  11776. if (!src1Val || !src2Val || !(src1Val->GetValueInfo()->IsLikelyNumber() && src2Val->GetValueInfo()->IsLikelyNumber()))
  11777. {
  11778. return false;
  11779. }
  11780. break;
  11781. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  11782. skipSrc2 = true;
  11783. skipDst = true;
  11784. break;
  11785. default:
  11786. return false;
  11787. }
  11788. }
  11789. else
  11790. {
  11791. switch (instr->m_opcode)
  11792. {
  11793. case Js::OpCode::InlineArrayPush:
  11794. bool isFloatConstMissingItem = src2Val->GetValueInfo()->IsFloatConstant();
  11795. if(isFloatConstMissingItem)
  11796. {
  11797. FloatConstType floatValue = src2Val->GetValueInfo()->AsFloatConstant()->FloatValue();
  11798. isFloatConstMissingItem = Js::SparseArraySegment<double>::IsMissingItem(&floatValue);
  11799. }
  11800. // Don't specialize if the element is not likelyNumber - we will surely bailout
  11801. if(!(src2Val->GetValueInfo()->IsLikelyNumber()) || isFloatConstMissingItem)
  11802. {
  11803. return false;
  11804. }
  11805. // Only specialize the Second source - element
  11806. skipSrc1 = true;
  11807. skipDst = true;
  11808. allowUndefinedOrNullSrc2 = false;
  11809. break;
  11810. }
  11811. }
  11812. // Make sure the srcs are specialized
  11813. if(!skipSrc1)
  11814. {
  11815. src1 = instr->GetSrc1();
  11816. this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, (allowUndefinedOrNullSrc1 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
  11817. }
  11818. if (!skipSrc2)
  11819. {
  11820. src2 = instr->GetSrc2();
  11821. this->ToFloat64(instr, src2, this->currentBlock, src2Val, nullptr, (allowUndefinedOrNullSrc2 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
  11822. }
  11823. if (!skipDst)
  11824. {
  11825. dst = instr->GetDst();
  11826. if (dst)
  11827. {
  11828. *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Val, src2Val);
  11829. AssertMsg(dst->IsRegOpnd(), "What else?");
  11830. this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
  11831. }
  11832. }
  11833. GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
  11834. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11835. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
  11836. {
  11837. Output::Print(_u("Type specialized to FLOAT: "));
  11838. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11839. }
  11840. #endif
  11841. return true;
  11842. }
bool
GlobOpt::TypeSpecializeStElem(IR::Instr ** pInstr, Value *src1Val, Value **pDstVal)
{
    // Attempts to type-specialize the source of a store-element instruction when the
    // base is a likely optimized typed array or likely native array. Returns true if
    // the store was specialized (toType ended up int32 or float64), false otherwise.
    // May rewrite *pInstr when a bailout instruction is generated.
    IR::Instr *&instr = *pInstr;

    IR::RegOpnd *baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    ValueType baseValueType(baseOpnd->GetValueType());
    // Bail out of specialization entirely when the stack-args optimization is in play,
    // when the relevant type-spec phase is disabled, or when the base is neither a
    // likely optimized typed array nor a likely native array.
    if (instr->DoStackArgsOpt(this->func) ||
        (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
        (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
        !(baseValueType.IsLikelyOptimizedTypedArray() || baseValueType.IsLikelyNativeArray()))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't type specialize array access, because typed array type specialization is disabled, or base is not an optimized typed array.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because %s.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr,
                instr->DoStackArgsOpt(this->func) ?
                    _u("instruction uses the arguments object") :
                    _u("typed array type specialization is disabled, or base is not an optimized typed array"));
            Output::Flush();
        }
        return false;
    }

    // src1 is either a register, or an int-constant value (in which case sym stays null below).
    Assert(instr->GetSrc1()->IsRegOpnd() || (src1Val && src1Val->GetValueInfo()->HasIntConstantValue()));
    StackSym *sym = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd()->m_sym : nullptr;

    // Only type specialize the source of store element if the source symbol is already type specialized to int or float.
    if (sym)
    {
        if (baseValueType.IsLikelyNativeArray())
        {
            // Gently coerce these src's into native if it seems likely to work.
            // Otherwise we can't use the fast path to store.
            // But don't try to put a float-specialized number into an int array this way.
            if (!(
                    this->IsInt32TypeSpecialized(sym, this->currentBlock) ||
                    (
                        src1Val &&
                        (
                            DoAggressiveIntTypeSpec()
                                ? src1Val->GetValueInfo()->IsLikelyInt()
                                : src1Val->GetValueInfo()->IsInt()
                        )
                    )
                ))
            {
                if (!(
                        this->IsFloat64TypeSpecialized(sym, this->currentBlock) ||
                        (src1Val && src1Val->GetValueInfo()->IsLikelyNumber())
                    ) ||
                    baseValueType.HasIntElements())
                {
                    return false;
                }
            }
        }
        else if (!this->IsInt32TypeSpecialized(sym, this->currentBlock) && !this->IsFloat64TypeSpecialized(sym, this->currentBlock))
        {
            // Typed-array base, but the source sym is not specialized yet — give up.
            GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because src is not type specialized.\n"));
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because src is not specialized.\n"),
                    this->func->GetJITFunctionBody()->GetDisplayName(),
                    this->func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                    baseValueTypeStr);
                Output::Flush();
            }
            return false;
        }
    }

    // Storing the native-int-array "missing item" sentinel constant can't take the fast path.
    int32 src1IntConstantValue;
    if(baseValueType.IsLikelyNativeIntArray() && src1Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue))
    {
        if(Js::SparseArraySegment<int32>::IsMissingItem(&src1IntConstantValue))
        {
            return false;
        }
    }

    // Note: doing ToVarUses to make sure we do get the int32 version of the index before trying to access its value in
    // ShouldExpectConventionalArrayIndexValue. Not sure why that never gave us a problem before.
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();

    // Make sure we use the int32 version of the index operand symbol, if available. Otherwise, ensure the var symbol is live (by
    // potentially inserting a ToVar).
    this->ToVarUses(instr, dst, /* isDst = */ true, nullptr);

    if (!ShouldExpectConventionalArrayIndexValue(dst))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because index is negative or likely not int.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because index is negative or likely not int.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr);
            Output::Flush();
        }
        return false;
    }

    // Decide the target specialization type from the base object's element kind.
    // toType stays TyVar when no specialization applies.
    IRType toType = TyVar;
    bool isLossyAllowed = true;
    IR::BailOutKind arrayBailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;

    switch(baseValueType.GetObjectType())
    {
    case ObjectType::Int8Array:
    case ObjectType::Uint8Array:
    case ObjectType::Int16Array:
    case ObjectType::Uint16Array:
    case ObjectType::Int32Array:
    case ObjectType::Int8VirtualArray:
    case ObjectType::Uint8VirtualArray:
    case ObjectType::Int16VirtualArray:
    case ObjectType::Uint16VirtualArray:
    case ObjectType::Int32VirtualArray:
    case ObjectType::Int8MixedArray:
    case ObjectType::Uint8MixedArray:
    case ObjectType::Int16MixedArray:
    case ObjectType::Uint16MixedArray:
    case ObjectType::Int32MixedArray:
    Int32Array: // label target for the native-int-array goto in the default case below
        toType = TyInt32;
        break;

    case ObjectType::Uint32Array:
    case ObjectType::Uint32VirtualArray:
    case ObjectType::Uint32MixedArray:
        // Uint32Arrays may store values that overflow int32. If the value being stored comes from a symbol that's
        // already losslessly type specialized to int32, we'll use it. Otherwise, if we only have a float64 specialized
        // value, we don't want to force bailout if it doesn't fit in int32. Instead, we'll emit conversion in the
        // lowerer, and handle overflow, if necessary.
        if (!sym || this->IsInt32TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyInt32;
        }
        else if (this->IsFloat64TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyFloat64;
        }
        break;

    case ObjectType::Float32Array:
    case ObjectType::Float64Array:
    case ObjectType::Float32VirtualArray:
    case ObjectType::Float32MixedArray:
    case ObjectType::Float64VirtualArray:
    case ObjectType::Float64MixedArray:
    Float64Array: // label target for the native-float-array goto in the default case below
        toType = TyFloat64;
        break;

    case ObjectType::Uint8ClampedArray:
    case ObjectType::Uint8ClampedVirtualArray:
    case ObjectType::Uint8ClampedMixedArray:
        // Uint8ClampedArray requires rounding (as opposed to truncation) of floating point values. If source symbol is
        // float type specialized, type specialize this instruction to float as well, and handle rounding in the
        // lowerer.
        if (!sym || this->IsInt32TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyInt32;
            isLossyAllowed = false;
        }
        else if (this->IsFloat64TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyFloat64;
        }
        break;

    default:
        // Remaining kinds must be native arrays; route to the int/float handling above.
        Assert(baseValueType.IsLikelyNativeArray());
        isLossyAllowed = false;
        arrayBailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
        if(baseValueType.HasIntElements())
        {
            goto Int32Array;
        }
        Assert(baseValueType.HasFloatElements());
        goto Float64Array;
    }

    if (toType != TyVar)
    {
        GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr,
                toType == TyInt32 ? _u("int32") : _u("float64"));
            Output::Flush();
        }

        // Convert the stored value to the chosen representation.
        IR::BailOutKind bailOutKind = ((toType == TyInt32) ? IR::BailOutIntOnly : IR::BailOutNumberOnly);
        this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, toType, bailOutKind, /* lossy = */ isLossyAllowed);

        if (!this->IsLoopPrePass())
        {
            bool bConvertToBailoutInstr = true;
            // Definite StElemC doesn't need bailout, because it can't fail or cause conversion.
            if (instr->m_opcode == Js::OpCode::StElemC && baseValueType.IsObject())
            {
                if (baseValueType.HasIntElements())
                {
                    //Native int array requires a missing element check & bailout
                    int32 min = INT32_MIN;
                    int32 max = INT32_MAX;

                    if (src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, false))
                    {
                        // Only need the bailout if the stored value's range can include the missing-item sentinel.
                        bConvertToBailoutInstr = ((min <= Js::JavascriptNativeIntArray::MissingItem) && (max >= Js::JavascriptNativeIntArray::MissingItem));
                    }
                }
                else
                {
                    bConvertToBailoutInstr = false;
                }
            }

            if (bConvertToBailoutInstr)
            {
                if(instr->HasBailOutInfo())
                {
                    const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
                    Assert(
                        (
                            !(oldBailOutKind & ~IR::BailOutKindBits) ||
                            (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                        ) &&
                        !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
                    if(arrayBailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
                    {
                        // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                        // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                        // bails out for the right reason.
                        instr->SetBailOutKind(
                            arrayBailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
                    }
                    else
                    {
                        // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                        // calls to occur, so it must be merged in to eliminate generating the helper call.
                        Assert(arrayBailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                        instr->SetBailOutKind(oldBailOutKind | arrayBailOutKind);
                    }
                }
                else
                {
                    // No existing bailout info on the instruction — attach a fresh one.
                    GenerateBailAtOperation(&instr, arrayBailOutKind);
                }
            }
        }
    }
    else
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because the source was not already specialized.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because of array type.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr);
            Output::Flush();
        }
    }

    return toType != TyVar;
}
IR::Instr *
GlobOpt::ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val)
{
    // Ensures that the var versions of the symbols used by the given operand are live,
    // inserting ToVar conversions as needed. For indirect operands, additionally tries
    // to int-specialize the index. Returns the (possibly new) instruction to continue from.
    Sym *sym;

    switch (opnd->GetKind())
    {
    case IR::OpndKindReg:
        // Source registers need their var sym live; a dst defines the sym, so no conversion.
        if (!isDst && !this->blockData.liveVarSyms->Test(opnd->AsRegOpnd()->m_sym->m_id))
        {
            instr = this->ToVar(instr, opnd->AsRegOpnd(), this->currentBlock, val, true);
        }
        break;

    case IR::OpndKindSym:
        sym = opnd->AsSymOpnd()->m_sym;
        // A property access needs its base stack sym live as a var.
        if (sym->IsPropertySym() && !this->blockData.liveVarSyms->Test(sym->AsPropertySym()->m_stackSym->m_id)
            && sym->AsPropertySym()->m_stackSym->IsVar())
        {
            StackSym *propertyBase = sym->AsPropertySym()->m_stackSym;
            IR::RegOpnd *newOpnd = IR::RegOpnd::New(propertyBase, TyVar, instr->m_func);
            instr = this->ToVar(instr, newOpnd, this->currentBlock, this->FindValue(propertyBase), true);
        }
        break;

    case IR::OpndKindIndir:
        // The base of an indir must always be live as a var.
        IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
        if (!this->blockData.liveVarSyms->Test(baseOpnd->m_sym->m_id))
        {
            instr = this->ToVar(instr, baseOpnd, this->currentBlock, this->FindValue(baseOpnd->m_sym), true);
        }
        IR::RegOpnd *indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
        if (indexOpnd && !indexOpnd->m_sym->IsTypeSpec())
        {
            // Prefer an int32-specialized index: for a definite int any time type spec is on,
            // for a likely-int only under aggressive int type spec.
            if((indexOpnd->GetValueType().IsInt()
                    ? !IsTypeSpecPhaseOff(func)
                    : indexOpnd->GetValueType().IsLikelyInt() && DoAggressiveIntTypeSpec()) && !GetIsAsmJSFunc()) // typespec is disabled for asmjs
            {
                StackSym *const indexVarSym = indexOpnd->m_sym;
                Value *const indexValue = FindValue(indexVarSym);
                Assert(indexValue);
                Assert(indexValue->GetValueInfo()->IsLikelyInt());
                // Lossless conversion of the index to int32 (updates the indir's index operand).
                ToInt32(instr, indexOpnd, currentBlock, indexValue, opnd->AsIndirOpnd(), false);
                Assert(indexValue->GetValueInfo()->IsInt());
                if(!IsLoopPrePass())
                {
                    // Re-fetch the index: ToInt32 may have replaced it with the type-spec sym.
                    indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
                    if(indexOpnd)
                    {
                        Assert(indexOpnd->m_sym->IsTypeSpec());
                        IntConstantBounds indexConstantBounds;
                        AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
                        // If the index is provably >= 0, it can be treated as unsigned.
                        if(ValueInfo::IsGreaterThanOrEqualTo(
                                indexValue,
                                indexConstantBounds.LowerBound(),
                                indexConstantBounds.UpperBound(),
                                nullptr,
                                0,
                                0))
                        {
                            indexOpnd->SetType(TyUint32);
                        }
                    }
                }
            }
            else if (!this->blockData.liveVarSyms->Test(indexOpnd->m_sym->m_id))
            {
                // Can't int-specialize the index — fall back to making its var sym live.
                instr = this->ToVar(instr, indexOpnd, this->currentBlock, this->FindValue(indexOpnd->m_sym), true);
            }
        }
        break;
    }

    return instr;
}
  12191. IR::Instr *
  12192. GlobOpt::ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *value, bool needsUpdate)
  12193. {
  12194. IR::Instr *newInstr;
  12195. StackSym *varSym = regOpnd->m_sym;
  12196. if (IsTypeSpecPhaseOff(this->func))
  12197. {
  12198. return instr;
  12199. }
  12200. if (this->IsLoopPrePass())
  12201. {
  12202. block->globOptData.liveVarSyms->Set(varSym->m_id);
  12203. return instr;
  12204. }
  12205. if (block->globOptData.liveVarSyms->Test(varSym->m_id))
  12206. {
  12207. // Already live, nothing to do
  12208. return instr;
  12209. }
  12210. if (!varSym->IsVar())
  12211. {
  12212. Assert(!varSym->IsTypeSpec());
  12213. // Leave non-vars alone.
  12214. return instr;
  12215. }
  12216. Assert(this->IsTypeSpecialized(varSym, block));
  12217. if (!value)
  12218. {
  12219. value = this->FindValue(block->globOptData.symToValueMap, varSym);
  12220. }
  12221. ValueInfo *valueInfo = value ? value->GetValueInfo() : nullptr;
  12222. if(valueInfo && valueInfo->IsInt())
  12223. {
  12224. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  12225. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  12226. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  12227. // lossy state.
  12228. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  12229. }
  12230. IRType fromType;
  12231. StackSym *typeSpecSym;
  12232. if (block->globOptData.liveInt32Syms->Test(varSym->m_id) && !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id))
  12233. {
  12234. fromType = TyInt32;
  12235. typeSpecSym = varSym->GetInt32EquivSym(this->func);
  12236. Assert(valueInfo);
  12237. Assert(valueInfo->IsInt());
  12238. }
  12239. else if (block->globOptData.liveFloat64Syms->Test(varSym->m_id))
  12240. {
  12241. fromType = TyFloat64;
  12242. typeSpecSym = varSym->GetFloat64EquivSym(this->func);
  12243. // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any non-number
  12244. // value, even ones that have already been generated before. Float-specialized non-number values cannot be converted
  12245. // back to Var since they will not go back to the original non-number value. The dead-store pass will update the bailout
  12246. // kind on already-generated FromVars based on this bit.
  12247. typeSpecSym->m_requiresBailOnNotNumber = true;
  12248. // A previous float conversion may have used BailOutPrimitiveButString, which does not change the value type to say
  12249. // definitely float, since it can also be a non-string primitive. The convert back to Var though, will cause that
  12250. // bailout kind to be changed to BailOutNumberOnly in the dead-store phase, so from the point of the initial conversion
  12251. // to float, that the value is definitely number. Since we don't know where the FromVar is, change the value type here.
  12252. if(valueInfo)
  12253. {
  12254. if(!valueInfo->IsNumber())
  12255. {
  12256. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12257. ChangeValueInfo(block, value, valueInfo);
  12258. regOpnd->SetValueType(valueInfo->Type());
  12259. }
  12260. }
  12261. else
  12262. {
  12263. value = NewGenericValue(ValueType::Float);
  12264. valueInfo = value->GetValueInfo();
  12265. SetValue(&block->globOptData, value, varSym);
  12266. regOpnd->SetValueType(valueInfo->Type());
  12267. }
  12268. }
  12269. else
  12270. {
  12271. // SIMD_JS
  12272. Assert(IsLiveAsSimd128(varSym, &block->globOptData));
  12273. if (IsLiveAsSimd128F4(varSym, &block->globOptData))
  12274. {
  12275. fromType = TySimd128F4;
  12276. }
  12277. else
  12278. {
  12279. Assert(IsLiveAsSimd128I4(varSym, &block->globOptData));
  12280. fromType = TySimd128I4;
  12281. }
  12282. if (valueInfo)
  12283. {
  12284. if (fromType == TySimd128F4 && !valueInfo->Type().IsSimd128Float32x4())
  12285. {
  12286. valueInfo = valueInfo->SpecializeToSimd128F4(alloc);
  12287. ChangeValueInfo(block, value, valueInfo);
  12288. regOpnd->SetValueType(valueInfo->Type());
  12289. }
  12290. else if (fromType == TySimd128I4 && !valueInfo->Type().IsSimd128Int32x4())
  12291. {
  12292. if (!valueInfo->Type().IsSimd128Int32x4())
  12293. {
  12294. valueInfo = valueInfo->SpecializeToSimd128I4(alloc);
  12295. ChangeValueInfo(block, value, valueInfo);
  12296. regOpnd->SetValueType(valueInfo->Type());
  12297. }
  12298. }
  12299. }
  12300. else
  12301. {
  12302. ValueType valueType = fromType == TySimd128F4 ? ValueType::GetSimd128(ObjectType::Simd128Float32x4) : ValueType::GetSimd128(ObjectType::Simd128Int32x4);
  12303. value = NewGenericValue(valueType);
  12304. valueInfo = value->GetValueInfo();
  12305. SetValue(&block->globOptData, value, varSym);
  12306. regOpnd->SetValueType(valueInfo->Type());
  12307. }
  12308. ValueType valueType = valueInfo->Type();
  12309. // Should be definite if type-specialized
  12310. Assert(valueType.IsSimd128());
  12311. typeSpecSym = varSym->GetSimd128EquivSym(fromType, this->func);
  12312. }
  12313. Assert(valueInfo);
  12314. int32 intConstantValue;
  12315. if (valueInfo->TryGetIntConstantValue(&intConstantValue))
  12316. {
  12317. // Lower will tag or create a number directly
  12318. newInstr = IR::Instr::New(Js::OpCode::LdC_A_I4, regOpnd,
  12319. IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func), instr->m_func);
  12320. }
  12321. else
  12322. {
  12323. IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, fromType, instr->m_func);
  12324. Js::OpCode opcode = Js::OpCode::ToVar;
  12325. regNew->SetIsJITOptimizedReg(true);
  12326. newInstr = IR::Instr::New(opcode, regOpnd, regNew, instr->m_func);
  12327. }
  12328. newInstr->SetByteCodeOffset(instr);
  12329. newInstr->GetDst()->AsRegOpnd()->SetIsJITOptimizedReg(true);
  12330. ValueType valueType = valueInfo->Type();
  12331. if(fromType == TyInt32)
  12332. {
  12333. #if !INT32VAR // All 32-bit ints are taggable on 64-bit architectures
  12334. IntConstantBounds constantBounds;
  12335. AssertVerify(valueInfo->TryGetIntConstantBounds(&constantBounds));
  12336. if(constantBounds.IsTaggable())
  12337. #endif
  12338. {
  12339. // The value is within the taggable range, so set the opnd value types to TaggedInt to avoid the overflow check
  12340. valueType = ValueType::GetTaggedInt();
  12341. }
  12342. }
  12343. newInstr->GetDst()->SetValueType(valueType);
  12344. newInstr->GetSrc1()->SetValueType(valueType);
  12345. IR::Instr *insertAfterInstr = instr->m_prev;
  12346. if (instr == block->GetLastInstr() &&
  12347. (instr->IsBranchInstr() || instr->m_opcode == Js::OpCode::BailTarget))
  12348. {
  12349. // Don't insert code between the branch and the preceding ByteCodeUses instrs...
  12350. while(insertAfterInstr->m_opcode == Js::OpCode::ByteCodeUses)
  12351. {
  12352. insertAfterInstr = insertAfterInstr->m_prev;
  12353. }
  12354. }
  12355. block->InsertInstrAfter(newInstr, insertAfterInstr);
  12356. block->globOptData.liveVarSyms->Set(varSym->m_id);
  12357. GOPT_TRACE_OPND(regOpnd, _u("Converting to var\n"));
  12358. if (block->loop)
  12359. {
  12360. Assert(!this->IsLoopPrePass());
  12361. this->TryHoistInvariant(newInstr, block, value, value, nullptr, false);
  12362. }
  12363. if (needsUpdate)
  12364. {
  12365. // Make sure that the kill effect of the ToVar instruction is tracked and that the kill of a property
  12366. // type is reflected in the current instruction.
  12367. this->ProcessKills(newInstr);
  12368. this->ValueNumberObjectType(newInstr->GetDst(), newInstr);
  12369. if (instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  12370. {
  12371. // Reprocess the load source. We need to reset the PropertySymOpnd fields first.
  12372. IR::PropertySymOpnd *propertySymOpnd = instr->GetSrc1()->AsPropertySymOpnd();
  12373. if (propertySymOpnd->IsTypeCheckSeqCandidate())
  12374. {
  12375. propertySymOpnd->SetTypeChecked(false);
  12376. propertySymOpnd->SetTypeAvailable(false);
  12377. propertySymOpnd->SetWriteGuardChecked(false);
  12378. }
  12379. this->FinishOptPropOp(instr, propertySymOpnd);
  12380. instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
  12381. }
  12382. }
  12383. return instr;
  12384. }
  12385. IR::Instr *
  12386. GlobOpt::ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy)
  12387. {
  12388. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyInt32, IR::BailOutIntOnly, lossy);
  12389. }
  12390. IR::Instr *
  12391. GlobOpt::ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind)
  12392. {
  12393. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyFloat64, bailOutKind);
  12394. }
  12395. IR::Instr *
  12396. GlobOpt::ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
  12397. {
  12398. Assert(bailOutKind != IR::BailOutInvalid);
  12399. IR::Instr *newInstr;
  12400. if (!val && opnd->IsRegOpnd())
  12401. {
  12402. val = this->FindValue(block->globOptData.symToValueMap, opnd->AsRegOpnd()->m_sym);
  12403. }
  12404. ValueInfo *valueInfo = val ? val->GetValueInfo() : nullptr;
  12405. bool needReplaceSrc = false;
  12406. bool updateBlockLastInstr = false;
  12407. if (instr)
  12408. {
  12409. needReplaceSrc = true;
  12410. if (!insertBeforeInstr)
  12411. {
  12412. insertBeforeInstr = instr;
  12413. }
  12414. }
  12415. else if (!insertBeforeInstr)
  12416. {
  12417. // Insert it at the end of the block
  12418. insertBeforeInstr = block->GetLastInstr();
  12419. if (insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
  12420. {
  12421. // Don't insert code between the branch and the preceding ByteCodeUses instrs...
  12422. while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
  12423. {
  12424. insertBeforeInstr = insertBeforeInstr->m_prev;
  12425. }
  12426. }
  12427. else
  12428. {
  12429. insertBeforeInstr = insertBeforeInstr->m_next;
  12430. updateBlockLastInstr = true;
  12431. }
  12432. }
  12433. // Int constant values will be propagated into the instruction. For ArgOut_A_InlineBuiltIn, there's no benefit from
  12434. // const-propping, so those are excluded.
  12435. if (opnd->IsRegOpnd() &&
  12436. !(
  12437. valueInfo &&
  12438. (valueInfo->HasIntConstantValue() || valueInfo->IsFloatConstant()) &&
  12439. (!instr || instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
  12440. ))
  12441. {
  12442. IR::RegOpnd *regSrc = opnd->AsRegOpnd();
  12443. StackSym *varSym = regSrc->m_sym;
  12444. Js::OpCode opcode = Js::OpCode::FromVar;
  12445. if (varSym->IsTypeSpec() || !block->globOptData.liveVarSyms->Test(varSym->m_id))
  12446. {
  12447. // Conversion between int32 and float64
  12448. if (varSym->IsTypeSpec())
  12449. {
  12450. varSym = varSym->GetVarEquivSym(this->func);
  12451. }
  12452. opcode = Js::OpCode::Conv_Prim;
  12453. }
  12454. Assert(block->globOptData.liveVarSyms->Test(varSym->m_id) || this->IsTypeSpecialized(varSym, block));
  12455. StackSym *typeSpecSym;
  12456. BOOL isLive;
  12457. BVSparse<JitArenaAllocator> *livenessBv;
  12458. if(valueInfo && valueInfo->IsInt())
  12459. {
  12460. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  12461. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  12462. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  12463. // lossy state.
  12464. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  12465. }
  12466. if (toType == TyInt32)
  12467. {
  12468. // Need to determine whether the conversion is actually lossy or lossless. If the value is an int, then it's a
  12469. // lossless conversion despite the type of conversion requested. The liveness of the converted int32 sym needs to be
  12470. // set to reflect the actual type of conversion done. Also, a lossless conversion needs the value to determine
  12471. // whether the conversion may need to bail out.
  12472. Assert(valueInfo);
  12473. if(valueInfo->IsInt())
  12474. {
  12475. lossy = false;
  12476. }
  12477. else
  12478. {
  12479. Assert(IsLoopPrePass() || !IsInt32TypeSpecialized(varSym, block));
  12480. }
  12481. livenessBv = block->globOptData.liveInt32Syms;
  12482. isLive = livenessBv->Test(varSym->m_id) && (lossy || !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id));
  12483. if (this->IsLoopPrePass())
  12484. {
  12485. if(!isLive)
  12486. {
  12487. livenessBv->Set(varSym->m_id);
  12488. if(lossy)
  12489. {
  12490. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  12491. }
  12492. else
  12493. {
  12494. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  12495. }
  12496. }
  12497. if(!lossy)
  12498. {
  12499. Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
  12500. valueInfo = valueInfo->SpecializeToInt32(alloc);
  12501. ChangeValueInfo(nullptr, val, valueInfo);
  12502. if(needReplaceSrc)
  12503. {
  12504. opnd->SetValueType(valueInfo->Type());
  12505. }
  12506. }
  12507. return instr;
  12508. }
  12509. typeSpecSym = varSym->GetInt32EquivSym(this->func);
  12510. if (!isLive)
  12511. {
  12512. if (!opnd->IsVar() ||
  12513. !block->globOptData.liveVarSyms->Test(varSym->m_id) ||
  12514. (block->globOptData.liveFloat64Syms->Test(varSym->m_id) && valueInfo && valueInfo->IsLikelyFloat()))
  12515. {
  12516. Assert(block->globOptData.liveFloat64Syms->Test(varSym->m_id));
  12517. if(!lossy && !valueInfo->IsInt())
  12518. {
  12519. // Shouldn't try to do a lossless conversion from float64 to int32 when the value is not known to be an
  12520. // int. There are cases where we need more than two passes over loops to flush out all dependencies.
  12521. // It's possible for the loop prepass to think that a sym s1 remains an int because it acquires the
  12522. // value of another sym s2 that is an int in the prepass at that time. However, s2 can become a float
  12523. // later in the loop body, in which case s1 would become a float on the second iteration of the loop. By
  12524. // that time, we would have already committed to having s1 live as a lossless int on entry into the
  12525. // loop, and we end up having to compensate by doing a lossless conversion from float to int, which will
  12526. // need a bailout and will most likely bail out.
  12527. //
  12528. // If s2 becomes a var instead of a float, then the compensation is legal although not ideal. After
  12529. // enough bailouts, rejit would be triggered with aggressive int type spec turned off. For the
  12530. // float-to-int conversion though, there's no point in emitting a bailout because we already know that
  12531. // the value is a float and has high probability of bailing out (whereas a var has a chance to be a
  12532. // tagged int), and so currently lossless conversion from float to int with bailout is not supported.
  12533. //
  12534. // So, treating this case as a compile-time bailout. The exception will trigger the jit work item to be
  12535. // restarted with aggressive int type specialization disabled.
  12536. if(bailOutKind == IR::BailOutExpectingInteger)
  12537. {
  12538. Assert(IsSwitchOptEnabled());
  12539. throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
  12540. }
  12541. else
  12542. {
  12543. Assert(DoAggressiveIntTypeSpec());
  12544. if(PHASE_TRACE(Js::BailOutPhase, this->func))
  12545. {
  12546. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  12547. Output::Print(
  12548. _u("BailOut (compile-time): function: %s (%s) varSym: "),
  12549. this->func->GetJITFunctionBody()->GetDisplayName(),
  12550. this->func->GetDebugNumberSet(debugStringBuffer),
  12551. varSym->m_id);
  12552. #if DBG_DUMP
  12553. varSym->Dump();
  12554. #else
  12555. Output::Print(_u("s%u"), varSym->m_id);
  12556. #endif
  12557. if(varSym->HasByteCodeRegSlot())
  12558. {
  12559. Output::Print(_u(" byteCodeReg: R%u"), varSym->GetByteCodeRegSlot());
  12560. }
  12561. Output::Print(_u(" (lossless conversion from float64 to int32)\n"));
  12562. Output::Flush();
  12563. }
  12564. if(!DoAggressiveIntTypeSpec())
  12565. {
  12566. // Aggressive int type specialization is already off for some reason. Prevent trying to rejit again
  12567. // because it won't help and the same thing will happen again. Just abort jitting this function.
  12568. if(PHASE_TRACE(Js::BailOutPhase, this->func))
  12569. {
  12570. Output::Print(_u(" Aborting JIT because AggressiveIntTypeSpec is already off\n"));
  12571. Output::Flush();
  12572. }
  12573. throw Js::OperationAbortedException();
  12574. }
  12575. throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
  12576. }
  12577. }
  12578. if(opnd->IsVar())
  12579. {
  12580. regSrc->SetType(TyFloat64);
  12581. regSrc->m_sym = varSym->GetFloat64EquivSym(this->func);
  12582. opcode = Js::OpCode::Conv_Prim;
  12583. }
  12584. else
  12585. {
  12586. Assert(regSrc->IsFloat64());
  12587. Assert(regSrc->m_sym->IsFloat64());
  12588. Assert(opcode == Js::OpCode::Conv_Prim);
  12589. }
  12590. }
  12591. }
  12592. GOPT_TRACE_OPND(regSrc, _u("Converting to int32\n"));
  12593. }
  12594. else if (toType == TyFloat64)
  12595. {
  12596. // float64
  12597. typeSpecSym = varSym->GetFloat64EquivSym(this->func);
  12598. if(!IsLoopPrePass() && typeSpecSym->m_requiresBailOnNotNumber && IsFloat64TypeSpecialized(varSym, block))
  12599. {
  12600. // This conversion is already protected by a BailOutNumberOnly bailout (or at least it will be after the
  12601. // dead-store phase). Since 'requiresBailOnNotNumber' is not flow-based, change the value to definitely float.
  12602. if(valueInfo)
  12603. {
  12604. if(!valueInfo->IsNumber())
  12605. {
  12606. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12607. ChangeValueInfo(block, val, valueInfo);
  12608. opnd->SetValueType(valueInfo->Type());
  12609. }
  12610. }
  12611. else
  12612. {
  12613. val = NewGenericValue(ValueType::Float);
  12614. valueInfo = val->GetValueInfo();
  12615. SetValue(&block->globOptData, val, varSym);
  12616. opnd->SetValueType(valueInfo->Type());
  12617. }
  12618. }
  12619. if(bailOutKind == IR::BailOutNumberOnly)
  12620. {
  12621. if(!IsLoopPrePass())
  12622. {
  12623. // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any
  12624. // non-number value, even ones that have already been generated before. The dead-store pass will update the
  12625. // bailout kind on already-generated FromVars based on this bit.
  12626. typeSpecSym->m_requiresBailOnNotNumber = true;
  12627. }
  12628. }
  12629. else if(typeSpecSym->m_requiresBailOnNotNumber)
  12630. {
  12631. Assert(bailOutKind == IR::BailOutPrimitiveButString);
  12632. bailOutKind = IR::BailOutNumberOnly;
  12633. }
  12634. livenessBv = block->globOptData.liveFloat64Syms;
  12635. isLive = livenessBv->Test(varSym->m_id);
  12636. if (this->IsLoopPrePass())
  12637. {
  12638. if(!isLive)
  12639. {
  12640. livenessBv->Set(varSym->m_id);
  12641. }
  12642. if (this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
  12643. {
  12644. this->prePassLoop->forceFloat64SymsOnEntry->Set(varSym->m_id);
  12645. }
  12646. else
  12647. {
  12648. Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
  12649. if (symStore && symStore != varSym
  12650. && this->OptIsInvariant(symStore, block, this->prePassLoop, this->FindValue(block->globOptData.symToValueMap, symStore), false, true))
  12651. {
  12652. // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
  12653. // outside the loop.
  12654. this->prePassLoop->forceFloat64SymsOnEntry->Set(symStore->m_id);
  12655. }
  12656. }
  12657. if(bailOutKind == IR::BailOutNumberOnly)
  12658. {
  12659. if(valueInfo)
  12660. {
  12661. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12662. ChangeValueInfo(block, val, valueInfo);
  12663. }
  12664. else
  12665. {
  12666. val = NewGenericValue(ValueType::Float);
  12667. valueInfo = val->GetValueInfo();
  12668. SetValue(&block->globOptData, val, varSym);
  12669. }
  12670. if(needReplaceSrc)
  12671. {
  12672. opnd->SetValueType(valueInfo->Type());
  12673. }
  12674. }
  12675. return instr;
  12676. }
  12677. if (!isLive && regSrc->IsVar())
  12678. {
  12679. if (!block->globOptData.liveVarSyms->Test(varSym->m_id) ||
  12680. (
  12681. block->globOptData.liveInt32Syms->Test(varSym->m_id) &&
  12682. !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
  12683. valueInfo &&
  12684. valueInfo->IsLikelyInt()
  12685. ))
  12686. {
  12687. Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
  12688. Assert(!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)); // Shouldn't try to convert a lossy int32 to anything
  12689. regSrc->SetType(TyInt32);
  12690. regSrc->m_sym = varSym->GetInt32EquivSym(this->func);
  12691. opcode = Js::OpCode::Conv_Prim;
  12692. }
  12693. }
  12694. GOPT_TRACE_OPND(regSrc, _u("Converting to float64\n"));
  12695. }
  12696. else
  12697. {
  12698. // SIMD_JS
  12699. Assert(IRType_IsSimd128(toType));
  12700. // Get or create type-spec sym
  12701. typeSpecSym = varSym->GetSimd128EquivSym(toType, this->func);
  12702. if (!IsLoopPrePass() && IsSimd128TypeSpecialized(toType, varSym, block))
  12703. {
  12704. // Consider: Is this needed ? Shouldn't this have been done at previous FromVar since the simd128 sym is alive ?
  12705. if (valueInfo)
  12706. {
  12707. if (!valueInfo->IsSimd128(toType))
  12708. {
  12709. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  12710. ChangeValueInfo(block, val, valueInfo);
  12711. opnd->SetValueType(valueInfo->Type());
  12712. }
  12713. }
  12714. else
  12715. {
  12716. val = NewGenericValue(GetValueTypeFromIRType(toType));
  12717. valueInfo = val->GetValueInfo();
  12718. SetValue(&block->globOptData, val, varSym);
  12719. opnd->SetValueType(valueInfo->Type());
  12720. }
  12721. }
  12722. livenessBv = block->globOptData.GetSimd128LivenessBV(toType);
  12723. isLive = livenessBv->Test(varSym->m_id);
  12724. if (this->IsLoopPrePass())
  12725. {
  12726. // FromVar Hoisting
  12727. BVSparse<Memory::JitArenaAllocator> * forceSimd128SymsOnEntry;
  12728. forceSimd128SymsOnEntry = \
  12729. toType == TySimd128F4 ? this->prePassLoop->forceSimd128F4SymsOnEntry : this->prePassLoop->forceSimd128I4SymsOnEntry;
  12730. if (!isLive)
  12731. {
  12732. livenessBv->Set(varSym->m_id);
  12733. }
  12734. // Be aggressive with hoisting only if value is always initialized to SIMD type before entering loop.
  12735. // This reduces the chance that the FromVar gets executed while the specialized instruction in the loop is not. Leading to unnecessary excessive bailouts.
  12736. if (val && !val->GetValueInfo()->HasBeenUndefined() && !val->GetValueInfo()->HasBeenNull() &&
  12737. this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
  12738. {
  12739. forceSimd128SymsOnEntry->Set(varSym->m_id);
  12740. }
  12741. else
  12742. {
  12743. Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
  12744. Value * value = symStore ? this->FindValue(block->globOptData.symToValueMap, symStore) : nullptr;
  12745. if (symStore && symStore != varSym
  12746. && value
  12747. && !value->GetValueInfo()->HasBeenUndefined() && !value->GetValueInfo()->HasBeenNull()
  12748. && this->OptIsInvariant(symStore, block, this->prePassLoop, value, true, true))
  12749. {
  12750. // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
  12751. // outside the loop.
  12752. forceSimd128SymsOnEntry->Set(symStore->m_id);
  12753. }
  12754. }
  12755. Assert(bailOutKind == IR::BailOutSimd128F4Only || bailOutKind == IR::BailOutSimd128I4Only);
  12756. // We are in loop prepass, we haven't propagated the value info to the src. Do it now.
  12757. if (valueInfo)
  12758. {
  12759. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  12760. ChangeValueInfo(block, val, valueInfo);
  12761. }
  12762. else
  12763. {
  12764. val = NewGenericValue(GetValueTypeFromIRType(toType));
  12765. valueInfo = val->GetValueInfo();
  12766. SetValue(&block->globOptData, val, varSym);
  12767. }
  12768. if (needReplaceSrc)
  12769. {
  12770. opnd->SetValueType(valueInfo->Type());
  12771. }
  12772. return instr;
  12773. }
  12774. GOPT_TRACE_OPND(regSrc, _u("Converting to Simd128\n"));
  12775. }
  12776. bool needLoad = false;
  12777. if (needReplaceSrc)
  12778. {
  12779. bool wasDead = regSrc->GetIsDead();
  12780. // needReplaceSrc means we are type specializing a use, and need to replace the src on the instr
  12781. if (!isLive)
  12782. {
  12783. needLoad = true;
  12784. // ReplaceSrc will delete it.
  12785. regSrc = regSrc->Copy(instr->m_func)->AsRegOpnd();
  12786. }
  12787. IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
  12788. if(valueInfo)
  12789. {
  12790. regNew->SetValueType(valueInfo->Type());
  12791. regNew->m_wasNegativeZeroPreventedByBailout = valueInfo->WasNegativeZeroPreventedByBailout();
  12792. }
  12793. regNew->SetIsDead(wasDead);
  12794. regNew->SetIsJITOptimizedReg(true);
  12795. this->CaptureByteCodeSymUses(instr);
  12796. if (indir == nullptr)
  12797. {
  12798. instr->ReplaceSrc(opnd, regNew);
  12799. }
  12800. else
  12801. {
  12802. indir->ReplaceIndexOpnd(regNew);
  12803. }
  12804. opnd = regNew;
  12805. if (!needLoad)
  12806. {
  12807. Assert(isLive);
  12808. return instr;
  12809. }
  12810. }
  12811. else
  12812. {
  12813. // We just need to insert a load of a type spec sym
  12814. if(isLive)
  12815. {
  12816. return instr;
  12817. }
  12818. // Insert it before the specified instruction
  12819. instr = insertBeforeInstr;
  12820. }
  12821. IR::RegOpnd *regDst = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
  12822. bool isBailout = false;
  12823. bool isHoisted = false;
  12824. bool isInLandingPad = (block->next && !block->next->isDeleted && block->next->isLoopHeader);
  12825. if (isInLandingPad)
  12826. {
  12827. Loop *loop = block->next->loop;
  12828. Assert(loop && loop->landingPad == block);
  12829. Assert(loop->bailOutInfo);
  12830. }
  12831. if (opcode == Js::OpCode::FromVar)
  12832. {
  12833. if (toType == TyInt32)
  12834. {
  12835. Assert(valueInfo);
  12836. if (lossy)
  12837. {
  12838. if (!valueInfo->IsPrimitive() && !IsTypeSpecialized(varSym, block))
  12839. {
  12840. // Lossy conversions to int32 on non-primitive values may have implicit calls to toString or valueOf, which
  12841. // may be overridden to have a side effect. The side effect needs to happen every time the conversion is
  12842. // supposed to happen, so the resulting lossy int32 value cannot be reused. Bail out on implicit calls.
  12843. Assert(DoLossyIntTypeSpec());
  12844. bailOutKind = IR::BailOutOnNotPrimitive;
  12845. isBailout = true;
  12846. }
  12847. }
  12848. else if (!valueInfo->IsInt())
  12849. {
  12850. // The operand is likely an int (hence the request to convert to int), so bail out if it's not an int. Only
  12851. // bail out if a lossless conversion to int is requested. Lossy conversions to int such as in (a | 0) don't
  12852. // need to bail out.
  12853. if (bailOutKind == IR::BailOutExpectingInteger)
  12854. {
  12855. Assert(IsSwitchOptEnabled());
  12856. }
  12857. else
  12858. {
  12859. Assert(DoAggressiveIntTypeSpec());
  12860. }
  12861. isBailout = true;
  12862. }
  12863. }
  12864. else if (toType == TyFloat64 &&
  12865. (!valueInfo || !valueInfo->IsNumber()))
  12866. {
  12867. // Bailout if converting vars to float if we can't prove they are floats:
  12868. // x = str + float; -> need to bailout if str is a string
  12869. //
  12870. // x = obj * 0.1;
  12871. // y = obj * 0.2; -> if obj has valueof, we'll only call valueof once on the FromVar conversion...
  12872. Assert(bailOutKind != IR::BailOutInvalid);
  12873. isBailout = true;
  12874. }
  12875. else if (IRType_IsSimd128(toType) &&
  12876. (!valueInfo || !valueInfo->IsSimd128(toType)))
  12877. {
  12878. Assert(toType == TySimd128F4 && bailOutKind == IR::BailOutSimd128F4Only
  12879. || toType == TySimd128I4 && bailOutKind == IR::BailOutSimd128I4Only);
  12880. isBailout = true;
  12881. }
  12882. }
  12883. if (isBailout)
  12884. {
  12885. if (isInLandingPad)
  12886. {
  12887. Loop *loop = block->next->loop;
  12888. this->EnsureBailTarget(loop);
  12889. instr = loop->bailOutInfo->bailOutInstr;
  12890. updateBlockLastInstr = false;
  12891. newInstr = IR::BailOutInstr::New(opcode, bailOutKind, loop->bailOutInfo, instr->m_func);
  12892. newInstr->SetDst(regDst);
  12893. newInstr->SetSrc1(regSrc);
  12894. }
  12895. else
  12896. {
  12897. newInstr = IR::BailOutInstr::New(opcode, regDst, regSrc, bailOutKind, instr, instr->m_func);
  12898. }
  12899. }
  12900. else
  12901. {
  12902. newInstr = IR::Instr::New(opcode, regDst, regSrc, instr->m_func);
  12903. }
  12904. newInstr->SetByteCodeOffset(instr);
  12905. instr->InsertBefore(newInstr);
  12906. if (updateBlockLastInstr)
  12907. {
  12908. block->SetLastInstr(newInstr);
  12909. }
  12910. regDst->SetIsJITOptimizedReg(true);
  12911. newInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
  12912. ValueInfo *const oldValueInfo = valueInfo;
  12913. if(valueInfo)
  12914. {
  12915. newInstr->GetSrc1()->SetValueType(valueInfo->Type());
  12916. }
  12917. if(isBailout)
  12918. {
  12919. Assert(opcode == Js::OpCode::FromVar);
  12920. if(toType == TyInt32)
  12921. {
  12922. Assert(valueInfo);
  12923. if(!lossy)
  12924. {
  12925. Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
  12926. valueInfo = valueInfo->SpecializeToInt32(alloc, isPerformingLoopBackEdgeCompensation);
  12927. ChangeValueInfo(nullptr, val, valueInfo);
  12928. int32 intConstantValue;
  12929. if(indir && needReplaceSrc && valueInfo->TryGetIntConstantValue(&intConstantValue))
  12930. {
  12931. // A likely-int value can have constant bounds due to conditional branches narrowing its range. Now that
  12932. // the sym has been proven to be an int, the likely-int value, after specialization, will be constant.
  12933. // Replace the index opnd in the indir with an offset.
  12934. Assert(opnd == indir->GetIndexOpnd());
  12935. Assert(indir->GetScale() == 0);
  12936. indir->UnlinkIndexOpnd()->Free(instr->m_func);
  12937. opnd = nullptr;
  12938. indir->SetOffset(intConstantValue);
  12939. }
  12940. }
  12941. }
  12942. else if (toType == TyFloat64)
  12943. {
  12944. if(bailOutKind == IR::BailOutNumberOnly)
  12945. {
  12946. if(valueInfo)
  12947. {
  12948. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12949. ChangeValueInfo(block, val, valueInfo);
  12950. }
  12951. else
  12952. {
  12953. val = NewGenericValue(ValueType::Float);
  12954. valueInfo = val->GetValueInfo();
  12955. SetValue(&block->globOptData, val, varSym);
  12956. }
  12957. }
  12958. }
  12959. else
  12960. {
  12961. Assert(IRType_IsSimd128(toType));
  12962. if (valueInfo)
  12963. {
  12964. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  12965. ChangeValueInfo(block, val, valueInfo);
  12966. }
  12967. else
  12968. {
  12969. val = NewGenericValue(GetValueTypeFromIRType(toType));
  12970. valueInfo = val->GetValueInfo();
  12971. SetValue(&block->globOptData, val, varSym);
  12972. }
  12973. }
  12974. }
  12975. if(valueInfo)
  12976. {
  12977. newInstr->GetDst()->SetValueType(valueInfo->Type());
  12978. if(needReplaceSrc && opnd)
  12979. {
  12980. opnd->SetValueType(valueInfo->Type());
  12981. }
  12982. }
  12983. if (block->loop)
  12984. {
  12985. Assert(!this->IsLoopPrePass());
  12986. isHoisted = this->TryHoistInvariant(newInstr, block, val, val, nullptr, false, lossy, false, bailOutKind);
  12987. }
  12988. if (isBailout)
  12989. {
  12990. if (!isHoisted && !isInLandingPad)
  12991. {
  12992. if(valueInfo)
  12993. {
  12994. // Since this is a pre-op bailout, the old value info should be used for the purposes of bailout. For
  12995. // instance, the value info could be LikelyInt but with a constant range. Once specialized to int, the value
  12996. // info would be an int constant. However, the int constant is only guaranteed if the value is actually an
  12997. // int, which this conversion is verifying, so bailout cannot assume the constant value.
  12998. if(oldValueInfo)
  12999. {
  13000. val->SetValueInfo(oldValueInfo);
  13001. }
  13002. else
  13003. {
  13004. block->globOptData.symToValueMap->Clear(varSym->m_id);
  13005. }
  13006. }
  13007. // Fill in bail out info if the FromVar is a bailout instr, and it wasn't hoisted as invariant.
  13008. // If it was hoisted, the invariant code will fill out the bailout info with the loop landing pad bailout info.
  13009. this->FillBailOutInfo(block, newInstr->GetBailOutInfo());
  13010. if(valueInfo)
  13011. {
  13012. // Restore the new value info after filling the bailout info
  13013. if(oldValueInfo)
  13014. {
  13015. val->SetValueInfo(valueInfo);
  13016. }
  13017. else
  13018. {
  13019. SetValue(&block->globOptData, val, varSym);
  13020. }
  13021. }
  13022. }
  13023. }
  13024. // Now that we've captured the liveness in the bailout info, we can mark this as live.
  13025. // This type specialized sym isn't live if the FromVar bails out.
  13026. livenessBv->Set(varSym->m_id);
  13027. if(toType == TyInt32)
  13028. {
  13029. if(lossy)
  13030. {
  13031. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  13032. }
  13033. else
  13034. {
  13035. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13036. }
  13037. }
  13038. }
  13039. else
  13040. {
  13041. Assert(valueInfo);
  13042. if(opnd->IsRegOpnd() && valueInfo->IsInt())
  13043. {
  13044. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  13045. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  13046. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  13047. // lossy state.
  13048. block->globOptData.liveLossyInt32Syms->Clear(opnd->AsRegOpnd()->m_sym->m_id);
  13049. if(toType == TyInt32)
  13050. {
  13051. lossy = false;
  13052. }
  13053. }
  13054. if (this->IsLoopPrePass())
  13055. {
  13056. if(opnd->IsRegOpnd())
  13057. {
  13058. StackSym *const sym = opnd->AsRegOpnd()->m_sym;
  13059. if(toType == TyInt32)
  13060. {
  13061. Assert(!sym->IsTypeSpec());
  13062. block->globOptData.liveInt32Syms->Set(sym->m_id);
  13063. if(lossy)
  13064. {
  13065. block->globOptData.liveLossyInt32Syms->Set(sym->m_id);
  13066. }
  13067. else
  13068. {
  13069. block->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
  13070. }
  13071. }
  13072. else
  13073. {
  13074. Assert(toType == TyFloat64);
  13075. AnalysisAssert(instr);
  13076. StackSym *const varSym = sym->IsTypeSpec() ? sym->GetVarEquivSym(instr->m_func) : sym;
  13077. block->globOptData.liveFloat64Syms->Set(varSym->m_id);
  13078. }
  13079. }
  13080. return instr;
  13081. }
  13082. if (!needReplaceSrc)
  13083. {
  13084. instr = insertBeforeInstr;
  13085. }
  13086. IR::Opnd *constOpnd;
  13087. int32 intConstantValue;
  13088. if(valueInfo->TryGetIntConstantValue(&intConstantValue))
  13089. {
  13090. if(toType == TyInt32)
  13091. {
  13092. constOpnd = IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func);
  13093. }
  13094. else
  13095. {
  13096. Assert(toType == TyFloat64);
  13097. constOpnd = IR::FloatConstOpnd::New(static_cast<FloatConstType>(intConstantValue), TyFloat64, instr->m_func);
  13098. }
  13099. }
  13100. else if(valueInfo->IsFloatConstant())
  13101. {
  13102. const FloatConstType floatValue = valueInfo->AsFloatConstant()->FloatValue();
  13103. if(toType == TyInt32)
  13104. {
  13105. Assert(lossy);
  13106. constOpnd =
  13107. IR::IntConstOpnd::New(
  13108. Js::JavascriptMath::ToInt32(floatValue),
  13109. TyInt32,
  13110. instr->m_func);
  13111. }
  13112. else
  13113. {
  13114. Assert(toType == TyFloat64);
  13115. constOpnd = IR::FloatConstOpnd::New(floatValue, TyFloat64, instr->m_func);
  13116. }
  13117. }
  13118. else
  13119. {
  13120. Assert(opnd->IsVar());
  13121. Assert(opnd->IsAddrOpnd());
  13122. AssertMsg(opnd->AsAddrOpnd()->IsVar(), "We only expect to see addr that are var before lower.");
  13123. // Don't need to capture uses, we are only replacing an addr opnd
  13124. if(toType == TyInt32)
  13125. {
  13126. constOpnd = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opnd->AsAddrOpnd()->m_address), TyInt32, instr->m_func);
  13127. }
  13128. else
  13129. {
  13130. Assert(toType == TyFloat64);
  13131. constOpnd = IR::FloatConstOpnd::New(Js::TaggedInt::ToDouble(opnd->AsAddrOpnd()->m_address), TyFloat64, instr->m_func);
  13132. }
  13133. }
  13134. if (toType == TyInt32)
  13135. {
  13136. if (needReplaceSrc)
  13137. {
  13138. CaptureByteCodeSymUses(instr);
  13139. if(indir)
  13140. {
  13141. Assert(opnd == indir->GetIndexOpnd());
  13142. Assert(indir->GetScale() == 0);
  13143. indir->UnlinkIndexOpnd()->Free(instr->m_func);
  13144. indir->SetOffset(constOpnd->AsIntConstOpnd()->AsInt32());
  13145. }
  13146. else
  13147. {
  13148. instr->ReplaceSrc(opnd, constOpnd);
  13149. }
  13150. }
  13151. else
  13152. {
  13153. StackSym *varSym = opnd->AsRegOpnd()->m_sym;
  13154. if(varSym->IsTypeSpec())
  13155. {
  13156. varSym = varSym->GetVarEquivSym(nullptr);
  13157. Assert(varSym);
  13158. }
  13159. if(block->globOptData.liveInt32Syms->TestAndSet(varSym->m_id))
  13160. {
  13161. Assert(!!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) == lossy);
  13162. }
  13163. else
  13164. {
  13165. if(lossy)
  13166. {
  13167. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  13168. }
  13169. StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
  13170. IR::RegOpnd *int32Reg = IR::RegOpnd::New(int32Sym, TyInt32, instr->m_func);
  13171. int32Reg->SetIsJITOptimizedReg(true);
  13172. newInstr = IR::Instr::New(Js::OpCode::Ld_I4, int32Reg, constOpnd, instr->m_func);
  13173. newInstr->SetByteCodeOffset(instr);
  13174. instr->InsertBefore(newInstr);
  13175. if (updateBlockLastInstr)
  13176. {
  13177. block->SetLastInstr(newInstr);
  13178. }
  13179. }
  13180. }
  13181. }
  13182. else
  13183. {
  13184. StackSym *floatSym;
  13185. bool newFloatSym = false;
  13186. StackSym* varSym;
  13187. if (opnd->IsRegOpnd())
  13188. {
  13189. varSym = opnd->AsRegOpnd()->m_sym;
  13190. if (varSym->IsTypeSpec())
  13191. {
  13192. varSym = varSym->GetVarEquivSym(nullptr);
  13193. Assert(varSym);
  13194. }
  13195. floatSym = varSym->GetFloat64EquivSym(instr->m_func);
  13196. }
  13197. else
  13198. {
  13199. varSym = GetCopyPropSym(block, nullptr, val);
  13200. // If there is no float 64 type specialized sym for this - create a new sym.
  13201. if(!varSym || !IsFloat64TypeSpecialized(varSym, block))
  13202. {
  13203. // Clear the symstore to ensure it's set below to this new symbol
  13204. this->SetSymStoreDirect(val->GetValueInfo(), nullptr);
  13205. varSym = StackSym::New(TyVar, instr->m_func);
  13206. newFloatSym = true;
  13207. }
  13208. floatSym = varSym->GetFloat64EquivSym(instr->m_func);
  13209. }
  13210. IR::RegOpnd *floatReg = IR::RegOpnd::New(floatSym, TyFloat64, instr->m_func);
  13211. floatReg->SetIsJITOptimizedReg(true);
  13212. // If the value is not live - let's load it.
  13213. if(!block->globOptData.liveFloat64Syms->TestAndSet(varSym->m_id))
  13214. {
  13215. newInstr = IR::Instr::New(Js::OpCode::LdC_F8_R8, floatReg, constOpnd, instr->m_func);
  13216. newInstr->SetByteCodeOffset(instr);
  13217. instr->InsertBefore(newInstr);
  13218. if (updateBlockLastInstr)
  13219. {
  13220. block->SetLastInstr(newInstr);
  13221. }
  13222. if(newFloatSym)
  13223. {
  13224. this->SetValue(&block->globOptData, val, varSym);
  13225. }
  13226. // Src is always invariant, but check if the dst is, and then hoist.
  13227. if (block->loop &&
  13228. (
  13229. (newFloatSym && block->loop->CanHoistInvariants()) ||
  13230. this->OptIsInvariant(floatReg, block, block->loop, val, false, false)
  13231. ))
  13232. {
  13233. Assert(!this->IsLoopPrePass());
  13234. this->OptHoistInvariant(newInstr, block, block->loop, val, val, false);
  13235. }
  13236. }
  13237. if (needReplaceSrc)
  13238. {
  13239. CaptureByteCodeSymUses(instr);
  13240. instr->ReplaceSrc(opnd, floatReg);
  13241. }
  13242. }
  13243. return instr;
  13244. }
  13245. return newInstr;
  13246. }
  13247. void
  13248. GlobOpt::ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block)
  13249. {
  13250. ToVarStackSym(dst->m_sym, block);
  13251. }
  13252. void
  13253. GlobOpt::ToVarStackSym(StackSym *varSym, BasicBlock *block)
  13254. {
  13255. //added another check for sym , in case of asmjs there is mostly no var syms and hence added a new check to see if it is the primary sym
  13256. Assert(!varSym->IsTypeSpec());
  13257. block->globOptData.liveVarSyms->Set(varSym->m_id);
  13258. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  13259. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13260. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13261. // SIMD_JS
  13262. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13263. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13264. }
  13265. void
  13266. GlobOpt::ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13267. {
  13268. StackSym *varSym = dst->m_sym;
  13269. Assert(!varSym->IsTypeSpec());
  13270. if (!this->IsLoopPrePass() && varSym->IsVar())
  13271. {
  13272. StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
  13273. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  13274. // since we'll just be hammering the symbol.
  13275. dst = instr->UnlinkDst()->AsRegOpnd();
  13276. dst->m_sym = int32Sym;
  13277. dst->SetType(TyInt32);
  13278. instr->SetDst(dst);
  13279. }
  13280. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  13281. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  13282. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13283. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13284. // SIMD_JS
  13285. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13286. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13287. }
  13288. void
  13289. GlobOpt::ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13290. {
  13291. // We should be calling only for asmjs function
  13292. Assert(GetIsAsmJSFunc());
  13293. StackSym *varSym = dst->m_sym;
  13294. Assert(!varSym->IsTypeSpec());
  13295. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  13296. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  13297. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13298. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13299. // SIMD_JS
  13300. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13301. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13302. }
  13303. void
  13304. GlobOpt::ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13305. {
  13306. StackSym *varSym = dst->m_sym;
  13307. Assert(!varSym->IsTypeSpec());
  13308. if (!this->IsLoopPrePass() && varSym->IsVar())
  13309. {
  13310. StackSym *float64Sym = varSym->GetFloat64EquivSym(this->func);
  13311. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  13312. // since we'll just be hammering the symbol.
  13313. dst = instr->UnlinkDst()->AsRegOpnd();
  13314. dst->m_sym = float64Sym;
  13315. dst->SetType(TyFloat64);
  13316. instr->SetDst(dst);
  13317. }
  13318. block->globOptData.liveFloat64Syms->Set(varSym->m_id);
  13319. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13320. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  13321. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13322. // SIMD_JS
  13323. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13324. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13325. }
  13326. // SIMD_JS
  13327. void
  13328. GlobOpt::ToSimd128Dst(IRType toType, IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13329. {
  13330. StackSym *varSym = dst->m_sym;
  13331. Assert(!varSym->IsTypeSpec());
  13332. BVSparse<JitArenaAllocator> * livenessBV = block->globOptData.GetSimd128LivenessBV(toType);
  13333. Assert(livenessBV);
  13334. if (!this->IsLoopPrePass() && varSym->IsVar())
  13335. {
  13336. StackSym *simd128Sym = varSym->GetSimd128EquivSym(toType, this->func);
  13337. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  13338. // since we'll just be hammering the symbol.
  13339. dst = instr->UnlinkDst()->AsRegOpnd();
  13340. dst->m_sym = simd128Sym;
  13341. dst->SetType(toType);
  13342. instr->SetDst(dst);
  13343. }
  13344. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13345. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13346. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  13347. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13348. // SIMD_JS
  13349. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13350. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13351. livenessBV->Set(varSym->m_id);
  13352. }
  13353. BOOL
  13354. GlobOpt::IsInt32TypeSpecialized(Sym *sym, BasicBlock *block)
  13355. {
  13356. return IsInt32TypeSpecialized(sym, &block->globOptData);
  13357. }
  13358. BOOL
  13359. GlobOpt::IsSwitchInt32TypeSpecialized(IR::Instr * instr, BasicBlock * block)
  13360. {
  13361. return IsSwitchOptEnabled(instr->m_func->GetTopFunc()) && instr->GetSrc1()->IsRegOpnd() &&
  13362. IsInt32TypeSpecialized(instr->GetSrc1()->AsRegOpnd()->m_sym, block);
  13363. }
  13364. BOOL
  13365. GlobOpt::IsInt32TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13366. {
  13367. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13368. return sym && data->liveInt32Syms->Test(sym->m_id) && !data->liveLossyInt32Syms->Test(sym->m_id);
  13369. }
  13370. BOOL
  13371. GlobOpt::IsFloat64TypeSpecialized(Sym *sym, BasicBlock *block)
  13372. {
  13373. return IsFloat64TypeSpecialized(sym, &block->globOptData);
  13374. }
  13375. BOOL
  13376. GlobOpt::IsFloat64TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13377. {
  13378. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13379. return sym && data->liveFloat64Syms->Test(sym->m_id);
  13380. }
  13381. // SIMD_JS
  13382. BOOL
  13383. GlobOpt::IsSimd128TypeSpecialized(Sym *sym, BasicBlock *block)
  13384. {
  13385. return IsSimd128TypeSpecialized(sym, &block->globOptData);
  13386. }
  13387. BOOL
  13388. GlobOpt::IsSimd128TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13389. {
  13390. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13391. return sym && (data->liveSimd128F4Syms->Test(sym->m_id) || data->liveSimd128I4Syms->Test(sym->m_id));
  13392. }
  13393. BOOL
  13394. GlobOpt::IsSimd128TypeSpecialized(IRType type, Sym *sym, BasicBlock *block)
  13395. {
  13396. return IsSimd128TypeSpecialized(type, sym, &block->globOptData);
  13397. }
  13398. BOOL
  13399. GlobOpt::IsSimd128TypeSpecialized(IRType type, Sym *sym, GlobOptBlockData *data)
  13400. {
  13401. switch (type)
  13402. {
  13403. case TySimd128F4:
  13404. return IsSimd128F4TypeSpecialized(sym, data);
  13405. case TySimd128I4:
  13406. return IsSimd128I4TypeSpecialized(sym, data);
  13407. default:
  13408. Assert(UNREACHED);
  13409. return false;
  13410. }
  13411. }
  13412. BOOL
  13413. GlobOpt::IsSimd128F4TypeSpecialized(Sym *sym, BasicBlock *block)
  13414. {
  13415. return IsSimd128F4TypeSpecialized(sym, &block->globOptData);
  13416. }
  13417. BOOL
  13418. GlobOpt::IsSimd128F4TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13419. {
  13420. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13421. return sym && (data->liveSimd128F4Syms->Test(sym->m_id));
  13422. }
  13423. BOOL
  13424. GlobOpt::IsSimd128I4TypeSpecialized(Sym *sym, BasicBlock *block)
  13425. {
  13426. return IsSimd128I4TypeSpecialized(sym, &block->globOptData);
  13427. }
  13428. BOOL
  13429. GlobOpt::IsSimd128I4TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13430. {
  13431. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13432. return sym && (data->liveSimd128I4Syms->Test(sym->m_id));
  13433. }
  13434. BOOL
  13435. GlobOpt::IsLiveAsSimd128(Sym *sym, GlobOptBlockData *data)
  13436. {
  13437. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13438. return
  13439. sym &&
  13440. (
  13441. data->liveSimd128F4Syms->Test(sym->m_id) ||
  13442. data->liveSimd128I4Syms->Test(sym->m_id)
  13443. );
  13444. }
  13445. BOOL
  13446. GlobOpt::IsLiveAsSimd128F4(Sym *sym, GlobOptBlockData *data)
  13447. {
  13448. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13449. return sym && data->liveSimd128F4Syms->Test(sym->m_id);
  13450. }
  13451. BOOL
  13452. GlobOpt::IsLiveAsSimd128I4(Sym *sym, GlobOptBlockData *data)
  13453. {
  13454. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13455. return sym && data->liveSimd128I4Syms->Test(sym->m_id);
  13456. }
  13457. BOOL
  13458. GlobOpt::IsTypeSpecialized(Sym *sym, BasicBlock *block)
  13459. {
  13460. return IsTypeSpecialized(sym, &block->globOptData);
  13461. }
  13462. BOOL
  13463. GlobOpt::IsTypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13464. {
  13465. return IsInt32TypeSpecialized(sym, data) || IsFloat64TypeSpecialized(sym, data) || IsSimd128TypeSpecialized(sym, data);
  13466. }
  13467. BOOL
  13468. GlobOpt::IsLive(Sym *sym, BasicBlock *block)
  13469. {
  13470. return IsLive(sym, &block->globOptData);
  13471. }
  13472. BOOL
  13473. GlobOpt::IsLive(Sym *sym, GlobOptBlockData *data)
  13474. {
  13475. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13476. return
  13477. sym &&
  13478. (
  13479. data->liveVarSyms->Test(sym->m_id) ||
  13480. data->liveInt32Syms->Test(sym->m_id) ||
  13481. data->liveFloat64Syms->Test(sym->m_id) ||
  13482. data->liveSimd128F4Syms->Test(sym->m_id) ||
  13483. data->liveSimd128I4Syms->Test(sym->m_id)
  13484. );
  13485. }
  13486. void
  13487. GlobOpt::MakeLive(StackSym *const sym, GlobOptBlockData *const blockData, const bool lossy) const
  13488. {
  13489. Assert(sym);
  13490. Assert(blockData);
  13491. if(sym->IsTypeSpec())
  13492. {
  13493. const SymID varSymId = sym->GetVarEquivSym(func)->m_id;
  13494. if(sym->IsInt32())
  13495. {
  13496. blockData->liveInt32Syms->Set(varSymId);
  13497. if(lossy)
  13498. {
  13499. blockData->liveLossyInt32Syms->Set(varSymId);
  13500. }
  13501. else
  13502. {
  13503. blockData->liveLossyInt32Syms->Clear(varSymId);
  13504. }
  13505. return;
  13506. }
  13507. if (sym->IsFloat64())
  13508. {
  13509. blockData->liveFloat64Syms->Set(varSymId);
  13510. return;
  13511. }
  13512. // SIMD_JS
  13513. if (sym->IsSimd128F4())
  13514. {
  13515. blockData->liveSimd128F4Syms->Set(varSymId);
  13516. return;
  13517. }
  13518. if (sym->IsSimd128I4())
  13519. {
  13520. blockData->liveSimd128I4Syms->Set(varSymId);
  13521. return;
  13522. }
  13523. }
  13524. blockData->liveVarSyms->Set(sym->m_id);
  13525. }
// Attempts to constant-fold a binary instruction whose sources have known integer
// constant bounds. On success, the instruction is rewritten into a load of the folded
// constant (LdC_A_I4 when type specialization is off, Ld_I4 otherwise), *pDstVal
// receives the new constant value, and true is returned. Returns false when folding
// is not possible (const folding disabled, sources not constant, calculator failure,
// or the result does not fit in 32 bits).
bool
GlobOpt::OptConstFoldBinary(
    IR::Instr * *pInstr,
    const IntConstantBounds &src1IntConstantBounds,
    const IntConstantBounds &src2IntConstantBounds,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value;
    IR::IntConstOpnd *constOpnd;

    if (!DoConstFold())
    {
        return false;
    }

    int32 src1IntConstantValue = -1;
    int32 src2IntConstantValue = -1;
    int32 src1MaxIntConstantValue = -1;
    int32 src2MaxIntConstantValue = -1;
    int32 src1MinIntConstantValue = -1;
    int32 src2MinIntConstantValue = -1;

    if (instr->IsBranchInstr())
    {
        // Branch instructions fold on ranges: capture both bounds of each source.
        // NOTE(review): on this path src1IntConstantValue/src2IntConstantValue remain -1
        // and are what gets passed to BinaryCalculator below — confirm branch opcodes
        // are evaluated from the min/max bounds inside BinaryCalculator.
        src1MinIntConstantValue = src1IntConstantBounds.LowerBound();
        src1MaxIntConstantValue = src1IntConstantBounds.UpperBound();
        src2MinIntConstantValue = src2IntConstantBounds.LowerBound();
        src2MaxIntConstantValue = src2IntConstantBounds.UpperBound();
    }
    else if (src1IntConstantBounds.IsConstant() && src2IntConstantBounds.IsConstant())
    {
        // Non-branch: both sources must be exact constants (lower bound == upper bound).
        src1IntConstantValue = src1IntConstantBounds.LowerBound();
        src2IntConstantValue = src2IntConstantBounds.LowerBound();
    }
    else
    {
        return false;
    }

    // Evaluate the operation; bail if the opcode can't be folded or the result
    // overflows a 32-bit integer.
    IntConstType tmpValueOut;
    if (!instr->BinaryCalculator(src1IntConstantValue, src2IntConstantValue, &tmpValueOut)
        || !Math::FitsInDWord(tmpValueOut))
    {
        return false;
    }
    value = (int32)tmpValueOut;

    // Rewrite the instruction: capture bailout sym uses first, then replace src1 with
    // the folded constant and drop src2.
    this->CaptureByteCodeSymUses(instr);
    constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
    instr->ReplaceSrc1(constOpnd);
    instr->FreeSrc2();
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());
    StackSym *dstSym = dst->AsRegOpnd()->m_sym;
    if (dstSym->IsSingleDef())
    {
        // Single-def dst: remember the constant on the sym for later copy-prop.
        dstSym->SetIsIntConst(value);
    }

    GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);
    *pDstVal = GetIntConstantValue(value, instr, dst);

    if (IsTypeSpecPhaseOff(this->func))
    {
        // No type specialization: load the constant as a tagged var.
        instr->m_opcode = Js::OpCode::LdC_A_I4;
        this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
    }
    else
    {
        // Type specialization on: load the constant as a raw int32 and int-spec the dst.
        instr->m_opcode = Js::OpCode::Ld_I4;
        this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    }
    return true;
}
// Folds a branch whose outcome is known at compile time. When 'test' is true the branch
// becomes an unconditional Br and the fall-through block dies; when false the branch
// becomes a Nop and the taken-target block dies. The dead successor edge is then removed
// from the flow graph. src1Val/src2Val, when present, are int-specialized first.
void
GlobOpt::OptConstFoldBr(bool test, IR::Instr *instr, Value * src1Val, Value * src2Val)
{
    GOPT_TRACE_INSTR(instr, _u("Constant folding to branch: "));
    BasicBlock *deadBlock;

    if (src1Val)
    {
        this->ToInt32(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, false);
    }

    if (src2Val)
    {
        this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, false);
    }

    this->CaptureByteCodeSymUses(instr);

    if (test)
    {
        // Branch always taken: make it unconditional; the fall-through label's block dies.
        instr->m_opcode = Js::OpCode::Br;

        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        deadBlock = instr->m_next->AsLabelInstr()->GetBasicBlock();
    }
    else
    {
        // Branch never taken: the branch becomes a Nop; the taken target's block dies.
        AssertMsg(instr->m_next->IsLabelInstr(), "Next instr of branch should be a label...");
        if(instr->AsBranchInstr()->IsMultiBranch())
        {
            // Multi-branches have no single dead successor; leave the instruction alone.
            return;
        }
        deadBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        instr->m_opcode = Js::OpCode::Nop;
    }

    // Loop back edge: we would have already decremented data use count for the tail block when we processed the loop header.
    if (!(this->currentBlock->loop && this->currentBlock->loop->GetHeadBlock() == deadBlock))
    {
        this->currentBlock->DecrementDataUseCount();
    }

    // Remove the dead edge; a block left with no predecessors can't be reached, so its
    // data use count drops to zero.
    this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);
    if (deadBlock->GetPredList()->Count() == 0)
    {
        deadBlock->SetDataUseCount(0);
    }
}
// Replaces value's ValueInfo with a copy whose type is newValueType.
// - preserveSubclassInfo: keep subclass-specific info (e.g. ArrayValueInfo syms) on the
//   copy; otherwise the copy is reduced to a generic structure kind.
// - allowIncompatibleType: forwarded to ChangeValueInfo to relax its compatibility assert.
// No-op when the type is already newValueType and there is no subclass info to strip.
void
GlobOpt::ChangeValueType(
    BasicBlock *const block,
    Value *const value,
    const ValueType newValueType,
    const bool preserveSubclassInfo,
    const bool allowIncompatibleType) const
{
    Assert(value);
    // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
    Assert(!value->GetValueInfo()->IsJsType());

    ValueInfo *const valueInfo = value->GetValueInfo();
    const ValueType valueType(valueInfo->Type());
    if(valueType == newValueType && (preserveSubclassInfo || valueInfo->IsGeneric()))
    {
        // Same type and nothing to strip — nothing to do.
        return;
    }

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !preserveSubclassInfo ||
        !valueInfo->IsArrayValueInfo() ||
        newValueType.IsObject() && newValueType.GetObjectType() == valueInfo->GetObjectType());

    ValueInfo *const newValueInfo =
        preserveSubclassInfo
            ? valueInfo->Copy(alloc)
            : valueInfo->CopyWithGenericStructureKind(alloc);
    newValueInfo->Type() = newValueType;
    ChangeValueInfo(block, value, newValueInfo, allowIncompatibleType);
}
// Installs newValueInfo on value, keeping the block's call-kill tracking set
// (valuesToKillOnCalls) in sync via TrackValueInfoChangeForKills. Pass block == nullptr
// to skip the per-block kill tracking.
void
GlobOpt::ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType, const bool compensated) const
{
    Assert(value);
    Assert(newValueInfo);

    // The value type must be changed to something more specific or something more generic. For instance, it would be changed to
    // something more specific if the current value type is LikelyArray and checks have been done to ensure that it's an array,
    // and it would be changed to something more generic if a call kills the Array value type and it must be treated as
    // LikelyArray going forward.

    // There are cases where we change the type because of different profile information, and because of rejit, these profile information
    // may conflict. Need to allow incompatible type in those cause. However, the old type should be indefinite.
    Assert((allowIncompatibleType && !value->GetValueInfo()->IsDefinite()) ||
        AreValueInfosCompatible(newValueInfo, value->GetValueInfo()));

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !value->GetValueInfo()->IsArrayValueInfo() ||
        !newValueInfo->IsArrayValueInfo() ||
        newValueInfo->GetObjectType() == value->GetValueInfo()->GetObjectType());

    if(block)
    {
        TrackValueInfoChangeForKills(block, value, newValueInfo, compensated);
    }
    value->SetValueInfo(newValueInfo);
}
// Determines whether two value infos for the same value can coexist (used when value
// info is replaced or merged). Compatibility is deliberately loose wherever a type
// specialization pass is allowed to compensate (aggressive int spec, float spec, SIMD).
bool
GlobOpt::AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const
{
    Assert(v0);
    Assert(v1);

    // An uninitialized side is compatible with anything.
    if(v0->IsUninitialized() || v1->IsUninitialized())
    {
        return true;
    }

    const bool doAggressiveIntTypeSpec = DoAggressiveIntTypeSpec();
    if(doAggressiveIntTypeSpec && (v0->IsInt() || v1->IsInt()))
    {
        // Int specialization in some uncommon loop cases involving dependencies, needs to allow specializing values of
        // arbitrary types, even values that are definitely not int, to compensate for aggressive assumptions made by a loop
        // prepass
        return true;
    }
    if ((v0->Type()).IsMixedTypedArrayPair(v1->Type()) || (v1->Type()).IsMixedTypedArrayPair(v0->Type()))
    {
        return true;
    }
    const bool doFloatTypeSpec = DoFloatTypeSpec();
    if(doFloatTypeSpec && (v0->IsFloat() || v1->IsFloat()))
    {
        // Float specialization allows specializing values of arbitrary types, even values that are definitely not float
        return true;
    }

    // SIMD_JS
    if (SIMD128_TYPE_SPEC_FLAG && v0->Type().IsSimd128())
    {
        // We only type-spec Undefined values, Objects (possibly merged SIMD values), or actual SIMD values.
        if (v1->Type().IsLikelyUndefined() || v1->Type().IsLikelyNull())
        {
            return true;
        }

        if (v1->Type().IsLikelyObject() && v1->Type().GetObjectType() == ObjectType::Object)
        {
            return true;
        }

        if (v1->Type().IsSimd128())
        {
            // Two SIMD values are compatible only when they are the same SIMD kind.
            return v0->Type().GetObjectType() == v1->Type().GetObjectType();
        }
    }

    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    // Two definite value types are compatible when one is a subset of the other.
    const auto AreValueTypesCompatible = [=](const ValueType t0, const ValueType t1)
    {
        return
            t0.IsSubsetOf(t1, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec) ||
            t1.IsSubsetOf(t0, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec);
    };

    const ValueType t0(v0->Type().ToDefinite()), t1(v1->Type().ToDefinite());
    if(t0.IsLikelyObject() && t1.IsLikelyObject())
    {
        // Check compatibility for the primitive portions and the object portions of the value types separately
        if(AreValueTypesCompatible(t0.ToDefiniteObject(), t1.ToDefiniteObject()) &&
            (
                !t0.HasBeenPrimitive() ||
                !t1.HasBeenPrimitive() ||
                AreValueTypesCompatible(t0.ToDefinitePrimitiveSubset(), t1.ToDefinitePrimitiveSubset())
            ))
        {
            return true;
        }
    }
    else if(AreValueTypesCompatible(t0, t1))
    {
        return true;
    }

    // Last chance: a float constant whose value is really an int pairs with a likely-int.
    const FloatConstantValueInfo *floatConstantValueInfo;
    const ValueInfo *likelyIntValueinfo;
    if(v0->IsFloatConstant() && v1->IsLikelyInt())
    {
        floatConstantValueInfo = v0->AsFloatConstant();
        likelyIntValueinfo = v1;
    }
    else if(v0->IsLikelyInt() && v1->IsFloatConstant())
    {
        floatConstantValueInfo = v1->AsFloatConstant();
        likelyIntValueinfo = v0;
    }
    else
    {
        return false;
    }

    // A float constant value with a value that is actually an int is a subset of a likely-int value.
    // Ideally, we should create an int constant value for this up front, such that IsInt() also returns true. There
    // were other issues with that, should see if that can be done.
    int32 int32Value;
    return
        Js::JavascriptNumber::TryGetInt32Value(floatConstantValueInfo->FloatValue(), &int32Value) &&
        (!likelyIntValueinfo->IsLikelyTaggedInt() || !Js::TaggedInt::IsOverflow(int32Value));
}
#if DBG
// Debug-only consistency check for array values that are (or are about to be) tracked
// for call kills: verifies the value's array-specific state (definite JS-array type,
// no-missing-values bit, head segment / head segment length / length syms) is present
// only when the corresponding hoisting optimization is enabled.
void
GlobOpt::VerifyArrayValueInfoForTracking(
    const ValueInfo *const valueInfo,
    const bool isJsArray,
    const BasicBlock *const block,
    const bool ignoreKnownImplicitCalls) const
{
    Assert(valueInfo);
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(isJsArray == valueInfo->IsArrayOrObjectWithArray());
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
    Assert(block);

    Loop *implicitCallsLoop;
    if(block->next && !block->next->isDeleted && block->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = block->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == block);
    }
    else
    {
        implicitCallsLoop = block->loop;
    }

    // A JS array may only carry a definite array value type when array check hoisting is
    // allowed here (or when known implicit calls are being deliberately ignored).
    Assert(
        !isJsArray ||
        DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop) ||
        (
            ignoreKnownImplicitCalls &&
            !(implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
        ));

    // "Has no missing values" may only be tracked when missing-value check hoisting is on.
    Assert(!(isJsArray && valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist()));

    // Head segment syms may only be tracked when array segment hoisting is on.
    Assert(
        !(
            valueInfo->IsArrayValueInfo() &&
            (
                valueInfo->AsArrayValueInfo()->HeadSegmentSym() ||
                valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
            ) &&
            !DoArraySegmentHoist(valueInfo->Type())
        ));

#if 0
    // We can't assert here that there is only a head segment length sym if hoisting is allowed in the current block,
    // because we may have propagated the sym forward out of a loop, and hoisting may be allowed inside but not
    // outside the loop.
    Assert(
        isJsArray ||
        !valueInfo->IsArrayValueInfo() ||
        !valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym() ||
        DoTypedArraySegmentLengthHoist(implicitCallsLoop) ||
        ignoreKnownImplicitCalls ||
        (implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
    );
#endif

    // A length sym may only be tracked when array length hoisting is on.
    Assert(
        !(
            isJsArray &&
            valueInfo->IsArrayValueInfo() &&
            valueInfo->AsArrayValueInfo()->LengthSym() &&
            !DoArrayLengthHoist()
        ));
}
#endif
  13857. void
  13858. GlobOpt::TrackNewValueForKills(Value *const value)
  13859. {
  13860. Assert(value);
  13861. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  13862. {
  13863. return;
  13864. }
  13865. DoTrackNewValueForKills(value);
  13866. }
// First-time kill tracking for a newly created optimized-array value. The value info has
// no sym store yet, so it may be downgraded in place when the relevant array opts are
// disabled; definite JS-array values are then registered in valuesToKillOnCalls so calls
// can later revert them to likely types.
void
GlobOpt::DoTrackNewValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(!valueInfo->IsArrayValueInfo());

    // The value and value info here are new, so it's okay to modify the value info in-place
    Assert(!valueInfo->GetSymStore());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    Loop *implicitCallsLoop;
    if(currentBlock->next && !currentBlock->next->isDeleted && currentBlock->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = currentBlock->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == currentBlock);
    }
    else
    {
        implicitCallsLoop = currentBlock->loop;
    }

    if(isJsArray)
    {
        if(!DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop))
        {
            // Array opts are disabled for this value type, so treat it as an indefinite value type going forward
            valueInfo->Type() = valueInfo->Type().ToLikely();
            return;
        }

        if(valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
        {
            // Missing-value hoisting is off: drop the no-missing-values bit.
            valueInfo->Type() = valueInfo->Type().SetHasNoMissingValues(false);
        }
    }

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    if(!isJsArray)
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    blockData.valuesToKillOnCalls->Add(value);
}
  13917. void
  13918. GlobOpt::TrackCopiedValueForKills(Value *const value)
  13919. {
  13920. Assert(value);
  13921. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  13922. {
  13923. return;
  13924. }
  13925. DoTrackCopiedValueForKills(value);
  13926. }
  13927. void
  13928. GlobOpt::DoTrackCopiedValueForKills(Value *const value)
  13929. {
  13930. Assert(value);
  13931. ValueInfo *const valueInfo = value->GetValueInfo();
  13932. Assert(valueInfo->IsAnyOptimizedArray());
  13933. const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
  13934. Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
  13935. #if DBG
  13936. VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
  13937. #endif
  13938. if(!isJsArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
  13939. {
  13940. return;
  13941. }
  13942. // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
  13943. // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
  13944. // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
  13945. // revert the value type to a likely version.
  13946. blockData.valuesToKillOnCalls->Add(value);
  13947. }
  13948. void
  13949. GlobOpt::TrackMergedValueForKills(
  13950. Value *const value,
  13951. GlobOptBlockData *const blockData,
  13952. BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
  13953. {
  13954. Assert(value);
  13955. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  13956. {
  13957. return;
  13958. }
  13959. DoTrackMergedValueForKills(value, blockData, mergedValueTypesTrackedForKills);
  13960. }
// Kill tracking for a value produced by a block merge. Unlike the new-value variant this
// does not mutate the value info, and it uses mergedValueTypesTrackedForKills (when
// provided) to avoid adding the same value number to valuesToKillOnCalls more than once
// during a merge.
void
GlobOpt::DoTrackMergedValueForKills(
    Value *const value,
    GlobOptBlockData *const blockData,
    BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
{
    Assert(value);
    Assert(blockData);

    ValueInfo *valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock, true);
#endif

    // Only JS arrays, or typed arrays with a tracked head segment length sym, need
    // call-kill tracking.
    if(!isJsArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    if(!mergedValueTypesTrackedForKills || !mergedValueTypesTrackedForKills->TestAndSet(value->GetValueNumber()))
    {
        blockData->valuesToKillOnCalls->Add(value);
    }
}
// Keeps block->globOptData.valuesToKillOnCalls consistent when value's info is about to
// be replaced by newValueInfo. A value is tracked iff it is a definite JS array / object
// with array, or an optimized typed array whose ArrayValueInfo carries a head segment
// length sym; the value is added or removed as that condition flips.
void
GlobOpt::TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const
{
    Assert(block);
    Assert(value);
    Assert(newValueInfo);

    ValueInfo *const oldValueInfo = value->GetValueInfo();

#if DBG
    if(oldValueInfo->IsAnyOptimizedArray())
    {
        VerifyArrayValueInfoForTracking(oldValueInfo, oldValueInfo->IsArrayOrObjectWithArray(), block, compensated);
    }
#endif

    // Whether the old info is currently expected to be in the kill-tracking set.
    const bool trackOldValueInfo =
        oldValueInfo->IsArrayOrObjectWithArray() ||
        (
            oldValueInfo->IsOptimizedTypedArray() &&
            oldValueInfo->IsArrayValueInfo() &&
            oldValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
        );
    Assert(trackOldValueInfo == block->globOptData.valuesToKillOnCalls->ContainsKey(value));

#if DBG
    if(newValueInfo->IsAnyOptimizedArray())
    {
        VerifyArrayValueInfoForTracking(newValueInfo, newValueInfo->IsArrayOrObjectWithArray(), block, compensated);
    }
#endif

    // Whether the new info must be tracked going forward.
    const bool trackNewValueInfo =
        newValueInfo->IsArrayOrObjectWithArray() ||
        (
            newValueInfo->IsOptimizedTypedArray() &&
            newValueInfo->IsArrayValueInfo() &&
            newValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
        );

    if(trackOldValueInfo == trackNewValueInfo)
    {
        // Tracking status unchanged — nothing to update.
        return;
    }

    if(trackNewValueInfo)
    {
        block->globOptData.valuesToKillOnCalls->Add(value);
    }
    else
    {
        block->globOptData.valuesToKillOnCalls->Remove(value);
    }
}
  14036. void
  14037. GlobOpt::ProcessValueKills(IR::Instr *const instr)
  14038. {
  14039. Assert(instr);
  14040. ValueSet *const valuesToKillOnCalls = blockData.valuesToKillOnCalls;
  14041. if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
  14042. {
  14043. return;
  14044. }
  14045. const JsArrayKills kills = CheckJsArrayKills(instr);
  14046. Assert(!kills.KillsArrayHeadSegments() || kills.KillsArrayHeadSegmentLengths());
  14047. if(IsLoopPrePass())
  14048. {
  14049. rootLoopPrePass->jsArrayKills = rootLoopPrePass->jsArrayKills.Merge(kills);
  14050. Assert(
  14051. !rootLoopPrePass->parent ||
  14052. rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));
  14053. if(kills.KillsAllArrays())
  14054. {
  14055. rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = false;
  14056. }
  14057. if(valuesToKillOnCalls->Count() == 0)
  14058. {
  14059. return;
  14060. }
  14061. }
  14062. if(kills.KillsAllArrays())
  14063. {
  14064. Assert(kills.KillsTypedArrayHeadSegmentLengths());
  14065. // - Calls need to kill the value types of values in the following list. For instance, calls can transform a JS array
  14066. // into an ES5 array, so any definitely-array value types need to be killed. Update the value types.
  14067. // - Calls also need to kill typed array head segment lengths. A typed array's array buffer may be transferred to a web
  14068. // worker, in which case the typed array's length is set to zero.
  14069. for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
  14070. {
  14071. Value *const value = it.CurrentValue();
  14072. ValueInfo *const valueInfo = value->GetValueInfo();
  14073. Assert(
  14074. valueInfo->IsArrayOrObjectWithArray() ||
  14075. valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
  14076. if(valueInfo->IsArrayOrObjectWithArray())
  14077. {
  14078. ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
  14079. continue;
  14080. }
  14081. ChangeValueInfo(
  14082. nullptr,
  14083. value,
  14084. valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
  14085. }
  14086. valuesToKillOnCalls->Clear();
  14087. return;
  14088. }
  14089. if(kills.KillsArraysWithNoMissingValues())
  14090. {
  14091. // Some operations may kill arrays with no missing values in unlikely circumstances. Convert their value types to likely
  14092. // versions so that the checks have to be redone.
  14093. for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
  14094. {
  14095. Value *const value = it.CurrentValue();
  14096. ValueInfo *const valueInfo = value->GetValueInfo();
  14097. Assert(
  14098. valueInfo->IsArrayOrObjectWithArray() ||
  14099. valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
  14100. if(!valueInfo->IsArrayOrObjectWithArray() || !valueInfo->HasNoMissingValues())
  14101. {
  14102. continue;
  14103. }
  14104. ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
  14105. it.RemoveCurrent();
  14106. }
  14107. }
  14108. if(kills.KillsNativeArrays())
  14109. {
  14110. // Some operations may kill native arrays in (what should be) unlikely circumstances. Convert their value types to
  14111. // likely versions so that the checks have to be redone.
  14112. for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
  14113. {
  14114. Value *const value = it.CurrentValue();
  14115. ValueInfo *const valueInfo = value->GetValueInfo();
  14116. Assert(
  14117. valueInfo->IsArrayOrObjectWithArray() ||
  14118. valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
  14119. if(!valueInfo->IsArrayOrObjectWithArray() || valueInfo->HasVarElements())
  14120. {
  14121. continue;
  14122. }
  14123. ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
  14124. it.RemoveCurrent();
  14125. }
  14126. }
  14127. const bool likelyKillsJsArraysWithNoMissingValues = IsOperationThatLikelyKillsJsArraysWithNoMissingValues(instr);
  14128. if(!kills.KillsArrayHeadSegmentLengths())
  14129. {
  14130. Assert(!kills.KillsArrayHeadSegments());
  14131. if(!likelyKillsJsArraysWithNoMissingValues && !kills.KillsArrayLengths())
  14132. {
  14133. return;
  14134. }
  14135. }
  14136. for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
  14137. {
  14138. Value *const value = it.CurrentValue();
  14139. ValueInfo *valueInfo = value->GetValueInfo();
  14140. Assert(
  14141. valueInfo->IsArrayOrObjectWithArray() ||
  14142. valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
  14143. if(!valueInfo->IsArrayOrObjectWithArray())
  14144. {
  14145. continue;
  14146. }
  14147. if(likelyKillsJsArraysWithNoMissingValues && valueInfo->HasNoMissingValues())
  14148. {
  14149. ChangeValueType(nullptr, value, valueInfo->Type().SetHasNoMissingValues(false), true);
  14150. valueInfo = value->GetValueInfo();
  14151. }
  14152. if(!valueInfo->IsArrayValueInfo())
  14153. {
  14154. continue;
  14155. }
  14156. ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
  14157. const bool removeHeadSegment = kills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
  14158. const bool removeHeadSegmentLength = kills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
  14159. const bool removeLength = kills.KillsArrayLengths() && arrayValueInfo->LengthSym();
  14160. if(removeHeadSegment || removeHeadSegmentLength || removeLength)
  14161. {
  14162. ChangeValueInfo(
  14163. nullptr,
  14164. value,
  14165. arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
  14166. valueInfo = value->GetValueInfo();
  14167. }
  14168. }
  14169. }
// Kills tracked array value information for a block when implicit calls are possible.
// If implicit-call flags do not allow optimizations for the block's loop (or the function,
// outside a loop), every value registered in valuesToKillOnCalls is demoted: JS
// arrays/objects-with-array lose their definite type (ToLikely), and optimized typed
// arrays lose their head-segment-length sym. The set is cleared afterwards since no
// tracked value remains call-sensitive.
void
GlobOpt::ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    // Outside the loop prepass, nothing to do when no values are call-sensitive. During
    // the prepass we continue even with an empty set, because the rootLoopPrePass branch
    // below must still record the kills on the loop chain.
    if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    // If the current block or loop has implicit calls, kill all definitely-array value types, as using that info will cause
    // implicit calls to be disabled, resulting in unnecessary bailouts
    const bool killValuesOnImplicitCalls =
        (block->loop ? !this->ImplicitCallFlagsAllowOpts(block->loop) : !this->ImplicitCallFlagsAllowOpts(func));
    if (!killValuesOnImplicitCalls)
    {
        return;
    }

    if(IsLoopPrePass() && block->loop == rootLoopPrePass)
    {
        AnalysisAssert(rootLoopPrePass);

        // Record that this loop (and every enclosing loop) kills all arrays, so later
        // passes do not hoist array info across it. Kills must remain a subset of the
        // parent loop's kills for the merge logic to stay consistent.
        for (Loop * loop = rootLoopPrePass; loop != nullptr; loop = loop->parent)
        {
            loop->jsArrayKills.SetKillsAllArrays();
        }
        Assert(!rootLoopPrePass->parent || rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));

        if(valuesToKillOnCalls->Count() == 0)
        {
            return;
        }
    }

    for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *const valueInfo = value->GetValueInfo();
        // Only two kinds of values are ever registered in this set: JS arrays (or
        // objects with an internal array), and optimized typed arrays that carry a
        // head-segment-length sym.
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        if(valueInfo->IsArrayOrObjectWithArray())
        {
            // Demote the definite array type to its likely version so checks are redone.
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            continue;
        }
        // Typed array: drop only the head segment length sym (an implicit call may
        // neuter the buffer); head segment and length info are preserved in the copy.
        ChangeValueInfo(
            nullptr,
            value,
            valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
    }
    valuesToKillOnCalls->Clear();
}
// After merging back-edge values into a loop header, demotes array value info that the
// loop's recorded kills (loop->jsArrayKills) would invalidate. Values whose type is
// killed in the loop are made "likely" (JS arrays) or lose their head-segment-length sym
// (typed arrays) and are removed from the call-sensitive set; surviving JS array values
// may still have individual syms (head segment / head segment length / length) stripped
// when the loop kills those specific pieces.
void
GlobOpt::ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(block->isLoopHeader);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    if(valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    const JsArrayKills loopKills(block->loop->jsArrayKills);
    for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *valueInfo = value->GetValueInfo();
        // The set only ever contains JS arrays / objects-with-array, or optimized typed
        // arrays carrying a head-segment-length sym.
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
        Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

        if(isJsArray ? loopKills.KillsValueType(valueInfo->Type()) : loopKills.KillsTypedArrayHeadSegmentLengths())
        {
            // Hoisting array checks and other related things for this type is disabled for the loop due to the kill, as
            // compensation code is currently not added on back-edges. When merging values from a back-edge, the array value
            // type cannot be definite, as that may require adding compensation code on the back-edge if the optimization pass
            // chooses to not optimize the array.
            if(isJsArray)
            {
                ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            }
            else
            {
                // Typed array: keep the value info but drop the head-segment-length sym.
                ChangeValueInfo(
                    nullptr,
                    value,
                    valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
            }
            // No longer call-sensitive once demoted; safe to remove during iteration via
            // the removal-supporting iterator.
            it.RemoveCurrent();
            continue;
        }

        if(!isJsArray || !valueInfo->IsArrayValueInfo())
        {
            continue;
        }

        // Similarly, if the loop contains an operation that kills JS array segments, don't make the segment or other related
        // syms available initially inside the loop
        ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
        const bool removeHeadSegment = loopKills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
        const bool removeHeadSegmentLength = loopKills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
        const bool removeLength = loopKills.KillsArrayLengths() && arrayValueInfo->LengthSym();
        if(removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            // Copy the value info without the killed syms; the value stays in the set.
            ChangeValueInfo(
                nullptr,
                value,
                arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
            valueInfo = value->GetValueInfo();
        }
    }
}
  14281. bool
  14282. GlobOpt::NeedBailOnImplicitCallForLiveValues(BasicBlock *const block, const bool isForwardPass) const
  14283. {
  14284. if(isForwardPass)
  14285. {
  14286. return block->globOptData.valuesToKillOnCalls->Count() != 0;
  14287. }
  14288. if(block->noImplicitCallUses->IsEmpty())
  14289. {
  14290. Assert(block->noImplicitCallNoMissingValuesUses->IsEmpty());
  14291. Assert(block->noImplicitCallNativeArrayUses->IsEmpty());
  14292. Assert(block->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty());
  14293. Assert(block->noImplicitCallArrayLengthSymUses->IsEmpty());
  14294. return false;
  14295. }
  14296. return true;
  14297. }
  14298. IR::Instr*
  14299. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func)
  14300. {
  14301. IR::Instr* instr = IR::Instr::New(Js::OpCode::BoundCheck, func);
  14302. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  14303. }
  14304. IR::Instr*
  14305. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func * func)
  14306. {
  14307. IR::Instr* instr = IR::BailOutInstr::New(Js::OpCode::BoundCheck, bailoutkind, bailoutInfo, func);
  14308. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  14309. }
  14310. IR::Instr*
  14311. GlobOpt::AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset)
  14312. {
  14313. instr->SetSrc1(lowerBound);
  14314. instr->SetSrc2(upperBound);
  14315. if (offset != 0)
  14316. {
  14317. instr->SetDst(IR::IntConstOpnd::New(offset, TyInt32, instr->m_func));
  14318. }
  14319. return instr;
  14320. }
  14321. void
  14322. GlobOpt::OptArraySrc(IR::Instr * *const instrRef)
  14323. {
  14324. Assert(instrRef);
  14325. IR::Instr *&instr = *instrRef;
  14326. Assert(instr);
  14327. IR::Instr *baseOwnerInstr;
  14328. IR::IndirOpnd *baseOwnerIndir;
  14329. IR::RegOpnd *baseOpnd;
  14330. bool isProfilableLdElem, isProfilableStElem;
  14331. bool isLoad, isStore;
  14332. bool needsHeadSegment, needsHeadSegmentLength, needsLength, needsBoundChecks;
  14333. switch(instr->m_opcode)
  14334. {
  14335. // SIMD_JS
  14336. case Js::OpCode::Simd128_LdArr_F4:
  14337. case Js::OpCode::Simd128_LdArr_I4:
  14338. // no type-spec for Asm.js
  14339. if (this->GetIsAsmJSFunc())
  14340. {
  14341. return;
  14342. }
  14343. // fall through
  14344. case Js::OpCode::LdElemI_A:
  14345. case Js::OpCode::LdMethodElem:
  14346. if(!instr->GetSrc1()->IsIndirOpnd())
  14347. {
  14348. return;
  14349. }
  14350. baseOwnerInstr = nullptr;
  14351. baseOwnerIndir = instr->GetSrc1()->AsIndirOpnd();
  14352. baseOpnd = baseOwnerIndir->GetBaseOpnd();
  14353. isProfilableLdElem = instr->m_opcode == Js::OpCode::LdElemI_A; // LdMethodElem is currently not profiled
  14354. isProfilableLdElem |= Js::IsSimd128Load(instr->m_opcode);
  14355. needsBoundChecks = needsHeadSegmentLength = needsHeadSegment = isLoad = true;
  14356. needsLength = isStore = isProfilableStElem = false;
  14357. break;
  14358. // SIMD_JS
  14359. case Js::OpCode::Simd128_StArr_F4:
  14360. case Js::OpCode::Simd128_StArr_I4:
  14361. if (this->GetIsAsmJSFunc())
  14362. {
  14363. return;
  14364. }
  14365. // fall through
  14366. case Js::OpCode::StElemI_A:
  14367. case Js::OpCode::StElemI_A_Strict:
  14368. case Js::OpCode::StElemC:
  14369. if(!instr->GetDst()->IsIndirOpnd())
  14370. {
  14371. return;
  14372. }
  14373. baseOwnerInstr = nullptr;
  14374. baseOwnerIndir = instr->GetDst()->AsIndirOpnd();
  14375. baseOpnd = baseOwnerIndir->GetBaseOpnd();
  14376. needsBoundChecks = isProfilableStElem = instr->m_opcode != Js::OpCode::StElemC;
  14377. isProfilableStElem |= Js::IsSimd128Store(instr->m_opcode);
  14378. needsHeadSegmentLength = needsHeadSegment = isStore = true;
  14379. needsLength = isLoad = isProfilableLdElem = false;
  14380. break;
  14381. case Js::OpCode::InlineArrayPush:
  14382. case Js::OpCode::InlineArrayPop:
  14383. {
  14384. baseOwnerInstr = instr;
  14385. baseOwnerIndir = nullptr;
  14386. IR::Opnd * thisOpnd = instr->GetSrc1();
  14387. // Return if it not a LikelyArray or Object with Array - No point in doing array check elimination.
  14388. if(!thisOpnd->IsRegOpnd() || !thisOpnd->GetValueType().IsLikelyArrayOrObjectWithArray())
  14389. {
  14390. return;
  14391. }
  14392. baseOpnd = thisOpnd->AsRegOpnd();
  14393. isLoad = instr->m_opcode == Js::OpCode::InlineArrayPop;
  14394. isStore = instr->m_opcode == Js::OpCode::InlineArrayPush;
  14395. needsLength = needsHeadSegmentLength = needsHeadSegment = true;
  14396. needsBoundChecks = isProfilableLdElem = isProfilableStElem = false;
  14397. break;
  14398. }
  14399. case Js::OpCode::LdLen_A:
  14400. if(!instr->GetSrc1()->IsRegOpnd())
  14401. {
  14402. return;
  14403. }
  14404. baseOwnerInstr = instr;
  14405. baseOwnerIndir = nullptr;
  14406. baseOpnd = instr->GetSrc1()->AsRegOpnd();
  14407. if(baseOpnd->GetValueType().IsLikelyObject() &&
  14408. baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray)
  14409. {
  14410. return;
  14411. }
  14412. needsLength = true;
  14413. needsBoundChecks =
  14414. needsHeadSegmentLength =
  14415. needsHeadSegment =
  14416. isStore =
  14417. isLoad =
  14418. isProfilableStElem =
  14419. isProfilableLdElem = false;
  14420. break;
  14421. default:
  14422. return;
  14423. }
  14424. Assert(!(baseOwnerInstr && baseOwnerIndir));
  14425. Assert(!needsHeadSegmentLength || needsHeadSegment);
  14426. if(baseOwnerIndir && !IsLoopPrePass())
  14427. {
  14428. // Since this happens before type specialization, make sure that any necessary conversions are done, and that the index
  14429. // is int-specialized if possible such that the const flags are correct.
  14430. ToVarUses(instr, baseOwnerIndir, baseOwnerIndir == instr->GetDst(), nullptr);
  14431. }
  14432. if(isProfilableStElem && !IsLoopPrePass())
  14433. {
  14434. // If the dead-store pass decides to add the bailout kind IR::BailOutInvalidatedArrayHeadSegment, and the fast path is
  14435. // generated, it may bail out before the operation is done, so this would need to be a pre-op bailout.
  14436. if(instr->HasBailOutInfo())
  14437. {
  14438. Assert(
  14439. instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
  14440. instr->GetBailOutInfo()->bailOutOffset <= instr->GetByteCodeOffset());
  14441. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  14442. Assert(
  14443. !(bailOutKind & ~IR::BailOutKindBits) ||
  14444. (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
  14445. if(!(bailOutKind & ~IR::BailOutKindBits))
  14446. {
  14447. instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
  14448. }
  14449. }
  14450. else
  14451. {
  14452. GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
  14453. }
  14454. }
  14455. Value *const baseValue = FindValue(baseOpnd->m_sym);
  14456. if(!baseValue)
  14457. {
  14458. return;
  14459. }
  14460. ValueInfo *baseValueInfo = baseValue->GetValueInfo();
  14461. ValueType baseValueType(baseValueInfo->Type());
  14462. baseOpnd->SetValueType(baseValueType);
  14463. if(!baseValueType.IsLikelyAnyOptimizedArray() ||
  14464. !DoArrayCheckHoist(baseValueType, currentBlock->loop, instr) ||
  14465. (baseOwnerIndir && !ShouldExpectConventionalArrayIndexValue(baseOwnerIndir)))
  14466. {
  14467. return;
  14468. }
  14469. const bool isLikelyJsArray = !baseValueType.IsLikelyTypedArray();
  14470. Assert(isLikelyJsArray == baseValueType.IsLikelyArrayOrObjectWithArray());
  14471. Assert(!isLikelyJsArray == baseValueType.IsLikelyOptimizedTypedArray());
  14472. if(!isLikelyJsArray && instr->m_opcode == Js::OpCode::LdMethodElem)
  14473. {
  14474. // Fast path is not generated in this case since the subsequent call will throw
  14475. return;
  14476. }
  14477. ValueType newBaseValueType(baseValueType.ToDefiniteObject());
  14478. if(isLikelyJsArray && newBaseValueType.HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
  14479. {
  14480. newBaseValueType = newBaseValueType.SetHasNoMissingValues(false);
  14481. }
  14482. Assert((newBaseValueType == baseValueType) == baseValueType.IsObject());
  14483. ArrayValueInfo *baseArrayValueInfo = nullptr;
  14484. const auto UpdateValue = [&](StackSym *newHeadSegmentSym, StackSym *newHeadSegmentLengthSym, StackSym *newLengthSym)
  14485. {
  14486. Assert(baseValueType.GetObjectType() == newBaseValueType.GetObjectType());
  14487. Assert(newBaseValueType.IsObject());
  14488. Assert(baseValueType.IsLikelyArray() || !newLengthSym);
  14489. if(!(newHeadSegmentSym || newHeadSegmentLengthSym || newLengthSym))
  14490. {
  14491. // We're not adding new information to the value other than changing the value type. Preserve any existing
  14492. // information and just change the value type.
  14493. ChangeValueType(currentBlock, baseValue, newBaseValueType, true);
  14494. return;
  14495. }
  14496. // Merge the new syms into the value while preserving any existing information, and change the value type
  14497. if(baseArrayValueInfo)
  14498. {
  14499. if(!newHeadSegmentSym)
  14500. {
  14501. newHeadSegmentSym = baseArrayValueInfo->HeadSegmentSym();
  14502. }
  14503. if(!newHeadSegmentLengthSym)
  14504. {
  14505. newHeadSegmentLengthSym = baseArrayValueInfo->HeadSegmentLengthSym();
  14506. }
  14507. if(!newLengthSym)
  14508. {
  14509. newLengthSym = baseArrayValueInfo->LengthSym();
  14510. }
  14511. Assert(
  14512. !baseArrayValueInfo->HeadSegmentSym() ||
  14513. newHeadSegmentSym == baseArrayValueInfo->HeadSegmentSym());
  14514. Assert(
  14515. !baseArrayValueInfo->HeadSegmentLengthSym() ||
  14516. newHeadSegmentLengthSym == baseArrayValueInfo->HeadSegmentLengthSym());
  14517. Assert(!baseArrayValueInfo->LengthSym() || newLengthSym == baseArrayValueInfo->LengthSym());
  14518. }
  14519. ArrayValueInfo *const newBaseArrayValueInfo =
  14520. ArrayValueInfo::New(
  14521. alloc,
  14522. newBaseValueType,
  14523. newHeadSegmentSym,
  14524. newHeadSegmentLengthSym,
  14525. newLengthSym,
  14526. baseValueInfo->GetSymStore());
  14527. ChangeValueInfo(currentBlock, baseValue, newBaseArrayValueInfo);
  14528. };
  14529. if(IsLoopPrePass())
  14530. {
  14531. if(newBaseValueType != baseValueType)
  14532. {
  14533. UpdateValue(nullptr, nullptr, nullptr);
  14534. }
  14535. // For javascript arrays and objects with javascript arrays:
  14536. // - Implicit calls need to be disabled and calls cannot be allowed in the loop since the array vtable may be changed
  14537. // into an ES5 array.
  14538. // For typed arrays:
  14539. // - A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the
  14540. // typed array's length is set to zero. Implicit calls need to be disabled if the typed array's head segment length
  14541. // is going to be loaded and used later.
  14542. // Since we don't know if the loop has kills after this instruction, the kill information may not be complete. If a kill
  14543. // is found later, this information will be updated to not require disabling implicit calls.
  14544. if(!(
  14545. isLikelyJsArray
  14546. ? rootLoopPrePass->jsArrayKills.KillsValueType(newBaseValueType)
  14547. : rootLoopPrePass->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
  14548. ))
  14549. {
  14550. rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
  14551. }
  14552. return;
  14553. }
  14554. if(baseValueInfo->IsArrayValueInfo())
  14555. {
  14556. baseArrayValueInfo = baseValueInfo->AsArrayValueInfo();
  14557. }
  14558. const bool doArrayChecks = !baseValueType.IsObject();
  14559. const bool doArraySegmentHoist = DoArraySegmentHoist(baseValueType) && instr->m_opcode != Js::OpCode::StElemC;
  14560. const bool headSegmentIsAvailable = baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym();
  14561. const bool doHeadSegmentLoad = doArraySegmentHoist && needsHeadSegment && !headSegmentIsAvailable;
  14562. const bool doArraySegmentLengthHoist =
  14563. doArraySegmentHoist && (isLikelyJsArray || DoTypedArraySegmentLengthHoist(currentBlock->loop));
  14564. const bool headSegmentLengthIsAvailable = baseArrayValueInfo && baseArrayValueInfo->HeadSegmentLengthSym();
  14565. const bool doHeadSegmentLengthLoad =
  14566. doArraySegmentLengthHoist &&
  14567. (needsHeadSegmentLength || (!isLikelyJsArray && needsLength)) &&
  14568. !headSegmentLengthIsAvailable;
  14569. const bool lengthIsAvailable = baseArrayValueInfo && baseArrayValueInfo->LengthSym();
  14570. const bool doLengthLoad =
  14571. DoArrayLengthHoist() &&
  14572. needsLength &&
  14573. !lengthIsAvailable &&
  14574. baseValueType.IsLikelyArray() &&
  14575. DoLdLenIntSpec(instr->m_opcode == Js::OpCode::LdLen_A ? instr : nullptr, baseValueType);
  14576. StackSym *const newHeadSegmentSym = doHeadSegmentLoad ? StackSym::New(TyMachPtr, instr->m_func) : nullptr;
  14577. StackSym *const newHeadSegmentLengthSym = doHeadSegmentLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
  14578. StackSym *const newLengthSym = doLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
  14579. bool canBailOutOnArrayAccessHelperCall;
  14580. if (Js::IsSimd128LoadStore(instr->m_opcode))
  14581. {
  14582. // SIMD_JS
  14583. // simd load/store never call helper
  14584. canBailOutOnArrayAccessHelperCall = true;
  14585. }
  14586. else
  14587. {
  14588. canBailOutOnArrayAccessHelperCall = (isProfilableLdElem || isProfilableStElem) &&
  14589. DoEliminateArrayAccessHelperCall() &&
  14590. !(
  14591. instr->IsProfiledInstr() &&
  14592. (
  14593. isProfilableLdElem
  14594. ? instr->AsProfiledInstr()->u.ldElemInfo->LikelyNeedsHelperCall()
  14595. : instr->AsProfiledInstr()->u.stElemInfo->LikelyNeedsHelperCall()
  14596. )
  14597. );
  14598. }
  14599. bool doExtractBoundChecks = false, eliminatedLowerBoundCheck = false, eliminatedUpperBoundCheck = false;
  14600. StackSym *indexVarSym = nullptr;
  14601. Value *indexValue = nullptr;
  14602. IntConstantBounds indexConstantBounds;
  14603. Value *headSegmentLengthValue = nullptr;
  14604. IntConstantBounds headSegmentLengthConstantBounds;
  14605. #if ENABLE_FAST_ARRAYBUFFER
  14606. if (baseValueType.IsLikelyOptimizedVirtualTypedArray() && !Js::IsSimd128LoadStore(instr->m_opcode) /*Always extract bounds for SIMD */)
  14607. {
  14608. if (isProfilableStElem ||
  14609. !instr->IsDstNotAlwaysConvertedToInt32() ||
  14610. ( (baseValueType.GetObjectType() == ObjectType::Float32VirtualArray ||
  14611. baseValueType.GetObjectType() == ObjectType::Float64VirtualArray) &&
  14612. !instr->IsDstNotAlwaysConvertedToNumber()
  14613. )
  14614. )
  14615. {
  14616. // Unless we're in asm.js (where it is guaranteed that virtual typed array accesses cannot read/write beyond 4GB),
  14617. // check the range of the index to make sure we won't access beyond the reserved memory beforing eliminating bounds
  14618. // checks in jitted code.
  14619. if (!GetIsAsmJSFunc())
  14620. {
  14621. IR::RegOpnd * idxOpnd = baseOwnerIndir->GetIndexOpnd();
  14622. if (idxOpnd)
  14623. {
  14624. StackSym * idxSym = idxOpnd->m_sym->IsTypeSpec() ? idxOpnd->m_sym->GetVarEquivSym(nullptr) : idxOpnd->m_sym;
  14625. Value * idxValue = FindValue(idxSym);
  14626. IntConstantBounds idxConstantBounds;
  14627. if (idxValue && idxValue->GetValueInfo()->TryGetIntConstantBounds(&idxConstantBounds))
  14628. {
  14629. BYTE indirScale = Lowerer::GetArrayIndirScale(baseValueType);
  14630. int32 upperBound = idxConstantBounds.UpperBound();
  14631. int32 lowerBound = idxConstantBounds.LowerBound();
  14632. if (lowerBound >= 0 && ((static_cast<uint64>(upperBound) << indirScale) < MAX_ASMJS_ARRAYBUFFER_LENGTH))
  14633. {
  14634. eliminatedLowerBoundCheck = true;
  14635. eliminatedUpperBoundCheck = true;
  14636. canBailOutOnArrayAccessHelperCall = false;
  14637. }
  14638. }
  14639. }
  14640. }
  14641. else
  14642. {
  14643. eliminatedLowerBoundCheck = true;
  14644. eliminatedUpperBoundCheck = true;
  14645. canBailOutOnArrayAccessHelperCall = false;
  14646. }
  14647. }
  14648. }
  14649. #endif
  14650. if(needsBoundChecks && DoBoundCheckElimination())
  14651. {
  14652. AnalysisAssert(baseOwnerIndir);
  14653. Assert(needsHeadSegmentLength);
  14654. // Bound checks can be separated from the instruction only if it can bail out instead of making a helper call when a
  14655. // bound check fails. And only if it would bail out, can we use a bound check to eliminate redundant bound checks later
  14656. // on that path.
  14657. doExtractBoundChecks = (headSegmentLengthIsAvailable || doHeadSegmentLengthLoad) && canBailOutOnArrayAccessHelperCall;
  14658. do
  14659. {
  14660. // Get the index value
  14661. IR::RegOpnd *const indexOpnd = baseOwnerIndir->GetIndexOpnd();
  14662. if(indexOpnd)
  14663. {
  14664. StackSym *const indexSym = indexOpnd->m_sym;
  14665. if(indexSym->IsTypeSpec())
  14666. {
  14667. Assert(indexSym->IsInt32());
  14668. indexVarSym = indexSym->GetVarEquivSym(nullptr);
  14669. Assert(indexVarSym);
  14670. indexValue = FindValue(indexVarSym);
  14671. Assert(indexValue);
  14672. AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
  14673. Assert(indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
  14674. Assert(
  14675. (indexOpnd->GetType() == TyUint32) ==
  14676. ValueInfo::IsGreaterThanOrEqualTo(
  14677. indexValue,
  14678. indexConstantBounds.LowerBound(),
  14679. indexConstantBounds.UpperBound(),
  14680. nullptr,
  14681. 0,
  14682. 0));
  14683. if(indexOpnd->GetType() == TyUint32)
  14684. {
  14685. eliminatedLowerBoundCheck = true;
  14686. }
  14687. }
  14688. else
  14689. {
  14690. doExtractBoundChecks = false; // Bound check instruction operates only on int-specialized operands
  14691. indexValue = FindValue(indexSym);
  14692. if(!indexValue || !indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds))
  14693. {
  14694. break;
  14695. }
  14696. if(ValueInfo::IsGreaterThanOrEqualTo(
  14697. indexValue,
  14698. indexConstantBounds.LowerBound(),
  14699. indexConstantBounds.UpperBound(),
  14700. nullptr,
  14701. 0,
  14702. 0))
  14703. {
  14704. eliminatedLowerBoundCheck = true;
  14705. }
  14706. }
  14707. if(!eliminatedLowerBoundCheck &&
  14708. ValueInfo::IsLessThan(
  14709. indexValue,
  14710. indexConstantBounds.LowerBound(),
  14711. indexConstantBounds.UpperBound(),
  14712. nullptr,
  14713. 0,
  14714. 0))
  14715. {
  14716. eliminatedUpperBoundCheck = true;
  14717. doExtractBoundChecks = false;
  14718. break;
  14719. }
  14720. }
  14721. else
  14722. {
  14723. const int32 indexConstantValue = baseOwnerIndir->GetOffset();
  14724. if(indexConstantValue < 0)
  14725. {
  14726. eliminatedUpperBoundCheck = true;
  14727. doExtractBoundChecks = false;
  14728. break;
  14729. }
  14730. if(indexConstantValue == INT32_MAX)
  14731. {
  14732. eliminatedLowerBoundCheck = true;
  14733. doExtractBoundChecks = false;
  14734. break;
  14735. }
  14736. indexConstantBounds = IntConstantBounds(indexConstantValue, indexConstantValue);
  14737. eliminatedLowerBoundCheck = true;
  14738. }
  14739. if(!headSegmentLengthIsAvailable)
  14740. {
  14741. break;
  14742. }
  14743. headSegmentLengthValue = FindValue(baseArrayValueInfo->HeadSegmentLengthSym());
  14744. if(!headSegmentLengthValue)
  14745. {
  14746. if(doExtractBoundChecks)
  14747. {
  14748. headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
  14749. }
  14750. break;
  14751. }
  14752. AssertVerify(headSegmentLengthValue->GetValueInfo()->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
  14753. if (ValueInfo::IsLessThanOrEqualTo(
  14754. indexValue,
  14755. indexConstantBounds.LowerBound(),
  14756. indexConstantBounds.UpperBound(),
  14757. headSegmentLengthValue,
  14758. headSegmentLengthConstantBounds.LowerBound(),
  14759. headSegmentLengthConstantBounds.UpperBound(),
  14760. GetBoundCheckOffsetForSimd(newBaseValueType, instr, -1)
  14761. ))
  14762. {
  14763. eliminatedUpperBoundCheck = true;
  14764. if(eliminatedLowerBoundCheck)
  14765. {
  14766. doExtractBoundChecks = false;
  14767. }
  14768. }
  14769. } while(false);
  14770. }
  14771. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad || doExtractBoundChecks)
  14772. {
  14773. // Find the loops out of which array checks and head segment loads need to be hoisted
  14774. Loop *hoistChecksOutOfLoop = nullptr;
  14775. Loop *hoistHeadSegmentLoadOutOfLoop = nullptr;
  14776. Loop *hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
  14777. Loop *hoistLengthLoadOutOfLoop = nullptr;
  14778. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
  14779. {
  14780. for(Loop *loop = currentBlock->loop; loop; loop = loop->parent)
  14781. {
  14782. const JsArrayKills loopKills(loop->jsArrayKills);
  14783. Value *baseValueInLoopLandingPad;
  14784. if((isLikelyJsArray && loopKills.KillsValueType(newBaseValueType)) ||
  14785. !OptIsInvariant(baseOpnd->m_sym, currentBlock, loop, baseValue, true, true, &baseValueInLoopLandingPad) ||
  14786. !(doArrayChecks || baseValueInLoopLandingPad->GetValueInfo()->IsObject()))
  14787. {
  14788. break;
  14789. }
  14790. // The value types should be the same, except:
  14791. // - The value type in the landing pad is a type that can merge to a specific object type. Typically, these
  14792. // cases will use BailOnNoProfile, but that can be disabled due to excessive bailouts. Those value types
  14793. // merge aggressively to the other side's object type, so the value type may have started off as
  14794. // Uninitialized, [Likely]Undefined|Null, [Likely]UninitializedObject, etc., and changed in the loop to an
  14795. // array type during a prepass.
  14796. // - StElems in the loop can kill the no-missing-values info.
  14797. // - The native array type may be made more conservative based on profile data by an instruction in the loop.
  14798. Assert(
  14799. baseValueInLoopLandingPad->GetValueInfo()->CanMergeToSpecificObjectType() ||
  14800. baseValueInLoopLandingPad->GetValueInfo()->Type().SetCanBeTaggedValue(false) ==
  14801. baseValueType.SetCanBeTaggedValue(false) ||
  14802. baseValueInLoopLandingPad->GetValueInfo()->Type().SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ==
  14803. baseValueType.SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ||
  14804. baseValueInLoopLandingPad->GetValueInfo()->Type().SetHasNoMissingValues(false).ToLikely().SetCanBeTaggedValue(false) ==
  14805. baseValueType.SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ||
  14806. (
  14807. baseValueInLoopLandingPad->GetValueInfo()->Type().IsLikelyNativeArray() &&
  14808. baseValueInLoopLandingPad->GetValueInfo()->Type().Merge(baseValueType).SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ==
  14809. baseValueType.SetHasNoMissingValues(false).SetCanBeTaggedValue(false)
  14810. ));
        // Per-loop hoisting decisions. This runs inside a loop (begun above this window) that walks outward through
        // enclosing loops; 'loopKills' describes what array state the current 'loop' may kill. For each piece of
        // state (array checks, head segment, head segment length, length) we either extend the hoist target to this
        // loop or 'break' out once a kill makes further hoisting (and any rehoisting of available info) pointless.
        if(doArrayChecks)
        {
            // Array checks are not killed by anything tracked here, so they can always be hoisted out of this loop.
            hoistChecksOutOfLoop = loop;
        }

        if(isLikelyJsArray && loopKills.KillsArrayHeadSegments())
        {
            // Killing head segments implies killing head segment lengths as well.
            Assert(loopKills.KillsArrayHeadSegmentLengths());
            if(!(doArrayChecks || doLengthLoad))
            {
                // Nothing else can be hoisted past this loop; stop walking outward.
                break;
            }
        }
        else
        {
            if(doHeadSegmentLoad || headSegmentIsAvailable)
            {
                // If the head segment is already available, we may need to rehoist the value including other
                // information. So, need to track the loop out of which the head segment length can be hoisted even if
                // the head segment length is not being loaded here.
                hoistHeadSegmentLoadOutOfLoop = loop;
            }

            // JS arrays and typed arrays have separate kill bits for head segment lengths.
            if(isLikelyJsArray
                    ? loopKills.KillsArrayHeadSegmentLengths()
                    : loopKills.KillsTypedArrayHeadSegmentLengths())
            {
                if(!(doArrayChecks || doHeadSegmentLoad || doLengthLoad))
                {
                    break;
                }
            }
            else if(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable)
            {
                // If the head segment length is already available, we may need to rehoist the value including other
                // information. So, need to track the loop out of which the head segment length can be hoisted even if
                // the head segment length is not being loaded here.
                hoistHeadSegmentLengthLoadOutOfLoop = loop;
            }
        }

        if(isLikelyJsArray && loopKills.KillsArrayLengths())
        {
            if(!(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad))
            {
                // Everything remaining is killed by this loop; stop walking outward.
                break;
            }
        }
        else if(doLengthLoad || lengthIsAvailable)
        {
            // If the length is already available, we may need to rehoist the value including other information. So,
            // need to track the loop out of which the head segment length can be hoisted even if the length is not
            // being loaded here.
            hoistLengthLoadOutOfLoop = loop;
        }
  14863. }
  14864. }
    // Insertion point for instructions generated at the current instruction (before any ByteCodeUses instrs that
    // precede it). May be moved earlier later on, when a shared bailout point is created (see ShareBailOut below).
    IR::Instr *insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();

    // Inserts a hoisted instruction into the given loop's landing pad. If the landing pad already has a bail target
    // (bailOutInfo->bailOutInstr), the instruction is inserted just before it (and takes its byte-code offset) so it
    // is covered by that bail target; otherwise it is placed into the landing pad after its last instruction's
    // position, using that instruction's byte-code offset.
    const auto InsertInstrInLandingPad = [&](IR::Instr *const instr, Loop *const hoistOutOfLoop)
    {
        if(hoistOutOfLoop->bailOutInfo->bailOutInstr)
        {
            instr->SetByteCodeOffset(hoistOutOfLoop->bailOutInfo->bailOutInstr);
            hoistOutOfLoop->bailOutInfo->bailOutInstr->InsertBefore(instr);
        }
        else
        {
            instr->SetByteCodeOffset(hoistOutOfLoop->landingPad->GetLastInstr());
            hoistOutOfLoop->landingPad->InsertAfter(instr);
        }
    };
    // Bailout info that can be shared among the bail-out instructions generated below (array check, irregular-length
    // check, bound checks), together with the instruction that originally owned it.
    BailOutInfo *shareableBailOutInfo = nullptr;
    IR::Instr *shareableBailOutInfoOriginalOwner = nullptr;

    // Converts the shareable bailout info into an explicitly shared one the first time sharing is needed: the
    // original owner's bailout is split into a shared bail point, which is then moved to just before the current
    // insertion point so that all subsequently inserted instructions are covered by it. Idempotent — once shared,
    // bailOutInstr no longer equals the original owner and the lambda returns immediately.
    const auto ShareBailOut = [&]()
    {
        Assert(shareableBailOutInfo);
        if(shareableBailOutInfo->bailOutInstr != shareableBailOutInfoOriginalOwner)
        {
            // Already shared; nothing to do.
            return;
        }

        Assert(shareableBailOutInfoOriginalOwner->GetBailOutInfo() == shareableBailOutInfo);
        IR::Instr *const sharedBailOut = shareableBailOutInfoOriginalOwner->ShareBailOut();
        Assert(sharedBailOut->GetBailOutInfo() == shareableBailOutInfo);
        shareableBailOutInfoOriginalOwner = nullptr;

        // Relocate the shared bail point to the current insertion point; future insertions go before it and are
        // therefore covered by the shared bailout.
        sharedBailOut->Unlink();
        insertBeforeInstr->InsertBefore(sharedBailOut);
        insertBeforeInstr = sharedBailOut;
    };
    // Separate the array type check out of the original instruction into an explicit BailOnNotArray (or
    // BailOnNotNativeArray) instruction, hoisting it into a loop landing pad when hoistChecksOutOfLoop was determined
    // above. On success the base operand's value type is promoted to newBaseValueType.
    if(doArrayChecks)
    {
        TRACE_TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Separating array checks with bailout\n"));

        IR::Instr *bailOnNotArray = IR::Instr::New(Js::OpCode::BailOnNotArray, instr->m_func);
        bailOnNotArray->SetSrc1(baseOpnd);
        bailOnNotArray->GetSrc1()->SetIsJITOptimizedReg(true);
        const IR::BailOutKind bailOutKind =
            newBaseValueType.IsLikelyNativeArray() ? IR::BailOutOnNotNativeArray : IR::BailOutOnNotArray;
        if(hoistChecksOutOfLoop)
        {
            // The check may only be hoisted out of a loop that does not kill the value type being checked for.
            Assert(!(isLikelyJsArray && hoistChecksOutOfLoop->jsArrayKills.KillsValueType(newBaseValueType)));
            TRACE_PHASE_INSTR(
                Js::ArrayCheckHoistPhase,
                instr,
                _u("Hoisting array checks with bailout out of loop %u to landing pad block %u\n"),
                hoistChecksOutOfLoop->GetLoopNumber(),
                hoistChecksOutOfLoop->landingPad->GetBlockNum());
            TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Hoisting array checks with bailout out of loop\n"));

            Assert(hoistChecksOutOfLoop->bailOutInfo);
            EnsureBailTarget(hoistChecksOutOfLoop);
            InsertInstrInLandingPad(bailOnNotArray, hoistChecksOutOfLoop);
            bailOnNotArray = bailOnNotArray->ConvertToBailOutInstr(hoistChecksOutOfLoop->bailOutInfo, bailOutKind);
        }
        else
        {
            // Not hoisted: insert at the current instruction and make its bailout info available for sharing with
            // the other checks generated below.
            bailOnNotArray->SetByteCodeOffset(instr);
            insertBeforeInstr->InsertBefore(bailOnNotArray);
            GenerateBailAtOperation(&bailOnNotArray, bailOutKind);
            shareableBailOutInfo = bailOnNotArray->GetBailOutInfo();
            shareableBailOutInfoOriginalOwner = bailOnNotArray;
        }

        // The check (or hoisted check) guarantees the new value type from here on.
        baseValueType = newBaseValueType;
        baseOpnd->SetValueType(newBaseValueType);
    }
    // Separate the array length load into an explicit LdIndir into newLengthSym, followed by a BailOnNegative check
    // (an "irregular" length is one whose sign bit is set). Hoisted into hoistLengthLoadOutOfLoop's landing pad when
    // that was determined above; otherwise inserted at the current instruction, sharing bailout info if possible.
    if(doLengthLoad)
    {
        Assert(baseValueType.IsArray());
        Assert(newLengthSym);
        TRACE_TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Separating array length load\n"));

        // Create an initial value for the length. The value range [0, INT32_MAX] is guaranteed by the
        // BailOnNegative check generated below.
        blockData.liveVarSyms->Set(newLengthSym->m_id);
        Value *const lengthValue = NewIntRangeValue(0, INT32_MAX, false);
        SetValue(&blockData, lengthValue, newLengthSym);

        // SetValue above would have set the sym store to newLengthSym. This sym won't be used for copy-prop though, so
        // remove it as the sym store.
        this->SetSymStoreDirect(lengthValue->GetValueInfo(), nullptr);

        // length = [array + offsetOf(length)]
        IR::Instr *const loadLength =
            IR::Instr::New(
                Js::OpCode::LdIndir,
                IR::RegOpnd::New(newLengthSym, newLengthSym->GetType(), instr->m_func),
                IR::IndirOpnd::New(
                    baseOpnd,
                    Js::JavascriptArray::GetOffsetOfLength(),
                    newLengthSym->GetType(),
                    instr->m_func),
                instr->m_func);
        loadLength->GetDst()->SetIsJITOptimizedReg(true);
        loadLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);

        // BailOnNegative length (BailOutOnIrregularLength)
        IR::Instr *bailOnIrregularLength = IR::Instr::New(Js::OpCode::BailOnNegative, instr->m_func);
        bailOnIrregularLength->SetSrc1(loadLength->GetDst());

        const IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
        if(hoistLengthLoadOutOfLoop)
        {
            // The load may only be hoisted out of a loop that does not kill array lengths.
            Assert(!hoistLengthLoadOutOfLoop->jsArrayKills.KillsArrayLengths());
            TRACE_PHASE_INSTR(
                Js::Phase::ArrayLengthHoistPhase,
                instr,
                _u("Hoisting array length load out of loop %u to landing pad block %u\n"),
                hoistLengthLoadOutOfLoop->GetLoopNumber(),
                hoistLengthLoadOutOfLoop->landingPad->GetBlockNum());
            TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Hoisting array length load out of loop\n"));

            Assert(hoistLengthLoadOutOfLoop->bailOutInfo);
            EnsureBailTarget(hoistLengthLoadOutOfLoop);
            InsertInstrInLandingPad(loadLength, hoistLengthLoadOutOfLoop);
            InsertInstrInLandingPad(bailOnIrregularLength, hoistLengthLoadOutOfLoop);
            bailOnIrregularLength =
                bailOnIrregularLength->ConvertToBailOutInstr(hoistLengthLoadOutOfLoop->bailOutInfo, bailOutKind);

            // Hoist the length value: make newLengthSym live with a copy of its value in every block back to the
            // landing pad in which the base sym's value is invariant.
            for(InvariantBlockBackwardIterator it(
                    this,
                    currentBlock,
                    hoistLengthLoadOutOfLoop->landingPad,
                    baseOpnd->m_sym,
                    baseValue->GetValueNumber());
                it.IsValid();
                it.MoveNext())
            {
                BasicBlock *const block = it.Block();
                block->globOptData.liveVarSyms->Set(newLengthSym->m_id);
                Assert(!FindValue(block->globOptData.symToValueMap, newLengthSym));
                Value *const lengthValueCopy = CopyValue(lengthValue, lengthValue->GetValueNumber());
                SetValue(&block->globOptData, lengthValueCopy, newLengthSym);
                this->SetSymStoreDirect(lengthValueCopy->GetValueInfo(), nullptr);
            }
        }
        else
        {
            // Not hoisted: insert at the current instruction, reusing the shareable bailout info when one was
            // created earlier (e.g. by the array check), or creating and publishing a new one otherwise.
            loadLength->SetByteCodeOffset(instr);
            insertBeforeInstr->InsertBefore(loadLength);
            bailOnIrregularLength->SetByteCodeOffset(instr);
            insertBeforeInstr->InsertBefore(bailOnIrregularLength);
            if(shareableBailOutInfo)
            {
                ShareBailOut();
                bailOnIrregularLength = bailOnIrregularLength->ConvertToBailOutInstr(shareableBailOutInfo, bailOutKind);
            }
            else
            {
                GenerateBailAtOperation(&bailOnIrregularLength, bailOutKind);
                shareableBailOutInfo = bailOnIrregularLength->GetBailOutInfo();
                shareableBailOutInfoOriginalOwner = bailOnIrregularLength;
            }
        }
    }
    // Generates the load of the array's head segment pointer into newHeadSegmentSym. For ObjectWithArray bases the
    // internal object array pointer is loaded first, and the head segment is then loaded through it. The
    // instruction(s) go into hoistHeadSegmentLoadOutOfLoop's landing pad when hoisting was decided above, or before
    // the current instruction otherwise.
    const auto InsertHeadSegmentLoad = [&]()
    {
        TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment load\n"));

        Assert(newHeadSegmentSym);
        IR::RegOpnd *const headSegmentOpnd =
            IR::RegOpnd::New(newHeadSegmentSym, newHeadSegmentSym->GetType(), instr->m_func);
        headSegmentOpnd->SetIsJITOptimizedReg(true);
        IR::RegOpnd *const jitOptimizedBaseOpnd = baseOpnd->Copy(instr->m_func)->AsRegOpnd();
        jitOptimizedBaseOpnd->SetIsJITOptimizedReg(true);
        IR::Instr *loadObjectArray;
        if(baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
        {
            // objectArray = [base + offsetOf(objectArray)] — note that headSegmentOpnd is reused as a temporary dst
            // here; the head segment load below then reads through it.
            loadObjectArray =
                IR::Instr::New(
                    Js::OpCode::LdIndir,
                    headSegmentOpnd,
                    IR::IndirOpnd::New(
                        jitOptimizedBaseOpnd,
                        Js::DynamicObject::GetOffsetOfObjectArray(),
                        jitOptimizedBaseOpnd->GetType(),
                        instr->m_func),
                    instr->m_func);
        }
        else
        {
            loadObjectArray = nullptr;
        }

        // headSegment = [array (or objectArray) + offsetOf(head)]
        IR::Instr *const loadHeadSegment =
            IR::Instr::New(
                Js::OpCode::LdIndir,
                headSegmentOpnd,
                IR::IndirOpnd::New(
                    loadObjectArray ? headSegmentOpnd : jitOptimizedBaseOpnd,
                    Lowerer::GetArrayOffsetOfHeadSegment(baseValueType),
                    headSegmentOpnd->GetType(),
                    instr->m_func),
                instr->m_func);
        if(hoistHeadSegmentLoadOutOfLoop)
        {
            // The load may only be hoisted out of a loop that does not kill array head segments.
            Assert(!(isLikelyJsArray && hoistHeadSegmentLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegments()));
            TRACE_PHASE_INSTR(
                Js::ArraySegmentHoistPhase,
                instr,
                _u("Hoisting array segment load out of loop %u to landing pad block %u\n"),
                hoistHeadSegmentLoadOutOfLoop->GetLoopNumber(),
                hoistHeadSegmentLoadOutOfLoop->landingPad->GetBlockNum());
            TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment load out of loop\n"));

            if(loadObjectArray)
            {
                InsertInstrInLandingPad(loadObjectArray, hoistHeadSegmentLoadOutOfLoop);
            }
            InsertInstrInLandingPad(loadHeadSegment, hoistHeadSegmentLoadOutOfLoop);
        }
        else
        {
            if(loadObjectArray)
            {
                loadObjectArray->SetByteCodeOffset(instr);
                insertBeforeInstr->InsertBefore(loadObjectArray);
            }
            loadHeadSegment->SetByteCodeOffset(instr);
            insertBeforeInstr->InsertBefore(loadHeadSegment);
            // Record on the original instruction that its head segment load was separated out here.
            instr->loadedArrayHeadSegment = true;
        }
    };
    if(doHeadSegmentLoad && isLikelyJsArray)
    {
        // For javascript arrays, the head segment is required to load the head segment length
        InsertHeadSegmentLoad();
    }
    // Separate the head segment length load into an explicit LdIndir into newHeadSegmentLengthSym. For JS arrays the
    // length is read off the head segment (loaded above or already available); for typed arrays it is read directly
    // off the array object. Hoisted into hoistHeadSegmentLengthLoadOutOfLoop's landing pad when decided above.
    if(doHeadSegmentLengthLoad)
    {
        Assert(!isLikelyJsArray || newHeadSegmentSym || baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym());
        Assert(newHeadSegmentLengthSym);
        Assert(!headSegmentLengthValue);
        TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment length load\n"));

        // Create an initial value for the head segment length
        blockData.liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
        headSegmentLengthValue = NewIntRangeValue(0, Js::SparseArraySegmentBase::MaxLength, false);
        headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
        SetValue(&blockData, headSegmentLengthValue, newHeadSegmentLengthSym);

        // SetValue above would have set the sym store to newHeadSegmentLengthSym. This sym won't be used for copy-prop
        // though, so remove it as the sym store.
        this->SetSymStoreDirect(headSegmentLengthValue->GetValueInfo(), nullptr);

        // JS arrays: read through the head segment sym (freshly loaded or previously available); typed arrays: no
        // head segment sym is involved.
        StackSym *const headSegmentSym =
            isLikelyJsArray
                ? newHeadSegmentSym ? newHeadSegmentSym : baseArrayValueInfo->HeadSegmentSym()
                : nullptr;
        IR::Instr *const loadHeadSegmentLength =
            IR::Instr::New(
                Js::OpCode::LdIndir,
                IR::RegOpnd::New(newHeadSegmentLengthSym, newHeadSegmentLengthSym->GetType(), instr->m_func),
                IR::IndirOpnd::New(
                    isLikelyJsArray ? IR::RegOpnd::New(headSegmentSym, headSegmentSym->GetType(), instr->m_func) : baseOpnd,
                    isLikelyJsArray
                        ? Js::SparseArraySegmentBase::GetOffsetOfLength()
                        : Lowerer::GetArrayOffsetOfLength(baseValueType),
                    newHeadSegmentLengthSym->GetType(),
                    instr->m_func),
                instr->m_func);
        loadHeadSegmentLength->GetDst()->SetIsJITOptimizedReg(true);
        loadHeadSegmentLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);

        // We don't check the head segment length for negative (very large uint32) values. For JS arrays, the bound checks
        // cover that. For typed arrays, we currently don't allocate array buffers with more than 1 GB elements.
        if(hoistHeadSegmentLengthLoadOutOfLoop)
        {
            // The load may only be hoisted out of a loop that does not kill the relevant head segment lengths.
            Assert(
                !(
                    isLikelyJsArray
                        ? hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegmentLengths()
                        : hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
                ));
            TRACE_PHASE_INSTR(
                Js::ArraySegmentHoistPhase,
                instr,
                _u("Hoisting array segment length load out of loop %u to landing pad block %u\n"),
                hoistHeadSegmentLengthLoadOutOfLoop->GetLoopNumber(),
                hoistHeadSegmentLengthLoadOutOfLoop->landingPad->GetBlockNum());
            TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment length load out of loop\n"));

            InsertInstrInLandingPad(loadHeadSegmentLength, hoistHeadSegmentLengthLoadOutOfLoop);

            // Hoist the head segment length value: make newHeadSegmentLengthSym live with a copy of its value in
            // every block back to the landing pad in which the base sym's value is invariant.
            for(InvariantBlockBackwardIterator it(
                    this,
                    currentBlock,
                    hoistHeadSegmentLengthLoadOutOfLoop->landingPad,
                    baseOpnd->m_sym,
                    baseValue->GetValueNumber());
                it.IsValid();
                it.MoveNext())
            {
                BasicBlock *const block = it.Block();
                block->globOptData.liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
                Assert(!FindValue(block->globOptData.symToValueMap, newHeadSegmentLengthSym));
                Value *const headSegmentLengthValueCopy =
                    CopyValue(headSegmentLengthValue, headSegmentLengthValue->GetValueNumber());
                SetValue(&block->globOptData, headSegmentLengthValueCopy, newHeadSegmentLengthSym);
                this->SetSymStoreDirect(headSegmentLengthValueCopy->GetValueInfo(), nullptr);
            }
        }
        else
        {
            loadHeadSegmentLength->SetByteCodeOffset(instr);
            insertBeforeInstr->InsertBefore(loadHeadSegmentLength);
            // Record on the original instruction that its head segment length load was separated out here.
            instr->loadedArrayHeadSegmentLength = true;
        }
    }
  15158. if(doExtractBoundChecks)
  15159. {
  15160. Assert(!(eliminatedLowerBoundCheck && eliminatedUpperBoundCheck));
  15161. Assert(baseOwnerIndir);
  15162. Assert(!baseOwnerIndir->GetIndexOpnd() || baseOwnerIndir->GetIndexOpnd()->m_sym->IsTypeSpec());
  15163. Assert(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable);
  15164. Assert(canBailOutOnArrayAccessHelperCall);
  15165. Assert(!isStore || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || Js::IsSimd128LoadStore(instr->m_opcode));
  15166. StackSym *const headSegmentLengthSym =
  15167. headSegmentLengthIsAvailable ? baseArrayValueInfo->HeadSegmentLengthSym() : newHeadSegmentLengthSym;
  15168. Assert(headSegmentLengthSym);
  15169. Assert(headSegmentLengthValue);
  15170. ArrayLowerBoundCheckHoistInfo lowerBoundCheckHoistInfo;
  15171. ArrayUpperBoundCheckHoistInfo upperBoundCheckHoistInfo;
  15172. bool failedToUpdateCompatibleLowerBoundCheck = false, failedToUpdateCompatibleUpperBoundCheck = false;
  15173. if(DoBoundCheckHoist())
  15174. {
  15175. if(indexVarSym)
  15176. {
  15177. TRACE_PHASE_INSTR_VERBOSE(
  15178. Js::Phase::BoundCheckHoistPhase,
  15179. instr,
  15180. _u("Determining array bound check hoistability for index s%u\n"),
  15181. indexVarSym->m_id);
  15182. }
  15183. else
  15184. {
  15185. TRACE_PHASE_INSTR_VERBOSE(
  15186. Js::Phase::BoundCheckHoistPhase,
  15187. instr,
  15188. _u("Determining array bound check hoistability for index %d\n"),
  15189. indexConstantBounds.LowerBound());
  15190. }
  15191. DetermineArrayBoundCheckHoistability(
  15192. !eliminatedLowerBoundCheck,
  15193. !eliminatedUpperBoundCheck,
  15194. lowerBoundCheckHoistInfo,
  15195. upperBoundCheckHoistInfo,
  15196. isLikelyJsArray,
  15197. indexVarSym,
  15198. indexValue,
  15199. indexConstantBounds,
  15200. headSegmentLengthSym,
  15201. headSegmentLengthValue,
  15202. headSegmentLengthConstantBounds,
  15203. hoistHeadSegmentLengthLoadOutOfLoop,
  15204. failedToUpdateCompatibleLowerBoundCheck,
  15205. failedToUpdateCompatibleUpperBoundCheck);
  15206. #ifdef ENABLE_SIMDJS
  15207. // SIMD_JS
  15208. UpdateBoundCheckHoistInfoForSimd(upperBoundCheckHoistInfo, newBaseValueType, instr);
  15209. #endif
  15210. }
  15211. if(!eliminatedLowerBoundCheck)
  15212. {
  15213. eliminatedLowerBoundCheck = true;
  15214. Assert(indexVarSym);
  15215. Assert(baseOwnerIndir->GetIndexOpnd());
  15216. Assert(indexValue);
  15217. ArrayLowerBoundCheckHoistInfo &hoistInfo = lowerBoundCheckHoistInfo;
  15218. if(hoistInfo.HasAnyInfo())
  15219. {
  15220. BasicBlock *hoistBlock;
  15221. if(hoistInfo.CompatibleBoundCheckBlock())
  15222. {
  15223. hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
  15224. TRACE_PHASE_INSTR(
  15225. Js::Phase::BoundCheckHoistPhase,
  15226. instr,
  15227. _u("Hoisting array lower bound check into existing bound check instruction in block %u\n"),
  15228. hoistBlock->GetBlockNum());
  15229. TESTTRACE_PHASE_INSTR(
  15230. Js::Phase::BoundCheckHoistPhase,
  15231. instr,
  15232. _u("Hoisting array lower bound check into existing bound check instruction\n"));
  15233. }
  15234. else
  15235. {
  15236. Assert(hoistInfo.Loop());
  15237. BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
  15238. hoistBlock = landingPad;
  15239. StackSym *indexIntSym;
  15240. if(hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
  15241. {
  15242. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad))
  15243. {
  15244. // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize
  15245. // it in this block if it is invariant, as the conversion will be hoisted along with value
  15246. // updates.
  15247. BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
  15248. IR::Instr *specializeBeforeInstr = nullptr;
  15249. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), &blockData) &&
  15250. OptIsInvariant(
  15251. hoistInfo.IndexSym(),
  15252. currentBlock,
  15253. hoistInfo.Loop(),
  15254. FindValue(hoistInfo.IndexSym()),
  15255. false,
  15256. true))
  15257. {
  15258. specializationBlock = currentBlock;
  15259. specializeBeforeInstr = insertBeforeInstr;
  15260. }
  15261. Assert(tempBv->IsEmpty());
  15262. tempBv->Set(hoistInfo.IndexSym()->m_id);
  15263. ToInt32(tempBv, specializationBlock, false, specializeBeforeInstr);
  15264. tempBv->ClearAll();
  15265. Assert(IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad));
  15266. }
  15267. indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
  15268. Assert(indexIntSym);
  15269. }
  15270. else
  15271. {
  15272. indexIntSym = hoistInfo.IndexSym();
  15273. Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
  15274. }
  15275. // The info in the landing pad may be better than the info in the current block due to changes made to
  15276. // the index sym inside the loop. Check if the bound check we intend to hoist is unnecessary in the
  15277. // landing pad.
  15278. if(!ValueInfo::IsLessThanOrEqualTo(
  15279. nullptr,
  15280. 0,
  15281. 0,
  15282. hoistInfo.IndexValue(),
  15283. hoistInfo.IndexConstantBounds().LowerBound(),
  15284. hoistInfo.IndexConstantBounds().UpperBound(),
  15285. hoistInfo.Offset()))
  15286. {
  15287. Assert(hoistInfo.IndexSym());
  15288. Assert(hoistInfo.Loop()->bailOutInfo);
  15289. EnsureBailTarget(hoistInfo.Loop());
  15290. if(hoistInfo.LoopCount())
  15291. {
  15292. // Generate the loop count and loop count based bound that will be used for the bound check
  15293. if(!hoistInfo.LoopCount()->HasBeenGenerated())
  15294. {
  15295. GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
  15296. }
  15297. GenerateSecondaryInductionVariableBound(
  15298. hoistInfo.Loop(),
  15299. indexVarSym->GetInt32EquivSym(nullptr),
  15300. hoistInfo.LoopCount(),
  15301. hoistInfo.MaxMagnitudeChange(),
  15302. hoistInfo.IndexSym());
  15303. }
  15304. IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
  15305. IR::Opnd* upperBound = IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func);
  15306. upperBound->SetIsJITOptimizedReg(true);
  15307. // 0 <= indexSym + offset (src1 <= src2 + dst)
  15308. IR::Instr *const boundCheck = CreateBoundsCheckInstr(
  15309. lowerBound,
  15310. upperBound,
  15311. hoistInfo.Offset(),
  15312. hoistInfo.IsLoopCountBasedBound()
  15313. ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
  15314. : IR::BailOutOnFailedHoistedBoundCheck,
  15315. hoistInfo.Loop()->bailOutInfo,
  15316. hoistInfo.Loop()->bailOutInfo->bailOutFunc);
  15317. InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
  15318. TRACE_PHASE_INSTR(
  15319. Js::Phase::BoundCheckHoistPhase,
  15320. instr,
  15321. _u("Hoisting array lower bound check out of loop %u to landing pad block %u, as (0 <= s%u + %d)\n"),
  15322. hoistInfo.Loop()->GetLoopNumber(),
  15323. landingPad->GetBlockNum(),
  15324. hoistInfo.IndexSym()->m_id,
  15325. hoistInfo.Offset());
  15326. TESTTRACE_PHASE_INSTR(
  15327. Js::Phase::BoundCheckHoistPhase,
  15328. instr,
  15329. _u("Hoisting array lower bound check out of loop\n"));
  15330. // Record the bound check instruction as available
  15331. const IntBoundCheck boundCheckInfo(
  15332. ZeroValueNumber,
  15333. hoistInfo.IndexValueNumber(),
  15334. boundCheck,
  15335. landingPad);
  15336. {
  15337. const bool added = blockData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15338. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  15339. }
  15340. for(InvariantBlockBackwardIterator it(this, currentBlock, landingPad, nullptr);
  15341. it.IsValid();
  15342. it.MoveNext())
  15343. {
  15344. const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15345. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  15346. }
  15347. }
  15348. }
  15349. // Update values of the syms involved in the bound check to reflect the bound check
  15350. if(hoistBlock != currentBlock && hoistInfo.IndexSym() && hoistInfo.Offset() != INT32_MIN)
  15351. {
  15352. for(InvariantBlockBackwardIterator it(
  15353. this,
  15354. currentBlock->next,
  15355. hoistBlock,
  15356. hoistInfo.IndexSym(),
  15357. hoistInfo.IndexValueNumber());
  15358. it.IsValid();
  15359. it.MoveNext())
  15360. {
  15361. Value *const value = it.InvariantSymValue();
  15362. IntConstantBounds constantBounds;
  15363. AssertVerify(value->GetValueInfo()->TryGetIntConstantBounds(&constantBounds, true));
  15364. ValueInfo *const newValueInfo =
  15365. UpdateIntBoundsForGreaterThanOrEqual(
  15366. value,
  15367. constantBounds,
  15368. nullptr,
  15369. IntConstantBounds(-hoistInfo.Offset(), -hoistInfo.Offset()),
  15370. false);
  15371. if(newValueInfo)
  15372. {
  15373. ChangeValueInfo(nullptr, value, newValueInfo);
  15374. if(it.Block() == currentBlock && value == indexValue)
  15375. {
  15376. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  15377. }
  15378. }
  15379. }
  15380. }
  15381. }
  15382. else
  15383. {
  15384. IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
  15385. IR::Opnd* upperBound = baseOwnerIndir->GetIndexOpnd();
  15386. upperBound->SetIsJITOptimizedReg(true);
  15387. const int offset = 0;
  15388. IR::Instr *boundCheck;
  15389. if(shareableBailOutInfo)
  15390. {
  15391. ShareBailOut();
  15392. boundCheck = CreateBoundsCheckInstr(
  15393. lowerBound,
  15394. upperBound,
  15395. offset,
  15396. IR::BailOutOnArrayAccessHelperCall,
  15397. shareableBailOutInfo,
  15398. shareableBailOutInfo->bailOutFunc);
  15399. }
  15400. else
  15401. {
  15402. boundCheck = CreateBoundsCheckInstr(
  15403. lowerBound,
  15404. upperBound,
  15405. offset,
  15406. instr->m_func);
  15407. }
  15408. boundCheck->SetByteCodeOffset(instr);
  15409. insertBeforeInstr->InsertBefore(boundCheck);
  15410. if(!shareableBailOutInfo)
  15411. {
  15412. GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
  15413. shareableBailOutInfo = boundCheck->GetBailOutInfo();
  15414. shareableBailOutInfoOriginalOwner = boundCheck;
  15415. }
  15416. TRACE_PHASE_INSTR(
  15417. Js::Phase::BoundCheckEliminationPhase,
  15418. instr,
  15419. _u("Separating array lower bound check, as (0 <= s%u)\n"),
  15420. indexVarSym->m_id);
  15421. TESTTRACE_PHASE_INSTR(
  15422. Js::Phase::BoundCheckEliminationPhase,
  15423. instr,
  15424. _u("Separating array lower bound check\n"));
  15425. if(DoBoundCheckHoist())
  15426. {
  15427. // Record the bound check instruction as available
  15428. const bool added =
  15429. blockData.availableIntBoundChecks->AddNew(
  15430. IntBoundCheck(ZeroValueNumber, indexValue->GetValueNumber(), boundCheck, currentBlock)) >= 0;
  15431. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  15432. }
  15433. }
  15434. // Update the index value to reflect the bound check
  15435. ValueInfo *const newValueInfo =
  15436. UpdateIntBoundsForGreaterThanOrEqual(
  15437. indexValue,
  15438. indexConstantBounds,
  15439. nullptr,
  15440. IntConstantBounds(0, 0),
  15441. false);
  15442. if(newValueInfo)
  15443. {
  15444. ChangeValueInfo(nullptr, indexValue, newValueInfo);
  15445. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  15446. }
  15447. }
  15448. if(!eliminatedUpperBoundCheck)
  15449. {
  15450. eliminatedUpperBoundCheck = true;
  15451. ArrayUpperBoundCheckHoistInfo &hoistInfo = upperBoundCheckHoistInfo;
  15452. if(hoistInfo.HasAnyInfo())
  15453. {
  15454. BasicBlock *hoistBlock;
  15455. if(hoistInfo.CompatibleBoundCheckBlock())
  15456. {
  15457. hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
  15458. TRACE_PHASE_INSTR(
  15459. Js::Phase::BoundCheckHoistPhase,
  15460. instr,
  15461. _u("Hoisting array upper bound check into existing bound check instruction in block %u\n"),
  15462. hoistBlock->GetBlockNum());
  15463. TESTTRACE_PHASE_INSTR(
  15464. Js::Phase::BoundCheckHoistPhase,
  15465. instr,
  15466. _u("Hoisting array upper bound check into existing bound check instruction\n"));
  15467. }
  15468. else
  15469. {
  15470. Assert(hoistInfo.Loop());
  15471. BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
  15472. hoistBlock = landingPad;
  15473. StackSym *indexIntSym;
  15474. if(hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
  15475. {
  15476. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad))
  15477. {
  15478. // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize it
  15479. // in this block if it is invariant, as the conversion will be hoisted along with value updates.
  15480. BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
  15481. IR::Instr *specializeBeforeInstr = nullptr;
  15482. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), &blockData) &&
  15483. OptIsInvariant(
  15484. hoistInfo.IndexSym(),
  15485. currentBlock,
  15486. hoistInfo.Loop(),
  15487. FindValue(hoistInfo.IndexSym()),
  15488. false,
  15489. true))
  15490. {
  15491. specializationBlock = currentBlock;
  15492. specializeBeforeInstr = insertBeforeInstr;
  15493. }
  15494. Assert(tempBv->IsEmpty());
  15495. tempBv->Set(hoistInfo.IndexSym()->m_id);
  15496. ToInt32(tempBv, specializationBlock, false, specializeBeforeInstr);
  15497. tempBv->ClearAll();
  15498. Assert(IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad));
  15499. }
  15500. indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
  15501. Assert(indexIntSym);
  15502. }
  15503. else
  15504. {
  15505. indexIntSym = hoistInfo.IndexSym();
  15506. Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
  15507. }
  15508. // The info in the landing pad may be better than the info in the current block due to changes made to the
  15509. // index sym inside the loop. Check if the bound check we intend to hoist is unnecessary in the landing pad.
  15510. if(!ValueInfo::IsLessThanOrEqualTo(
  15511. hoistInfo.IndexValue(),
  15512. hoistInfo.IndexConstantBounds().LowerBound(),
  15513. hoistInfo.IndexConstantBounds().UpperBound(),
  15514. hoistInfo.HeadSegmentLengthValue(),
  15515. hoistInfo.HeadSegmentLengthConstantBounds().LowerBound(),
  15516. hoistInfo.HeadSegmentLengthConstantBounds().UpperBound(),
  15517. hoistInfo.Offset()))
  15518. {
  15519. Assert(hoistInfo.Loop()->bailOutInfo);
  15520. EnsureBailTarget(hoistInfo.Loop());
  15521. if(hoistInfo.LoopCount())
  15522. {
  15523. // Generate the loop count and loop count based bound that will be used for the bound check
  15524. if(!hoistInfo.LoopCount()->HasBeenGenerated())
  15525. {
  15526. GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
  15527. }
  15528. GenerateSecondaryInductionVariableBound(
  15529. hoistInfo.Loop(),
  15530. indexVarSym->GetInt32EquivSym(nullptr),
  15531. hoistInfo.LoopCount(),
  15532. hoistInfo.MaxMagnitudeChange(),
  15533. hoistInfo.IndexSym());
  15534. }
  15535. IR::Opnd* lowerBound = indexIntSym
  15536. ? static_cast<IR::Opnd *>(IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func))
  15537. : IR::IntConstOpnd::New(
  15538. hoistInfo.IndexConstantBounds().LowerBound(),
  15539. TyInt32,
  15540. instr->m_func);
  15541. lowerBound->SetIsJITOptimizedReg(true);
  15542. IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
  15543. upperBound->SetIsJITOptimizedReg(true);
  15544. // indexSym <= headSegmentLength + offset (src1 <= src2 + dst)
  15545. IR::Instr *const boundCheck = CreateBoundsCheckInstr(
  15546. lowerBound,
  15547. upperBound,
  15548. hoistInfo.Offset(),
  15549. hoistInfo.IsLoopCountBasedBound()
  15550. ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
  15551. : IR::BailOutOnFailedHoistedBoundCheck,
  15552. hoistInfo.Loop()->bailOutInfo,
  15553. hoistInfo.Loop()->bailOutInfo->bailOutFunc);
  15554. InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
  15555. if(indexIntSym)
  15556. {
  15557. TRACE_PHASE_INSTR(
  15558. Js::Phase::BoundCheckHoistPhase,
  15559. instr,
  15560. _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (s%u <= s%u + %d)\n"),
  15561. hoistInfo.Loop()->GetLoopNumber(),
  15562. landingPad->GetBlockNum(),
  15563. hoistInfo.IndexSym()->m_id,
  15564. headSegmentLengthSym->m_id,
  15565. hoistInfo.Offset());
  15566. }
  15567. else
  15568. {
  15569. TRACE_PHASE_INSTR(
  15570. Js::Phase::BoundCheckHoistPhase,
  15571. instr,
  15572. _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (%d <= s%u + %d)\n"),
  15573. hoistInfo.Loop()->GetLoopNumber(),
  15574. landingPad->GetBlockNum(),
  15575. hoistInfo.IndexConstantBounds().LowerBound(),
  15576. headSegmentLengthSym->m_id,
  15577. hoistInfo.Offset());
  15578. }
  15579. TESTTRACE_PHASE_INSTR(
  15580. Js::Phase::BoundCheckHoistPhase,
  15581. instr,
  15582. _u("Hoisting array upper bound check out of loop\n"));
  15583. // Record the bound check instruction as available
  15584. const IntBoundCheck boundCheckInfo(
  15585. hoistInfo.IndexValue() ? hoistInfo.IndexValueNumber() : ZeroValueNumber,
  15586. hoistInfo.HeadSegmentLengthValue()->GetValueNumber(),
  15587. boundCheck,
  15588. landingPad);
  15589. {
  15590. const bool added = blockData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15591. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  15592. }
  15593. for(InvariantBlockBackwardIterator it(this, currentBlock, landingPad, nullptr);
  15594. it.IsValid();
  15595. it.MoveNext())
  15596. {
  15597. const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15598. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  15599. }
  15600. }
  15601. }
  15602. // Update values of the syms involved in the bound check to reflect the bound check
  15603. Assert(!hoistInfo.Loop() || hoistBlock != currentBlock);
  15604. if(hoistBlock != currentBlock)
  15605. {
  15606. for(InvariantBlockBackwardIterator it(this, currentBlock->next, hoistBlock, nullptr);
  15607. it.IsValid();
  15608. it.MoveNext())
  15609. {
  15610. BasicBlock *const block = it.Block();
  15611. Value *leftValue;
  15612. IntConstantBounds leftConstantBounds;
  15613. if(hoistInfo.IndexSym())
  15614. {
  15615. leftValue = FindValue(block->globOptData.symToValueMap, hoistInfo.IndexSym());
  15616. if(!leftValue || leftValue->GetValueNumber() != hoistInfo.IndexValueNumber())
  15617. {
  15618. continue;
  15619. }
  15620. AssertVerify(leftValue->GetValueInfo()->TryGetIntConstantBounds(&leftConstantBounds, true));
  15621. }
  15622. else
  15623. {
  15624. leftValue = nullptr;
  15625. leftConstantBounds = hoistInfo.IndexConstantBounds();
  15626. }
  15627. Value *const rightValue = FindValue(block->globOptData.symToValueMap, headSegmentLengthSym);
  15628. if(!rightValue)
  15629. {
  15630. continue;
  15631. }
  15632. Assert(rightValue->GetValueNumber() == headSegmentLengthValue->GetValueNumber());
  15633. IntConstantBounds rightConstantBounds;
  15634. AssertVerify(rightValue->GetValueInfo()->TryGetIntConstantBounds(&rightConstantBounds));
  15635. ValueInfo *const newValueInfoForLessThanOrEqual =
  15636. UpdateIntBoundsForLessThanOrEqual(
  15637. leftValue,
  15638. leftConstantBounds,
  15639. rightValue,
  15640. rightConstantBounds,
  15641. hoistInfo.Offset(),
  15642. false);
  15643. if (newValueInfoForLessThanOrEqual)
  15644. {
  15645. ChangeValueInfo(nullptr, leftValue, newValueInfoForLessThanOrEqual);
  15646. AssertVerify(newValueInfoForLessThanOrEqual->TryGetIntConstantBounds(&leftConstantBounds, true));
  15647. if(block == currentBlock && leftValue == indexValue)
  15648. {
  15649. Assert(newValueInfoForLessThanOrEqual->IsInt());
  15650. indexConstantBounds = leftConstantBounds;
  15651. }
  15652. }
  15653. if(hoistInfo.Offset() != INT32_MIN)
  15654. {
  15655. ValueInfo *const newValueInfoForGreaterThanOrEqual =
  15656. UpdateIntBoundsForGreaterThanOrEqual(
  15657. rightValue,
  15658. rightConstantBounds,
  15659. leftValue,
  15660. leftConstantBounds,
  15661. -hoistInfo.Offset(),
  15662. false);
  15663. if (newValueInfoForGreaterThanOrEqual)
  15664. {
  15665. ChangeValueInfo(nullptr, rightValue, newValueInfoForGreaterThanOrEqual);
  15666. if(block == currentBlock)
  15667. {
  15668. Assert(rightValue == headSegmentLengthValue);
  15669. AssertVerify(newValueInfoForGreaterThanOrEqual->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
  15670. }
  15671. }
  15672. }
  15673. }
  15674. }
  15675. }
  15676. else
  15677. {
  15678. IR::Opnd* lowerBound = baseOwnerIndir->GetIndexOpnd()
  15679. ? static_cast<IR::Opnd *>(baseOwnerIndir->GetIndexOpnd())
  15680. : IR::IntConstOpnd::New(baseOwnerIndir->GetOffset(), TyInt32, instr->m_func);
  15681. lowerBound->SetIsJITOptimizedReg(true);
  15682. IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
  15683. upperBound->SetIsJITOptimizedReg(true);
  15684. const int offset = GetBoundCheckOffsetForSimd(newBaseValueType, instr, -1);
  15685. IR::Instr *boundCheck;
  15686. // index <= headSegmentLength - 1 (src1 <= src2 + dst)
  15687. if (shareableBailOutInfo)
  15688. {
  15689. ShareBailOut();
  15690. boundCheck = CreateBoundsCheckInstr(
  15691. lowerBound,
  15692. upperBound,
  15693. offset,
  15694. IR::BailOutOnArrayAccessHelperCall,
  15695. shareableBailOutInfo,
  15696. shareableBailOutInfo->bailOutFunc);
  15697. }
  15698. else
  15699. {
  15700. boundCheck = CreateBoundsCheckInstr(
  15701. lowerBound,
  15702. upperBound,
  15703. offset,
  15704. instr->m_func);
  15705. }
  15706. boundCheck->SetByteCodeOffset(instr);
  15707. insertBeforeInstr->InsertBefore(boundCheck);
  15708. if(!shareableBailOutInfo)
  15709. {
  15710. GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
  15711. shareableBailOutInfo = boundCheck->GetBailOutInfo();
  15712. shareableBailOutInfoOriginalOwner = boundCheck;
  15713. }
  15714. instr->extractedUpperBoundCheckWithoutHoisting = true;
  15715. if(baseOwnerIndir->GetIndexOpnd())
  15716. {
  15717. TRACE_PHASE_INSTR(
  15718. Js::Phase::BoundCheckEliminationPhase,
  15719. instr,
  15720. _u("Separating array upper bound check, as (s%u < s%u)\n"),
  15721. indexVarSym->m_id,
  15722. headSegmentLengthSym->m_id);
  15723. }
  15724. else
  15725. {
  15726. TRACE_PHASE_INSTR(
  15727. Js::Phase::BoundCheckEliminationPhase,
  15728. instr,
  15729. _u("Separating array upper bound check, as (%d < s%u)\n"),
  15730. baseOwnerIndir->GetOffset(),
  15731. headSegmentLengthSym->m_id);
  15732. }
  15733. TESTTRACE_PHASE_INSTR(
  15734. Js::Phase::BoundCheckEliminationPhase,
  15735. instr,
  15736. _u("Separating array upper bound check\n"));
  15737. if(DoBoundCheckHoist())
  15738. {
  15739. // Record the bound check instruction as available
  15740. const bool added =
  15741. blockData.availableIntBoundChecks->AddNew(
  15742. IntBoundCheck(
  15743. indexValue ? indexValue->GetValueNumber() : ZeroValueNumber,
  15744. headSegmentLengthValue->GetValueNumber(),
  15745. boundCheck,
  15746. currentBlock)) >= 0;
  15747. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  15748. }
  15749. }
  15750. // Update the index and head segment length values to reflect the bound check
  15751. ValueInfo *newValueInfo =
  15752. UpdateIntBoundsForLessThan(
  15753. indexValue,
  15754. indexConstantBounds,
  15755. headSegmentLengthValue,
  15756. headSegmentLengthConstantBounds,
  15757. false);
  15758. if(newValueInfo)
  15759. {
  15760. ChangeValueInfo(nullptr, indexValue, newValueInfo);
  15761. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  15762. }
  15763. newValueInfo =
  15764. UpdateIntBoundsForGreaterThan(
  15765. headSegmentLengthValue,
  15766. headSegmentLengthConstantBounds,
  15767. indexValue,
  15768. indexConstantBounds,
  15769. false);
  15770. if(newValueInfo)
  15771. {
  15772. ChangeValueInfo(nullptr, headSegmentLengthValue, newValueInfo);
  15773. }
  15774. }
  15775. }
  15776. if(doHeadSegmentLoad && !isLikelyJsArray)
  15777. {
  15778. // For typed arrays, load the length first, followed by the bound checks, and then load the head segment. This
  15779. // allows the length sym to become dead by the time of the head segment load, freeing up the register for use by the
  15780. // head segment sym.
  15781. InsertHeadSegmentLoad();
  15782. }
  15783. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
  15784. {
  15785. UpdateValue(newHeadSegmentSym, newHeadSegmentLengthSym, newLengthSym);
  15786. baseValueInfo = baseValue->GetValueInfo();
  15787. baseArrayValueInfo = baseValueInfo->IsArrayValueInfo() ? baseValueInfo->AsArrayValueInfo() : nullptr;
  15788. // Iterate up to the root loop's landing pad until all necessary value info is updated
  15789. uint hoistItemCount =
  15790. static_cast<uint>(!!hoistChecksOutOfLoop) +
  15791. !!hoistHeadSegmentLoadOutOfLoop +
  15792. !!hoistHeadSegmentLengthLoadOutOfLoop +
  15793. !!hoistLengthLoadOutOfLoop;
  15794. if(hoistItemCount != 0)
  15795. {
  15796. Loop *rootLoop = nullptr;
  15797. for(Loop *loop = currentBlock->loop; loop; loop = loop->parent)
  15798. {
  15799. rootLoop = loop;
  15800. }
  15801. Assert(rootLoop);
  15802. ValueInfo *valueInfoToHoist = baseValueInfo;
  15803. bool removeHeadSegment, removeHeadSegmentLength, removeLength;
  15804. if(baseArrayValueInfo)
  15805. {
  15806. removeHeadSegment = baseArrayValueInfo->HeadSegmentSym() && !hoistHeadSegmentLoadOutOfLoop;
  15807. removeHeadSegmentLength =
  15808. baseArrayValueInfo->HeadSegmentLengthSym() && !hoistHeadSegmentLengthLoadOutOfLoop;
  15809. removeLength = baseArrayValueInfo->LengthSym() && !hoistLengthLoadOutOfLoop;
  15810. }
  15811. else
  15812. {
  15813. removeLength = removeHeadSegmentLength = removeHeadSegment = false;
  15814. }
  15815. for(InvariantBlockBackwardIterator it(
  15816. this,
  15817. currentBlock,
  15818. rootLoop->landingPad,
  15819. baseOpnd->m_sym,
  15820. baseValue->GetValueNumber());
  15821. it.IsValid();
  15822. it.MoveNext())
  15823. {
  15824. if(removeHeadSegment || removeHeadSegmentLength || removeLength)
  15825. {
  15826. // Remove information that shouldn't be there anymore, from the value info
  15827. valueInfoToHoist =
  15828. valueInfoToHoist->AsArrayValueInfo()->Copy(
  15829. alloc,
  15830. !removeHeadSegment,
  15831. !removeHeadSegmentLength,
  15832. !removeLength);
  15833. removeLength = removeHeadSegmentLength = removeHeadSegment = false;
  15834. }
  15835. BasicBlock *const block = it.Block();
  15836. Value *const blockBaseValue = it.InvariantSymValue();
  15837. HoistInvariantValueInfo(valueInfoToHoist, blockBaseValue, block);
  15838. // See if we have completed hoisting value info for one of the items
  15839. if(hoistChecksOutOfLoop && block == hoistChecksOutOfLoop->landingPad)
  15840. {
  15841. // All other items depend on array checks, so we can just stop here
  15842. hoistChecksOutOfLoop = nullptr;
  15843. break;
  15844. }
  15845. if(hoistHeadSegmentLoadOutOfLoop && block == hoistHeadSegmentLoadOutOfLoop->landingPad)
  15846. {
  15847. hoistHeadSegmentLoadOutOfLoop = nullptr;
  15848. if(--hoistItemCount == 0)
  15849. break;
  15850. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentSym())
  15851. removeHeadSegment = true;
  15852. }
  15853. if(hoistHeadSegmentLengthLoadOutOfLoop && block == hoistHeadSegmentLengthLoadOutOfLoop->landingPad)
  15854. {
  15855. hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
  15856. if(--hoistItemCount == 0)
  15857. break;
  15858. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentLengthSym())
  15859. removeHeadSegmentLength = true;
  15860. }
  15861. if(hoistLengthLoadOutOfLoop && block == hoistLengthLoadOutOfLoop->landingPad)
  15862. {
  15863. hoistLengthLoadOutOfLoop = nullptr;
  15864. if(--hoistItemCount == 0)
  15865. break;
  15866. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->LengthSym())
  15867. removeLength = true;
  15868. }
  15869. }
  15870. }
  15871. }
  15872. }
  15873. IR::ArrayRegOpnd *baseArrayOpnd;
  15874. if(baseArrayValueInfo)
  15875. {
  15876. // Update the opnd to include the associated syms
  15877. baseArrayOpnd =
  15878. baseArrayValueInfo->CreateOpnd(
  15879. baseOpnd,
  15880. needsHeadSegment,
  15881. needsHeadSegmentLength || (!isLikelyJsArray && needsLength),
  15882. needsLength,
  15883. eliminatedLowerBoundCheck,
  15884. eliminatedUpperBoundCheck,
  15885. instr->m_func);
  15886. if(baseOwnerInstr)
  15887. {
  15888. Assert(baseOwnerInstr->GetSrc1() == baseOpnd);
  15889. baseOwnerInstr->ReplaceSrc1(baseArrayOpnd);
  15890. }
  15891. else
  15892. {
  15893. Assert(baseOwnerIndir);
  15894. Assert(baseOwnerIndir->GetBaseOpnd() == baseOpnd);
  15895. baseOwnerIndir->ReplaceBaseOpnd(baseArrayOpnd);
  15896. }
  15897. baseOpnd = baseArrayOpnd;
  15898. }
  15899. else
  15900. {
  15901. baseArrayOpnd = nullptr;
  15902. }
  15903. if(isLikelyJsArray)
  15904. {
  15905. // Insert an instruction to indicate to the dead-store pass that implicit calls need to be kept disabled until this
  15906. // instruction. Operations other than LdElem and StElem don't benefit much from arrays having no missing values, so
  15907. // no need to ensure that the array still has no missing values. For a particular array, if none of the accesses
  15908. // benefit much from the no-missing-values information, it may be beneficial to avoid checking for no missing
  15909. // values, especially in the case for a single array access, where the cost of the check could be relatively
  15910. // significant. An StElem has to do additional checks in the common path if the array may have missing values, and
  15911. // a StElem that operates on an array that has no missing values is more likely to keep the no-missing-values info
  15912. // on the array more precise, so it still benefits a little from the no-missing-values info.
  15913. CaptureNoImplicitCallUses(baseOpnd, isLoad || isStore);
  15914. }
  15915. else if(baseArrayOpnd && baseArrayOpnd->HeadSegmentLengthSym())
  15916. {
  15917. // A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the typed
  15918. // array's length is set to zero. Insert an instruction to indicate to the dead-store pass that implicit calls need to
  15919. // be disabled until this instruction.
  15920. IR::RegOpnd *const headSegmentLengthOpnd =
  15921. IR::RegOpnd::New(
  15922. baseArrayOpnd->HeadSegmentLengthSym(),
  15923. baseArrayOpnd->HeadSegmentLengthSym()->GetType(),
  15924. instr->m_func);
  15925. const IR::AutoReuseOpnd autoReuseHeadSegmentLengthOpnd(headSegmentLengthOpnd, instr->m_func);
  15926. CaptureNoImplicitCallUses(headSegmentLengthOpnd, false);
  15927. }
  15928. const auto OnEliminated = [&](const Js::Phase phase, const char *const eliminatedLoad)
  15929. {
  15930. TRACE_TESTTRACE_PHASE_INSTR(phase, instr, _u("Eliminating array %S\n"), eliminatedLoad);
  15931. };
  15932. OnEliminated(Js::Phase::ArrayCheckHoistPhase, "checks");
  15933. if(baseArrayOpnd)
  15934. {
  15935. if(baseArrayOpnd->HeadSegmentSym())
  15936. {
  15937. OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment load");
  15938. }
  15939. if(baseArrayOpnd->HeadSegmentLengthSym())
  15940. {
  15941. OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment length load");
  15942. }
  15943. if(baseArrayOpnd->LengthSym())
  15944. {
  15945. OnEliminated(Js::Phase::ArrayLengthHoistPhase, "length load");
  15946. }
  15947. if(baseArrayOpnd->EliminatedLowerBoundCheck())
  15948. {
  15949. OnEliminated(Js::Phase::BoundCheckEliminationPhase, "lower bound check");
  15950. }
  15951. if(baseArrayOpnd->EliminatedUpperBoundCheck())
  15952. {
  15953. OnEliminated(Js::Phase::BoundCheckEliminationPhase, "upper bound check");
  15954. }
  15955. }
  15956. if(!canBailOutOnArrayAccessHelperCall)
  15957. {
  15958. return;
  15959. }
  15960. // Bail out instead of generating a helper call. This helps to remove the array reference when the head segment and head
  15961. // segment length are available, reduces code size, and allows bound checks to be separated.
  15962. if(instr->HasBailOutInfo())
  15963. {
  15964. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  15965. Assert(
  15966. !(bailOutKind & ~IR::BailOutKindBits) ||
  15967. (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
  15968. instr->SetBailOutKind(bailOutKind & IR::BailOutKindBits | IR::BailOutOnArrayAccessHelperCall);
  15969. }
  15970. else
  15971. {
  15972. GenerateBailAtOperation(&instr, IR::BailOutOnArrayAccessHelperCall);
  15973. }
  15974. }
  15975. void
  15976. GlobOpt::CaptureNoImplicitCallUses(
  15977. IR::Opnd *opnd,
  15978. const bool usesNoMissingValuesInfo,
  15979. IR::Instr *const includeCurrentInstr)
  15980. {
  15981. Assert(!IsLoopPrePass());
  15982. Assert(noImplicitCallUsesToInsert);
  15983. Assert(opnd);
  15984. // The opnd may be deleted later, so make a copy to ensure it is alive for inserting NoImplicitCallUses later
  15985. opnd = opnd->Copy(func);
  15986. if(!usesNoMissingValuesInfo)
  15987. {
  15988. const ValueType valueType(opnd->GetValueType());
  15989. if(valueType.IsArrayOrObjectWithArray() && valueType.HasNoMissingValues())
  15990. {
  15991. // Inserting NoImplicitCallUses for an opnd with a definitely-array-with-no-missing-values value type means that the
  15992. // instruction following it uses the information that the array has no missing values in some way, for instance, it
  15993. // may omit missing value checks. Based on that, the dead-store phase in turn ensures that the necessary bailouts
  15994. // are inserted to ensure that the array still has no missing values until the following instruction. Since
  15995. // 'usesNoMissingValuesInfo' is false, change the value type to indicate to the dead-store phase that the following
  15996. // instruction does not use the no-missing-values information.
  15997. opnd->SetValueType(valueType.SetHasNoMissingValues(false));
  15998. }
  15999. }
  16000. if(includeCurrentInstr)
  16001. {
  16002. IR::Instr *const noImplicitCallUses =
  16003. IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, includeCurrentInstr->m_func);
  16004. noImplicitCallUses->SetSrc1(opnd);
  16005. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  16006. includeCurrentInstr->InsertAfter(noImplicitCallUses);
  16007. return;
  16008. }
  16009. noImplicitCallUsesToInsert->Add(opnd);
  16010. }
  16011. void
  16012. GlobOpt::InsertNoImplicitCallUses(IR::Instr *const instr)
  16013. {
  16014. Assert(noImplicitCallUsesToInsert);
  16015. const int n = noImplicitCallUsesToInsert->Count();
  16016. if(n == 0)
  16017. {
  16018. return;
  16019. }
  16020. IR::Instr *const insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();
  16021. for(int i = 0; i < n;)
  16022. {
  16023. IR::Instr *const noImplicitCallUses = IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, instr->m_func);
  16024. noImplicitCallUses->SetSrc1(noImplicitCallUsesToInsert->Item(i));
  16025. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  16026. ++i;
  16027. if(i < n)
  16028. {
  16029. noImplicitCallUses->SetSrc2(noImplicitCallUsesToInsert->Item(i));
  16030. noImplicitCallUses->GetSrc2()->SetIsJITOptimizedReg(true);
  16031. ++i;
  16032. }
  16033. noImplicitCallUses->SetByteCodeOffset(instr);
  16034. insertBeforeInstr->InsertBefore(noImplicitCallUses);
  16035. }
  16036. noImplicitCallUsesToInsert->Clear();
  16037. }
  16038. void
  16039. GlobOpt::PrepareLoopArrayCheckHoist()
  16040. {
  16041. if(IsLoopPrePass() || !currentBlock->loop || !currentBlock->isLoopHeader || !currentBlock->loop->parent)
  16042. {
  16043. return;
  16044. }
  16045. if(currentBlock->loop->parent->needImplicitCallBailoutChecksForJsArrayCheckHoist)
  16046. {
  16047. // If the parent loop is an array check elimination candidate, so is the current loop. Even though the current loop may
  16048. // not have array accesses, if the parent loop hoists array checks, the current loop also needs implicit call checks.
  16049. currentBlock->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
  16050. }
  16051. }
// Determines which pieces of hoisted JS-array information (no-missing-values, head segment,
// head segment length, length, native-array-ness) the given instruction may invalidate ("kill").
// Returns the accumulated kill set; callers use it to drop hoisted array value info that is no
// longer guaranteed after this instruction.
JsArrayKills
GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
{
    Assert(instr);

    JsArrayKills kills;
    if(instr->UsesAllFields())
    {
        // Calls can (but are unlikely to) change a javascript array into an ES5 array, which may have different behavior for
        // index properties.
        kills.SetKillsAllArrays();
        return kills;
    }

    // Each kill category is only tracked when the corresponding hoist optimization is enabled; if none
    // are enabled there is nothing to kill.
    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    const bool doArraySegmentHoist = DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array));
    Assert(doArraySegmentHoist == DoArraySegmentHoist(ValueType::GetObject(ObjectType::ObjectWithArray)));
    const bool doArrayLengthHoist = DoArrayLengthHoist();
    if(!doArrayMissingValueCheckHoist && !doNativeArrayTypeSpec && !doArraySegmentHoist && !doArrayLengthHoist)
    {
        return kills;
    }

    // The following operations may create missing values in an array in an unlikely circumstance. Even though they don't kill
    // the fact that the 'this' parameter is an array (when implicit calls are disabled), we don't have a way to say the value
    // type is definitely array but it likely has no missing values. So, these will kill the definite value type as well, making
    // it likely array, such that the array checks will have to be redone.
    const bool useValueTypes = !IsLoopPrePass(); // Source value types are not guaranteed to be correct in a loop prepass
    switch(instr->m_opcode)
    {
        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        {
            // Element store: profiling tells us whether it likely writes outside the head segment
            // (kills head segment + its length) or outside the array bounds (kills length).
            Assert(instr->GetDst());
            if(!instr->GetDst()->IsIndirOpnd())
            {
                break;
            }
            const ValueType baseValueType =
                useValueTypes ? instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType() : ValueType::Uninitialized;
            if(useValueTypes && baseValueType.IsNotArrayOrObjectWithArray())
            {
                // Definitely not storing into an array; no array info is affected.
                break;
            }
            if(instr->IsProfiledInstr())
            {
                const Js::StElemInfo *const stElemInfo = instr->AsProfiledInstr()->u.stElemInfo;
                if(doArraySegmentHoist && stElemInfo->LikelyStoresOutsideHeadSegmentBounds())
                {
                    kills.SetKillsArrayHeadSegments();
                    kills.SetKillsArrayHeadSegmentLengths();
                }
                if(doArrayLengthHoist &&
                    !(useValueTypes && baseValueType.IsNotArray()) &&
                    stElemInfo->LikelyStoresOutsideArrayBounds())
                {
                    kills.SetKillsArrayLengths();
                }
            }
            break;
        }

        case Js::OpCode::DeleteElemI_A:
        case Js::OpCode::DeleteElemIStrict_A:
            // Deleting an element creates a hole (kills no-missing-values) and may shrink the head
            // segment's length.
            Assert(instr->GetSrc1());
            if(!instr->GetSrc1()->IsIndirOpnd() ||
                (useValueTypes && instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsNotArrayOrObjectWithArray()))
            {
                break;
            }
            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }
            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            break;

        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
        {
            // A property store only matters here when it writes 'length' on something that may be an
            // array: that can truncate/extend the array, killing head segment lengths and lengths.
            Assert(instr->GetDst());

            if(!doArraySegmentHoist && !doArrayLengthHoist)
            {
                break;
            }

            // NOTE(review): dst is assumed to be a SymOpnd for StFld/StFldStrict — AsSymOpnd() would
            // assert otherwise; confirm against the IR builder's invariants for these opcodes.
            IR::SymOpnd *const symDst = instr->GetDst()->AsSymOpnd();
            if(!symDst->IsPropertySymOpnd())
            {
                break;
            }
            IR::PropertySymOpnd *const dst = symDst->AsPropertySymOpnd();
            if(dst->m_sym->AsPropertySym()->m_propertyId != Js::PropertyIds::length)
            {
                break;
            }

            if(useValueTypes && dst->GetPropertyOwnerValueType().IsNotArray())
            {
                // Setting the 'length' property of an object that is not an array, even if it has an internal array, does
                // not kill the head segment or head segment length of any arrays.
                break;
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            if(doArrayLengthHoist)
            {
                kills.SetKillsArrayLengths();
            }
            break;
        }

        case Js::OpCode::InlineArrayPush:
        {
            // push may convert a missing-value slot at the end, grow the head segment, and change the
            // length; it may also convert a native array if the pushed element's type doesn't match.
            Assert(instr->GetSrc2());
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegments();
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }

            // Don't kill NativeArray, if there is no mismatch between array's type and element's type.
            if(doNativeArrayTypeSpec &&
               !(useValueTypes && arrayValueType.IsNativeArray() &&
                    ((arrayValueType.IsLikelyNativeIntArray() && instr->GetSrc2()->IsInt32()) ||
                    (arrayValueType.IsLikelyNativeFloatArray() && instr->GetSrc2()->IsFloat()))
                ) &&
               !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                kills.SetKillsNativeArrays();
            }
            break;
        }

        case Js::OpCode::InlineArrayPop:
        {
            // pop shrinks the head segment length and the array length.
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }
            break;
        }

        case Js::OpCode::CallDirect:
        {
            // Direct calls into known Array.prototype helpers: which array facts die depends on the
            // specific helper being invoked.
            Assert(instr->GetSrc1());

            // Find the 'this' parameter and check if it's possible for it to be an array
            IR::Opnd *const arrayOpnd = instr->FindCallArgumentOpnd(1);
            Assert(arrayOpnd);
            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            const IR::JnHelperMethod helperMethod = instr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
            if(doArrayMissingValueCheckHoist)
            {
                // These helpers can shift elements around or insert holes.
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArraysWithNoMissingValues();
                        break;
                }
            }

            if(doArraySegmentHoist)
            {
                // These helpers can reallocate or resize the head segment.
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArrayHeadSegments();
                        kills.SetKillsArrayHeadSegmentLengths();
                        break;
                }
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                // These helpers change the array's length.
                switch(helperMethod)
                {
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArrayLengths();
                        break;
                }
            }

            if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                // These helpers may convert a native (int/float) array to a Var array.
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Slice:
                    // Currently not inlined.
                    //case IR::HelperArray_Sort:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsNativeArrays();
                        break;
                }
            }
            break;
        }
    }

    return kills;
}
  16284. bool
  16285. GlobOpt::IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr)
  16286. {
  16287. // StElem is profiled with information indicating whether it will likely create a missing value in the array. In that case,
  16288. // we prefer to kill the no-missing-values information in the value so that we don't bail out in a likely circumstance.
  16289. return
  16290. (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  16291. DoArrayMissingValueCheckHoist() &&
  16292. instr->IsProfiledInstr() &&
  16293. instr->AsProfiledInstr()->u.stElemInfo->LikelyCreatesMissingValue();
  16294. }
  16295. bool
  16296. GlobOpt::NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock *const block, const bool isForwardPass) const
  16297. {
  16298. Assert(block);
  16299. return isForwardPass && block->loop && block->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist;
  16300. }
  16301. bool
  16302. GlobOpt::PrepareForIgnoringIntOverflow(IR::Instr *const instr)
  16303. {
  16304. Assert(instr);
  16305. const bool isBoundary = instr->m_opcode == Js::OpCode::NoIntOverflowBoundary;
  16306. // Update the instruction's "int overflow matters" flag based on whether we are currently allowing ignoring int overflows.
  16307. // Some operations convert their srcs to int32s, those can still ignore int overflow.
  16308. if(instr->ignoreIntOverflowInRange)
  16309. {
  16310. instr->ignoreIntOverflowInRange = !intOverflowCurrentlyMattersInRange || OpCodeAttr::IsInt32(instr->m_opcode);
  16311. }
  16312. if(!intOverflowDoesNotMatterRange)
  16313. {
  16314. Assert(intOverflowCurrentlyMattersInRange);
  16315. // There are no more ranges of instructions where int overflow does not matter, in this block.
  16316. return isBoundary;
  16317. }
  16318. if(instr == intOverflowDoesNotMatterRange->LastInstr())
  16319. {
  16320. Assert(isBoundary);
  16321. // Reached the last instruction in the range
  16322. intOverflowCurrentlyMattersInRange = true;
  16323. intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
  16324. return isBoundary;
  16325. }
  16326. if(!intOverflowCurrentlyMattersInRange)
  16327. {
  16328. return isBoundary;
  16329. }
  16330. if(instr != intOverflowDoesNotMatterRange->FirstInstr())
  16331. {
  16332. // Have not reached the next range
  16333. return isBoundary;
  16334. }
  16335. Assert(isBoundary);
  16336. // This is the first instruction in a range of instructions where int overflow does not matter. There can be many inputs to
  16337. // instructions in the range, some of which are inputs to the range itself (that is, the values are not defined in the
  16338. // range). Ignoring int overflow is only valid for int operations, so we need to ensure that all inputs to the range are
  16339. // int (not "likely int") before ignoring any overflows in the range. Ensuring that a sym with a "likely int" value is an
  16340. // int requires a bail-out. These bail-out check need to happen before any overflows are ignored, otherwise it's too late.
  16341. // The backward pass tracked all inputs into the range. Iterate over them and verify the values, and insert lossless
  16342. // conversions to int as necessary, before the first instruction in the range. If for any reason all values cannot be
  16343. // guaranteed to be ints, the optimization will be disabled for this range.
  16344. intOverflowCurrentlyMattersInRange = false;
  16345. {
  16346. BVSparse<JitArenaAllocator> tempBv1(tempAlloc);
  16347. BVSparse<JitArenaAllocator> tempBv2(tempAlloc);
  16348. {
  16349. // Just renaming the temp BVs for this section to indicate how they're used so that it makes sense
  16350. BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
  16351. BVSparse<JitArenaAllocator> &symsToInclude = tempBv2;
  16352. #if DBG_DUMP
  16353. SymID couldNotConvertSymId = 0;
  16354. #endif
  16355. FOREACH_BITSET_IN_SPARSEBV(id, intOverflowDoesNotMatterRange->SymsRequiredToBeInt())
  16356. {
  16357. Sym *const sym = func->m_symTable->Find(id);
  16358. Assert(sym);
  16359. // Some instructions with property syms are also tracked by the backward pass, and may be included in the range
  16360. // (LdSlot for instance). These property syms don't get their values until either copy-prop resolves a value for
  16361. // them, or a new value is created once the use of the property sym is reached. In either case, we're not that
  16362. // far yet, so we need to find the future value of the property sym by evaluating copy-prop in reverse.
  16363. Value *const value = sym->IsStackSym() ? FindValue(sym) : FindFuturePropertyValue(sym->AsPropertySym());
  16364. if(!value)
  16365. {
  16366. #if DBG_DUMP
  16367. couldNotConvertSymId = id;
  16368. #endif
  16369. intOverflowCurrentlyMattersInRange = true;
  16370. BREAK_BITSET_IN_SPARSEBV;
  16371. }
  16372. const bool isInt32OrUInt32Float =
  16373. value->GetValueInfo()->IsFloatConstant() &&
  16374. Js::JavascriptNumber::IsInt32OrUInt32(value->GetValueInfo()->AsFloatConstant()->FloatValue());
  16375. if(value->GetValueInfo()->IsInt() || isInt32OrUInt32Float)
  16376. {
  16377. if(!IsLoopPrePass())
  16378. {
  16379. // Input values that are already int can be excluded from int-specialization. We can treat unsigned
  16380. // int32 values as int32 values (ignoring the overflow), since the values will only be used inside the
  16381. // range where overflow does not matter.
  16382. symsToExclude.Set(sym->m_id);
  16383. }
  16384. continue;
  16385. }
  16386. if(!DoAggressiveIntTypeSpec() || !value->GetValueInfo()->IsLikelyInt())
  16387. {
  16388. // When aggressive int specialization is off, syms with "likely int" values cannot be forced to int since
  16389. // int bail-out checks are not allowed in that mode. Similarly, with aggressive int specialization on, it
  16390. // wouldn't make sense to force non-"likely int" values to int since it would almost guarantee a bail-out at
  16391. // runtime. In both cases, just disable ignoring overflow for this range.
  16392. #if DBG_DUMP
  16393. couldNotConvertSymId = id;
  16394. #endif
  16395. intOverflowCurrentlyMattersInRange = true;
  16396. BREAK_BITSET_IN_SPARSEBV;
  16397. }
  16398. if(IsLoopPrePass())
  16399. {
  16400. // The loop prepass does not modify bit-vectors. Since it doesn't add bail-out checks, it also does not need
  16401. // to specialize anything up-front. It only needs to be consistent in how it determines whether to allow
  16402. // ignoring overflow for a range, based on the values of inputs into the range.
  16403. continue;
  16404. }
  16405. // Since input syms are tracked in the backward pass, where there is no value tracking, it will not be aware of
  16406. // copy-prop. If a copy-prop sym is available, it will be used instead, so exclude the original sym and include
  16407. // the copy-prop sym for specialization.
  16408. StackSym *const copyPropSym = GetCopyPropSym(sym, value);
  16409. if(copyPropSym)
  16410. {
  16411. symsToExclude.Set(sym->m_id);
  16412. Assert(!symsToExclude.Test(copyPropSym->m_id));
  16413. const bool needsToBeLossless =
  16414. !intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(sym->m_id);
  16415. if(intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(copyPropSym->m_id) ||
  16416. symsToInclude.TestAndSet(copyPropSym->m_id))
  16417. {
  16418. // The copy-prop sym is already included
  16419. if(needsToBeLossless)
  16420. {
  16421. // The original sym needs to be lossless, so make the copy-prop sym lossless as well.
  16422. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(copyPropSym->m_id);
  16423. }
  16424. }
  16425. else if(!needsToBeLossless)
  16426. {
  16427. // The copy-prop sym was not included before, and the original sym can be lossy, so make it lossy.
  16428. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(copyPropSym->m_id);
  16429. }
  16430. }
  16431. else if(!sym->IsStackSym())
  16432. {
  16433. // Only stack syms can be converted to int, and copy-prop syms are stack syms. If a copy-prop sym was not
  16434. // found for the property sym, we can't ignore overflows in this range.
  16435. #if DBG_DUMP
  16436. couldNotConvertSymId = id;
  16437. #endif
  16438. intOverflowCurrentlyMattersInRange = true;
  16439. BREAK_BITSET_IN_SPARSEBV;
  16440. }
  16441. } NEXT_BITSET_IN_SPARSEBV;
  16442. if(intOverflowCurrentlyMattersInRange)
  16443. {
  16444. #if DBG_DUMP
  16445. if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func) && !IsLoopPrePass())
  16446. {
  16447. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  16448. Output::Print(
  16449. _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u, Disabled ignoring overflows\n"),
  16450. func->GetJITFunctionBody()->GetDisplayName(),
  16451. func->GetDebugNumberSet(debugStringBuffer),
  16452. Js::PhaseNames[Js::ForwardPhase],
  16453. currentBlock->GetBlockNum());
  16454. Output::Print(_u(" Input sym could not be turned into an int: %u\n"), couldNotConvertSymId);
  16455. Output::Print(_u(" First instr: "));
  16456. instr->m_next->Dump();
  16457. Output::Flush();
  16458. }
  16459. #endif
  16460. intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
  16461. return isBoundary;
  16462. }
  16463. if(IsLoopPrePass())
  16464. {
  16465. return isBoundary;
  16466. }
  16467. // Update the syms to specialize after enumeration
  16468. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
  16469. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
  16470. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Or(&symsToInclude);
  16471. }
  16472. {
  16473. // Exclude syms that are already live as lossless int32, and exclude lossy conversions of syms that are already live
  16474. // as lossy int32.
  16475. // symsToExclude = liveInt32Syms - liveLossyInt32Syms // syms live as lossless int
  16476. // lossySymsToExclude = symsRequiredToBeLossyInt & liveLossyInt32Syms; // syms we want as lossy int that are already live as lossy int
  16477. // symsToExclude |= lossySymsToExclude
  16478. // symsRequiredToBeInt -= symsToExclude
  16479. // symsRequiredToBeLossyInt -= symsToExclude
  16480. BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
  16481. BVSparse<JitArenaAllocator> &lossySymsToExclude = tempBv2;
  16482. symsToExclude.Minus(currentBlock->globOptData.liveInt32Syms, currentBlock->globOptData.liveLossyInt32Syms);
  16483. lossySymsToExclude.And(
  16484. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(),
  16485. currentBlock->globOptData.liveLossyInt32Syms);
  16486. symsToExclude.Or(&lossySymsToExclude);
  16487. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
  16488. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
  16489. }
  16490. #if DBG
  16491. {
  16492. // Verify that the syms to be converted are live
  16493. // liveSyms = liveInt32Syms | liveFloat64Syms | liveVarSyms
  16494. // deadSymsRequiredToBeInt = symsRequiredToBeInt - liveSyms
  16495. BVSparse<JitArenaAllocator> &liveSyms = tempBv1;
  16496. BVSparse<JitArenaAllocator> &deadSymsRequiredToBeInt = tempBv2;
  16497. liveSyms.Or(currentBlock->globOptData.liveInt32Syms, currentBlock->globOptData.liveFloat64Syms);
  16498. liveSyms.Or(currentBlock->globOptData.liveVarSyms);
  16499. deadSymsRequiredToBeInt.Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), &liveSyms);
  16500. Assert(deadSymsRequiredToBeInt.IsEmpty());
  16501. }
  16502. #endif
  16503. }
  16504. // Int-specialize the syms before the first instruction of the range (the current instruction)
  16505. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt());
  16506. #if DBG_DUMP
  16507. if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func))
  16508. {
  16509. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  16510. Output::Print(
  16511. _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n"),
  16512. func->GetJITFunctionBody()->GetDisplayName(),
  16513. func->GetDebugNumberSet(debugStringBuffer),
  16514. Js::PhaseNames[Js::ForwardPhase],
  16515. currentBlock->GetBlockNum());
  16516. Output::Print(_u(" Input syms to be int-specialized (lossless): "));
  16517. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Dump();
  16518. Output::Print(_u(" Input syms to be converted to int (lossy): "));
  16519. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
  16520. Output::Print(_u(" First instr: "));
  16521. instr->m_next->Dump();
  16522. Output::Flush();
  16523. }
  16524. #endif
  16525. ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), currentBlock, false /* lossy */, instr);
  16526. ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(), currentBlock, true /* lossy */, instr);
  16527. return isBoundary;
  16528. }
// Verifies that an instruction inside a range where int overflow is ignored was in fact int-specialized
// (or is otherwise harmless). If specialization failed or the instruction needed a bailout, ignoring
// overflow is invalid — and since overflows may already have been ignored earlier in the range, the only
// recourse is a compile-time bailout: rejit with int overflow tracking disabled, or abort the JIT
// entirely if that tracking is already disabled (to avoid an infinite rejit loop).
void
GlobOpt::VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr)
{
    // Nothing to verify outside an "overflow doesn't matter" range; the loop prepass doesn't specialize.
    if(intOverflowCurrentlyMattersInRange || IsLoopPrePass())
    {
        return;
    }

    // A Mul_I4 in such a range must carry the non-32-bit overflow check rather than the 32-bit one.
    Assert(instr->m_opcode != Js::OpCode::Mul_I4 ||
        (instr->m_opcode == Js::OpCode::Mul_I4 && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow() ));

    // Instructions that are marked as "overflow doesn't matter" in the range must guarantee that they operate on int values and
    // result in int values, for ignoring overflow to be valid. So, int-specialization is required for such instructions in the
    // range. Ld_A is an exception because it only specializes if the src sym is available as a required specialized sym, and it
    // doesn't generate bailouts or cause ignoring int overflow to be invalid.
    // MULs are allowed to start a region and have BailOutInfo since they will bailout on non-32 bit overflow.
    if(instr->m_opcode == Js::OpCode::Ld_A ||
       ((!instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::Mul_I4) &&
        (!instr->GetDst() || instr->GetDst()->IsInt32()) &&
        (!instr->GetSrc1() || instr->GetSrc1()->IsInt32()) &&
        (!instr->GetSrc2() || instr->GetSrc2()->IsInt32())))
    {
        return;
    }

    // Without bailout info and without side effects, the unspecialized instruction can't invalidate anything.
    if (!instr->HasBailOutInfo() && !instr->HasAnySideEffects())
    {
        return;
    }

    // This can happen for Neg_A if it needs to bail out on negative zero, and perhaps other cases as well. It's too late to fix
    // the problem (overflows may already be ignored), so handle it by bailing out at compile-time and disabling tracking int
    // overflow.
    Assert(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled());

    if(PHASE_TRACE(Js::BailOutPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(
            _u("BailOut (compile-time): function: %s (%s) instr: "),
            func->GetJITFunctionBody()->GetDisplayName(),
            func->GetDebugNumberSet(debugStringBuffer));
#if DBG_DUMP
        instr->Dump();
#else
        Output::Print(_u("%s "), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
#endif
        Output::Print(_u("(overflow does not matter but could not int-spec or needed bailout)\n"));
        Output::Flush();
    }

    if(func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled())
    {
        // Tracking int overflows is already off for some reason. Prevent trying to rejit again because it won't help and the
        // same thing will happen again and cause an infinite loop. Just abort jitting this function.
        if(PHASE_TRACE(Js::BailOutPhase, this->func))
        {
            Output::Print(_u(" Aborting JIT because TrackIntOverflow is already off\n"));
            Output::Flush();
        }
        throw Js::OperationAbortedException();
    }

    throw Js::RejitException(RejitReason::TrackIntOverflowDisabled);
}
// It makes lowering easier if it can assume that the first src is never a constant,
// at least for commutative operators. For non-commutative, just hoist the constant.
//
// Canonicalizes an instruction before lowering: either swaps src1/src2 (for commutative ops and
// comparisons, inverting the comparison direction where required) so the immediate ends up in src2,
// or hoists an immediate src1 into a separate load instruction when no swap applies.
void
GlobOpt::PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val)
{
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    // First decide whether this instruction is a candidate for swapping at all.
    if (src1->IsImmediateOpnd())
    {
        // Swap for dst, src
    }
    else if (src2 && dst && src2->IsRegOpnd())
    {
        if (src2->GetIsDead() && !src1->GetIsDead() && !src1->IsEqual(dst))
        {
            // Swap if src2 is dead, as the reg can be reuse for the dst for opEqs like on x86 (ADD r1, r2)
        }
        else if (src2->IsEqual(dst))
        {
            // Helps lowering of opEqs
        }
        else
        {
            return;
        }
        // Make sure we don't swap 2 srcs with valueOf calls.
        if (OpCodeAttr::OpndHasImplicitCall(instr->m_opcode))
        {
            if (instr->IsBranchInstr())
            {
                if (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive())
                {
                    return;
                }
            }
            else if (!src1->GetValueType().IsPrimitive() && !src2->GetValueType().IsPrimitive())
            {
                return;
            }
        }
    }
    else
    {
        return;
    }

    Js::OpCode opcode = instr->m_opcode;
    switch (opcode)
    {
    // Fully commutative operators: srcs can be swapped with no opcode change.
    case Js::OpCode::And_A:
    case Js::OpCode::Mul_A:
    case Js::OpCode::Or_A:
    case Js::OpCode::Xor_A:
    case Js::OpCode::And_I4:
    case Js::OpCode::Mul_I4:
    case Js::OpCode::Or_I4:
    case Js::OpCode::Xor_I4:
    case Js::OpCode::Add_I4:
    case Js::OpCode::Add_Ptr:
swap_srcs:
        // Don't swap if that would just move an immediate back into src1. Note that `opcode` may have
        // been replaced above (by the inverted comparison) before jumping here.
        if (!instr->GetSrc2()->IsImmediateOpnd())
        {
            instr->m_opcode = opcode;
            src1 = instr->UnlinkSrc1();
            src2 = instr->UnlinkSrc2();
            instr->SetSrc1(src2);
            instr->SetSrc2(src1);

            // Keep the tracked values in sync with the swapped operands.
            Value *tempVal = *pSrc1Val;
            *pSrc1Val = *pSrc2Val;
            *pSrc2Val = tempVal;
            return;
        }
        break;

    // Strict-equality branches are symmetric: swap without changing the opcode.
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrEq_I4:
        goto swap_srcs;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNeq_I4:
        goto swap_srcs;

    // Relational branches: invert the comparison direction when the srcs swap.
    case Js::OpCode::BrGe_A:
        opcode = Js::OpCode::BrLe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGe_A:
        opcode = Js::OpCode::BrNotLe_A;
        goto swap_srcs;

    case Js::OpCode::BrGe_I4:
        opcode = Js::OpCode::BrLe_I4;
        goto swap_srcs;

    case Js::OpCode::BrGt_A:
        opcode = Js::OpCode::BrLt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGt_A:
        opcode = Js::OpCode::BrNotLt_A;
        goto swap_srcs;

    case Js::OpCode::BrGt_I4:
        opcode = Js::OpCode::BrLt_I4;
        goto swap_srcs;

    case Js::OpCode::BrLe_A:
        opcode = Js::OpCode::BrGe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLe_A:
        opcode = Js::OpCode::BrNotGe_A;
        goto swap_srcs;

    case Js::OpCode::BrLe_I4:
        opcode = Js::OpCode::BrGe_I4;
        goto swap_srcs;

    case Js::OpCode::BrLt_A:
        opcode = Js::OpCode::BrGt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLt_A:
        opcode = Js::OpCode::BrNotGt_A;
        goto swap_srcs;

    case Js::OpCode::BrLt_I4:
        opcode = Js::OpCode::BrGt_I4;
        goto swap_srcs;

    // Loose equality may call valueOf/toString, so only swap when that can't be observed.
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
        // this == "" not the same as "" == this...
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        goto swap_srcs;

    case Js::OpCode::CmGe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLe_A;
        goto swap_srcs;

    case Js::OpCode::CmGt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLt_A;
        goto swap_srcs;

    case Js::OpCode::CmLe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGe_A;
        goto swap_srcs;

    case Js::OpCode::CmLt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGt_A;
        goto swap_srcs;

    case Js::OpCode::CallI:
    case Js::OpCode::CallIFixed:
    case Js::OpCode::NewScObject:
    case Js::OpCode::NewScObjectSpread:
    case Js::OpCode::NewScObjArray:
    case Js::OpCode::NewScObjArraySpread:
    case Js::OpCode::NewScObjectNoCtor:
        // Don't insert load to register if the function operand is a fixed function.
        if (instr->HasFixedFunctionAddressTarget())
        {
            return;
        }
        break;

    // Can't do add because <32 + "Hello"> isn't equal to <"Hello" + 32>
    // Lower can do the swap. Other op-codes listed below don't need immediate source hoisting, as the fast paths handle it,
    // or the lowering handles the hoisting.
    case Js::OpCode::Add_A:
        if (src1->IsFloat())
        {
            // Float add is numeric (no string concat possible), so it is commutative.
            goto swap_srcs;
        }
        return;

    case Js::OpCode::Sub_I4:
    case Js::OpCode::Neg_I4:
    case Js::OpCode::Not_I4:
    case Js::OpCode::NewScFunc:
    case Js::OpCode::NewScGenFunc:
    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScIntArray:
    case Js::OpCode::NewScFltArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::NewRegEx:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::FromVar:
    case Js::OpCode::Conv_Prim:
    case Js::OpCode::LdC_A_I4:
    case Js::OpCode::LdStr:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StartCall:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_InlineSpecialized:
    case Js::OpCode::ArgOut_A_SpreadArg:
    case Js::OpCode::InlineeEnd:
    case Js::OpCode::EndCallForPolymorphicInlinee:
    case Js::OpCode::InlineeMetaArg:
    case Js::OpCode::InlineBuiltInEnd:
    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::CallHelper:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
    case Js::OpCode::RuntimeTypeError:
    case Js::OpCode::RuntimeReferenceError:
    case Js::OpCode::Ret:
    case Js::OpCode::NewScObjectSimple:
    case Js::OpCode::NewScObjectLiteral:
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::CallDirect:
    case Js::OpCode::BrNotHasSideEffects:
    case Js::OpCode::NewConcatStrMulti:
    case Js::OpCode::NewConcatStrMultiBE:
    case Js::OpCode::ExtendArg_A:
#ifdef ENABLE_DOM_FAST_PATH
    case Js::OpCode::DOMFastPathGetter:
    case Js::OpCode::DOMFastPathSetter:
#endif
    case Js::OpCode::NewScopeSlots:
    case Js::OpCode::NewScopeSlotsWithoutPropIds:
    case Js::OpCode::NewStackScopeSlots:
    case Js::OpCode::IsInst:
    case Js::OpCode::BailOnEqual:
    case Js::OpCode::BailOnNotEqual:
    case Js::OpCode::StArrViewElem:
        return;
    }

    // No swap happened; only immediate src1 operands need hoisting below.
    if (!src1->IsImmediateOpnd())
    {
        return;
    }

    // The fast paths or lowering of the remaining instructions may not support handling immediate opnds for the first src. The
    // immediate src1 is hoisted here into a separate instruction.
    if (src1->IsIntConstOpnd())
    {
        IR::Instr *newInstr = instr->HoistSrc1(Js::OpCode::Ld_I4);
        ToInt32Dst(newInstr, newInstr->GetDst()->AsRegOpnd(), this->currentBlock);
    }
    else
    {
        instr->HoistSrc1(Js::OpCode::Ld_A);
    }
    src1 = instr->GetSrc1();
    src1->AsRegOpnd()->m_sym->SetIsConst();
}
  16849. // Clear the ValueMap pf the values invalidated by this instr.
  16850. void
  16851. GlobOpt::ProcessKills(IR::Instr *instr)
  16852. {
  16853. this->ProcessFieldKills(instr);
  16854. this->ProcessValueKills(instr);
  16855. this->ProcessArrayValueKills(instr);
  16856. }
  16857. bool
  16858. GlobOpt::OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives)
  16859. {
  16860. if(!loop->CanHoistInvariants())
  16861. {
  16862. return false;
  16863. }
  16864. Sym *sym;
  16865. switch(src->GetKind())
  16866. {
  16867. case IR::OpndKindAddr:
  16868. case IR::OpndKindFloatConst:
  16869. case IR::OpndKindIntConst:
  16870. return true;
  16871. case IR::OpndKindReg:
  16872. sym = src->AsRegOpnd()->m_sym;
  16873. break;
  16874. case IR::OpndKindSym:
  16875. sym = src->AsSymOpnd()->m_sym;
  16876. if (src->AsSymOpnd()->IsPropertySymOpnd())
  16877. {
  16878. if (src->AsSymOpnd()->AsPropertySymOpnd()->IsTypeChecked())
  16879. {
  16880. // We do not handle hoisting these yet. We might be hoisting this across the instr with the type check protecting this one.
  16881. // And somehow, the dead-store pass now removes the type check on that instr later on...
  16882. // For CheckFixedFld, there is no benefit hoisting these if they don't have a type check as they won't generate code.
  16883. return false;
  16884. }
  16885. }
  16886. break;
  16887. case IR::OpndKindHelperCall:
  16888. // Helper calls, like the private slot getter, can be invariant.
  16889. // Consider moving more math builtin to invariant?
  16890. return HelperMethodAttributes::IsInVariant(src->AsHelperCallOpnd()->m_fnHelper);
  16891. default:
  16892. return false;
  16893. }
  16894. return OptIsInvariant(sym, block, loop, srcVal, isNotTypeSpecConv, allowNonPrimitives);
  16895. }
// Determines whether a sym is invariant with respect to the given loop. A sym is invariant when its
// current value is the same value it had in the loop's landing pad (loop entry), and the required
// representation of the sym (int32/float64/simd for type-spec syms, var otherwise, field liveness for
// property syms) is live in the landing pad so a hoisted use can bind there. loopHeadValRef, when
// provided, receives the landing-pad value (or nullptr if the sym is not invariant).
bool
GlobOpt::OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef)
{
    // Allow callers that don't care about the landing-pad value to pass nullptr.
    Value *localLoopHeadVal;
    if(!loopHeadValRef)
    {
        loopHeadValRef = &localLoopHeadVal;
    }
    Value *&loopHeadVal = *loopHeadValRef;
    loopHeadVal = nullptr;

    if(!loop->CanHoistInvariants())
    {
        return false;
    }
    if (sym->IsStackSym())
    {
        if (sym->AsStackSym()->IsTypeSpec())
        {
            StackSym *varSym = sym->AsStackSym()->GetVarEquivSym(this->func);
            // Make sure the int32/float64 version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (sym->AsStackSym()->IsInt32())
            {
                Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
                if (!loop->landingPad->globOptData.liveInt32Syms->Test(varSym->m_id) ||
                    (loop->landingPad->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
                    !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)))
                {
                    // Either the int32 sym is not live in the landing pad, or it's lossy in the landing pad and the
                    // instruction's block is using the lossless version. In either case, the instruction cannot be hoisted
                    // without doing a conversion of this operand.
                    return false;
                }
            }
            else if (sym->AsStackSym()->IsFloat64())
            {
                if (!loop->landingPad->globOptData.liveFloat64Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }
            else
            {
                // The remaining type-spec kind is SIMD128; either the F4 or I4 version suffices.
                Assert(sym->AsStackSym()->IsSimd128());
                if (!loop->landingPad->globOptData.liveSimd128F4Syms->Test(varSym->m_id) && !loop->landingPad->globOptData.liveSimd128I4Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }
            // Value tracking below is done on the var version of the sym.
            sym = sym->AsStackSym()->GetVarEquivSym(this->func);
        }
        else
        {
            // Make sure the var version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (!loop->landingPad->globOptData.liveVarSyms->Test(sym->m_id))
            {
                return false;
            }
        }
    }
    else if (sym->IsPropertySym())
    {
        // Property syms are only invariant if the field is still live in the landing pad.
        if (!loop->landingPad->globOptData.liveFields->Test(sym->m_id))
        {
            return false;
        }
    }
    else
    {
        return false;
    }

    // We rely on having a value.
    if (srcVal == NULL)
    {
        return false;
    }

    // Can't hoist non-primitives, unless we have safeguards against valueof/tostring.
    if (!allowNonPrimitives && !srcVal->GetValueInfo()->IsPrimitive() && !this->IsTypeSpecialized(sym, loop->landingPad))
    {
        return false;
    }

    if(!isNotTypeSpecConv && loop->symsDefInLoop->Test(sym->m_id))
    {
        // Typically, a sym is considered invariant if it has the same value in the current block and in the loop landing pad.
        // The sym may have had a different value earlier in the loop or on the back-edge, but as long as it's reassigned to its
        // value outside the loop, it would be considered invariant in this block. Consider that case:
        //     s1 = s2[invariant]
        //     <loop start>
        //         s1 = s2[invariant]
        //                              // s1 now has the same value as in the landing pad, and is considered invariant
        //         s1 += s3
        //                              // s1 is not invariant here, or on the back-edge
        //         ++s3                 // s3 is not invariant, so the add above cannot be hoisted
        //     <loop end>
        //
        // A problem occurs at the point of (s1 += s3) when:
        //     - At (s1 = s2) inside the loop, s1 was made to be the sym store of that value. This by itself is legal, because
        //       after that transfer, s1 and s2 have the same value.
        //     - (s1 += s3) is type-specialized but s1 is not specialized in the loop header. This happens when s1 is not
        //       specialized entering the loop, and since s1 is not used before it's defined in the loop, it's not specialized
        //       on back-edges.
        //
        // With that, at (s1 += s3), the conversion of s1 to the type-specialized version would be hoisted because s1 is
        // invariant just before that instruction. Since this add is specialized, the specialized version of the sym is modified
        // in the loop without a reassignment at (s1 = s2) inside the loop, and (s1 += s3) would then use an incorrect value of
        // s1 (it would use the value of s1 from the previous loop iteration, instead of using the value of s2).
        //
        // The problem here, is that we cannot hoist the conversion of s1 into its specialized version across the assignment
        // (s1 = s2) inside the loop. So for the purposes of type specialization, don't consider a sym invariant if it has a def
        // inside the loop.
        return false;
    }

    // A symbol is invariant if it's current value is the same as it was upon entering the loop.
    loopHeadVal = this->FindValue(loop->landingPad->globOptData.symToValueMap, sym);
    if (loopHeadVal == NULL || loopHeadVal->GetValueNumber() != srcVal->GetValueNumber())
    {
        return false;
    }

    // For values with an int range, require additionally that the range is the same as in the landing pad, as the range may
    // have been changed on this path based on branches, and int specialization and invariant hoisting may rely on the range
    // being the same. For type spec conversions, only require that if the value is an int constant in the current block, that
    // it is also an int constant with the same value in the landing pad. Other range differences don't matter for type spec.
    IntConstantBounds srcIntConstantBounds, loopHeadIntConstantBounds;
    if(srcVal->GetValueInfo()->TryGetIntConstantBounds(&srcIntConstantBounds) &&
        (isNotTypeSpecConv || srcIntConstantBounds.IsConstant()) &&
        (
            !loopHeadVal->GetValueInfo()->TryGetIntConstantBounds(&loopHeadIntConstantBounds) ||
            loopHeadIntConstantBounds.LowerBound() != srcIntConstantBounds.LowerBound() ||
            loopHeadIntConstantBounds.UpperBound() != srcIntConstantBounds.UpperBound()
        ))
    {
        return false;
    }

    return true;
}
  17036. bool
  17037. GlobOpt::OptIsInvariant(
  17038. IR::Instr *instr,
  17039. BasicBlock *block,
  17040. Loop *loop,
  17041. Value *src1Val,
  17042. Value *src2Val,
  17043. bool isNotTypeSpecConv,
  17044. const bool forceInvariantHoisting)
  17045. {
  17046. if (!loop->CanHoistInvariants())
  17047. {
  17048. return false;
  17049. }
  17050. if (!OpCodeAttr::CanCSE(instr->m_opcode))
  17051. {
  17052. return false;
  17053. }
  17054. bool allowNonPrimitives = !OpCodeAttr::OpndHasImplicitCall(instr->m_opcode);
  17055. switch(instr->m_opcode)
  17056. {
  17057. // Can't legally hoist these
  17058. case Js::OpCode::LdLen_A:
  17059. return false;
  17060. //Can't Hoist BailOnNotStackArgs, as it is necessary as InlineArgsOptimization relies on this opcode
  17061. //to decide whether to throw rejit exception or not.
  17062. case Js::OpCode::BailOnNotStackArgs:
  17063. return false;
  17064. // Usually not worth hoisting these
  17065. case Js::OpCode::LdStr:
  17066. case Js::OpCode::Ld_A:
  17067. case Js::OpCode::Ld_I4:
  17068. case Js::OpCode::LdC_A_I4:
  17069. if(!forceInvariantHoisting)
  17070. {
  17071. return false;
  17072. }
  17073. break;
  17074. // Can't hoist these outside the function it's for. The LdArgumentsFromFrame for an inlinee depends on the inlinee meta arg
  17075. // that holds the arguments object, which is only initialized at the start of the inlinee. So, can't hoist this outside the
  17076. // inlinee.
  17077. case Js::OpCode::LdArgumentsFromFrame:
  17078. if(instr->m_func != loop->GetFunc())
  17079. {
  17080. return false;
  17081. }
  17082. break;
  17083. case Js::OpCode::FromVar:
  17084. if (instr->HasBailOutInfo())
  17085. {
  17086. allowNonPrimitives = true;
  17087. }
  17088. break;
  17089. }
  17090. IR::Opnd *dst = instr->GetDst();
  17091. if (dst && !dst->IsRegOpnd())
  17092. {
  17093. return false;
  17094. }
  17095. IR::Opnd *src1 = instr->GetSrc1();
  17096. if (src1)
  17097. {
  17098. if (!this->OptIsInvariant(src1, block, loop, src1Val, isNotTypeSpecConv, allowNonPrimitives))
  17099. {
  17100. return false;
  17101. }
  17102. IR::Opnd *src2 = instr->GetSrc2();
  17103. if (src2)
  17104. {
  17105. if (!this->OptIsInvariant(src2, block, loop, src2Val, isNotTypeSpecConv, allowNonPrimitives))
  17106. {
  17107. return false;
  17108. }
  17109. }
  17110. }
  17111. return true;
  17112. }
  17113. bool
  17114. GlobOpt::OptDstIsInvariant(IR::RegOpnd *dst)
  17115. {
  17116. StackSym *dstSym = dst->m_sym;
  17117. if (dstSym->IsTypeSpec())
  17118. {
  17119. // The type-specialized sym may be single def, but not the original...
  17120. dstSym = dstSym->GetVarEquivSym(this->func);
  17121. }
  17122. return (dstSym->m_isSingleDef);
  17123. }
  17124. void
  17125. GlobOpt::OptHoistToLandingPadUpdateValueType(
  17126. BasicBlock* landingPad,
  17127. IR::Instr* instr,
  17128. IR::Opnd* opnd,
  17129. Value* opndVal)
  17130. {
  17131. if (instr->m_opcode == Js::OpCode::FromVar)
  17132. {
  17133. return;
  17134. }
  17135. Sym* opndSym = opnd->GetSym();;
  17136. if (opndSym)
  17137. {
  17138. if (opndVal == nullptr)
  17139. {
  17140. opndVal = FindValue(opndSym);
  17141. }
  17142. Value* opndValueInLandingPad = FindValue(landingPad->globOptData.symToValueMap, opndSym);
  17143. Assert(opndVal->GetValueNumber() == opndValueInLandingPad->GetValueNumber());
  17144. opnd->SetValueType(opndValueInLandingPad->GetValueInfo()->Type());
  17145. if (opndSym->IsPropertySym())
  17146. {
  17147. // Also fix valueInfo on objPtr
  17148. StackSym* opndObjPtrSym = opndSym->AsPropertySym()->m_stackSym;
  17149. Value* opndObjPtrSymValInLandingPad = FindValue(landingPad->globOptData.symToValueMap, opndObjPtrSym);
  17150. ValueInfo* opndObjPtrSymValueInfoInLandingPad = opndObjPtrSymValInLandingPad->GetValueInfo();
  17151. opnd->AsSymOpnd()->SetPropertyOwnerValueType(opndObjPtrSymValueInfoInLandingPad->Type());
  17152. }
  17153. }
  17154. }
// Hoists 'instr', already proven invariant in 'loop', out of 'block' and into the
// loop's landing pad (or just before the loop's shared bailout instruction, if one
// exists). Along the way it:
//   - refreshes source operand value types to the landing-pad view,
//   - adds a bailout to a hoisted FromVar when its new position needs one,
//   - introduces a copy (new dst) when the original dst cannot be hoisted,
//   - replicates the dst's liveness and value info through all blocks between the
//     landing pad and 'block'.
// 'lossy' indicates a lossy int conversion; 'isNotTypeSpecConv' indicates this is
// not a type-spec conversion (ToVar/FromVar-style) instruction.
void
GlobOpt::OptHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *dstVal,
    Value *const src1Val,
    bool isNotTypeSpecConv,
    bool lossy,
    IR::BailOutKind bailoutKind)
{
    BasicBlock *landingPad = loop->landingPad;

    IR::Opnd* src1 = instr->GetSrc1();
    if (src1)
    {
        // We are hoisting this instruction possibly past other uses, which might invalidate the last use info. Clear it.
        OptHoistToLandingPadUpdateValueType(landingPad, instr, src1, src1Val);

        if (src1->IsRegOpnd())
        {
            src1->AsRegOpnd()->m_isTempLastUse = false;
        }

        IR::Opnd* src2 = instr->GetSrc2();
        if (src2)
        {
            OptHoistToLandingPadUpdateValueType(landingPad, instr, src2, nullptr);

            if (src2->IsRegOpnd())
            {
                src2->AsRegOpnd()->m_isTempLastUse = false;
            }
        }
    }

    IR::RegOpnd *dst = instr->GetDst() ? instr->GetDst()->AsRegOpnd() : nullptr;
    if(dst)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnGe_I4:
            // These operations are a special case. They generate a lossy int value, and the var sym is initialized using
            // Conv_Bool. A sym cannot be live only as a lossy int sym, the var needs to be live as well since the lossy int
            // sym cannot be used to convert to var. We don't know however, whether the Conv_Bool will be hoisted. The idea
            // currently is that the sym is only used on the path in which it is initialized inside the loop. So, don't
            // hoist any liveness info for the dst.
            if (!this->GetIsAsmJSFunc())
            {
                lossy = true;
            }
            break;

        case Js::OpCode::FromVar:
        {
            StackSym* src1StackSym = IR::RegOpnd::TryGetStackSym(instr->GetSrc1());

            if (instr->HasBailOutInfo())
            {
                // An existing bailout must be one of the conversion bailout kinds.
                IR::BailOutKind instrBailoutKind = instr->GetBailOutKind();
                Assert(instrBailoutKind == IR::BailOutIntOnly ||
                    instrBailoutKind == IR::BailOutExpectingInteger ||
                    instrBailoutKind == IR::BailOutOnNotPrimitive ||
                    instrBailoutKind == IR::BailOutNumberOnly ||
                    instrBailoutKind == IR::BailOutPrimitiveButString ||
                    instrBailoutKind == IR::BailOutSimd128F4Only ||
                    instrBailoutKind == IR::BailOutSimd128I4Only);
            }
            else if (src1StackSym && bailoutKind != IR::BailOutInvalid)
            {
                // We may be hoisting FromVar from a region where it didn't need a bailout (src1 had a definite value type) to a region
                // where it would. In such cases, the FromVar needs a bailout based on the value type of src1 in its new position.
                Assert(!src1StackSym->IsTypeSpec());
                Value* landingPadSrc1val = FindValue(landingPad->globOptData.symToValueMap, src1StackSym);
                Assert(src1Val->GetValueNumber() == landingPadSrc1val->GetValueNumber());

                ValueInfo *src1ValueInfo = src1Val->GetValueInfo();
                ValueInfo *landingPadSrc1ValueInfo = landingPadSrc1val->GetValueInfo();
                IRType dstType = dst->GetType();

                // Converts this instruction into a bailout instruction with 'bailoutKind',
                // anchored at the loop's shared bail target.
                const auto AddBailOutToFromVar = [&]()
                {
                    instr->GetSrc1()->SetValueType(landingPadSrc1val->GetValueInfo()->Type());
                    EnsureBailTarget(loop);
                    if (block->IsLandingPad())
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind, loop->bailOutInfo->bailOutOffset);
                    }
                    else
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind);
                    }
                };

                // A definite type in the source position and not a definite type in the destination (landing pad)
                // and no bailout on the instruction; we should put a bailout on the hoisted instruction.
                if (dstType == TyInt32)
                {
                    if (lossy)
                    {
                        if ((src1ValueInfo->IsPrimitive() || IsTypeSpecialized(src1StackSym, block)) &&                // didn't need a lossy type spec bailout in the source block
                            (!landingPadSrc1ValueInfo->IsPrimitive() && !IsTypeSpecialized(src1StackSym, landingPad))) // needs a lossy type spec bailout in the landing pad
                        {
                            bailoutKind = IR::BailOutOnNotPrimitive;
                            AddBailOutToFromVar();
                        }
                    }
                    else if (src1ValueInfo->IsInt() && !landingPadSrc1ValueInfo->IsInt())
                    {
                        AddBailOutToFromVar();
                    }
                }
                else if ((dstType == TyFloat64 && src1ValueInfo->IsNumber() && !landingPadSrc1ValueInfo->IsNumber()) ||
                    (IRType_IsSimd128(dstType) && src1ValueInfo->IsSimd128() && !landingPadSrc1ValueInfo->IsSimd128()))
                {
                    AddBailOutToFromVar();
                }
            }

            break;
        }
        }

        if (dstVal == NULL)
        {
            dstVal = this->NewGenericValue(ValueType::Uninitialized, dst);
        }

        // ToVar/FromVar don't need a new dst because it has to be invariant if their src is invariant.
        bool dstDoesntNeedLoad = (!isNotTypeSpecConv && instr->m_opcode != Js::OpCode::LdC_A_I4);

        StackSym *varSym = dst->m_sym;

        if (varSym->IsTypeSpec())
        {
            varSym = varSym->GetVarEquivSym(this->func);
        }

        Value *const landingPadDstVal = FindValue(loop->landingPad->globOptData.symToValueMap, varSym);
        if(landingPadDstVal
                ? dstVal->GetValueNumber() != landingPadDstVal->GetValueNumber()
                : loop->symsDefInLoop->Test(varSym->m_id))
        {
            // We need a temp for FromVar/ToVar if dst changes in the loop.
            dstDoesntNeedLoad = false;
        }

        if (!dstDoesntNeedLoad && this->OptDstIsInvariant(dst) == false)
        {
            // Keep dst in place, hoist instr using a new dst.
            instr->UnlinkDst();

            // Set type specialization info correctly for this new sym
            StackSym *copyVarSym;
            IR::RegOpnd *copyReg;
            if (dst->m_sym->IsTypeSpec())
            {
                copyVarSym = StackSym::New(TyVar, instr->m_func);
                StackSym *copySym = copyVarSym;
                if (dst->m_sym->IsInt32())
                {
                    if(lossy)
                    {
                        // The new sym would only be live as a lossy int since we're only hoisting the store to the int version
                        // of the sym, and cannot be converted to var. It is not legal to have a sym only live as a lossy int,
                        // so don't update liveness info for this sym.
                    }
                    else
                    {
                        block->globOptData.liveInt32Syms->Set(copyVarSym->m_id);
                    }
                    copySym = copySym->GetInt32EquivSym(instr->m_func);
                }
                else if (dst->m_sym->IsFloat64())
                {
                    block->globOptData.liveFloat64Syms->Set(copyVarSym->m_id);
                    copySym = copySym->GetFloat64EquivSym(instr->m_func);
                }
                else if (dst->IsSimd128())
                {
                    // SIMD_JS
                    if (dst->IsSimd128F4())
                    {
                        block->globOptData.liveSimd128F4Syms->Set(copyVarSym->m_id);
                        copySym = copySym->GetSimd128F4EquivSym(instr->m_func);
                    }
                    else
                    {
                        Assert(dst->IsSimd128I4());
                        block->globOptData.liveSimd128I4Syms->Set(copyVarSym->m_id);
                        copySym = copySym->GetSimd128I4EquivSym(instr->m_func);
                    }
                }
                copyReg = IR::RegOpnd::New(copySym, copySym->GetType(), instr->m_func);
            }
            else
            {
                copyReg = IR::RegOpnd::New(dst->GetType(), instr->m_func);
                copyVarSym = copyReg->m_sym;
                block->globOptData.liveVarSyms->Set(copyVarSym->m_id);
            }

            copyReg->SetValueType(dst->GetValueType());
            // The original dst is now assigned from the copy reg inside the loop,
            // while the hoisted instruction writes the copy reg in the landing pad.
            IR::Instr *copyInstr = IR::Instr::New(Js::OpCode::Ld_A, dst, copyReg, instr->m_func);
            copyInstr->SetByteCodeOffset(instr);
            instr->SetDst(copyReg);
            instr->InsertBefore(copyInstr);

            dst->m_sym->m_mayNotBeTempLastUse = true;
            if (instr->GetSrc1() && instr->GetSrc1()->IsImmediateOpnd())
            {
                // Propagate IsIntConst if appropriate
                switch(instr->m_opcode)
                {
                case Js::OpCode::Ld_A:
                case Js::OpCode::Ld_I4:
                case Js::OpCode::LdC_A_I4:
                    copyReg->m_sym->SetIsConst();
                    break;
                }
            }

            ValueInfo *dstValueInfo = dstVal->GetValueInfo();
            if((!dstValueInfo->GetSymStore() || dstValueInfo->GetSymStore() == varSym) && !lossy)
            {
                // The destination's value may have been transferred from one of the invariant sources, in which case we should
                // keep the sym store intact, as that sym will likely have a better lifetime than this new copy sym. For
                // instance, if we're inside a conditioned block, because we don't make the copy sym live and set its value in
                // all preceding blocks, this sym would not be live after exiting this block, causing this value to not
                // participate in copy-prop after this block.
                this->SetSymStoreDirect(dstValueInfo, copyVarSym);
            }
            this->InsertNewValue(&block->globOptData, dstVal, copyReg);
            dst = copyReg;
        }
    }

    // Move to landing pad
    block->UnlinkInstr(instr);

    if (loop->bailOutInfo->bailOutInstr)
    {
        loop->bailOutInfo->bailOutInstr->InsertBefore(instr);
    }
    else
    {
        landingPad->InsertAfter(instr);
    }

    GlobOpt::MarkNonByteCodeUsed(instr);

    if (instr->HasBailOutInfo() || instr->HasAuxBailOut())
    {
        Assert(loop->bailOutInfo);
        EnsureBailTarget(loop);

        // Copy bailout info of loop top.
        if (instr->ReplaceBailOutInfo(loop->bailOutInfo))
        {
            // if the old bailout is deleted, reset capturedvalues cached in block
            block->globOptData.capturedValues = nullptr;
            block->globOptData.capturedValuesCandidate = nullptr;
        }
    }

    if(!dst)
    {
        return;
    }

    // The bailout info's liveness for the dst sym is not updated in loop landing pads because bailout instructions previously
    // hoisted into the loop's landing pad may bail out before the current type of the dst sym became live (perhaps due to this
    // instruction). Since the landing pad will have a shared bailout point, the bailout info cannot assume that the current
    // type of the dst sym was live during every bailout hoisted into the landing pad.

    StackSym *const dstSym = dst->m_sym;
    StackSym *const dstVarSym = dstSym->IsTypeSpec() ? dstSym->GetVarEquivSym(nullptr) : dstSym;
    Assert(dstVarSym);
    if(isNotTypeSpecConv || !IsLive(dstVarSym, loop->landingPad))
    {
        // A new dst is being hoisted, or the same single-def dst that would not be live before this block. So, make it live and
        // update the value info with the same value info in this block.

        if(lossy)
        {
            // This is a lossy conversion to int. The instruction was given a new dst specifically for hoisting, so this new dst
            // will not be live as a var before this block. A sym cannot be live only as a lossy int sym, the var needs to be
            // live as well since the lossy int sym cannot be used to convert to var. Since the var version of the sym is not
            // going to be initialized, don't hoist any liveness info for the dst. The sym is only going to be used on the path
            // in which it is initialized inside the loop.
            Assert(dstSym->IsTypeSpec());
            Assert(dstSym->IsInt32());
            return;
        }

        // Check if the dst value was transferred from the src. If so, the value transfer needs to be replicated.
        bool isTransfer = dstVal == src1Val;

        StackSym *transferValueOfSym = nullptr;
        if(isTransfer)
        {
            Assert(instr->GetSrc1());
            if(instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *src1Sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if(src1Sym->IsTypeSpec())
                {
                    src1Sym = src1Sym->GetVarEquivSym(nullptr);
                    Assert(src1Sym);
                }
                if(dstVal == FindValue(block->globOptData.symToValueMap, src1Sym))
                {
                    transferValueOfSym = src1Sym;
                }
            }
        }

        // SIMD_JS
        if (instr->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // Check if we should have CSE'ed this EA
            Assert(instr->GetSrc1());

            // If the dstVal symstore is not the dst itself, then we copied the Value from another expression.
            if (dstVal->GetValueInfo()->GetSymStore() != instr->GetDst()->GetStackSym())
            {
                isTransfer = true;
                transferValueOfSym = dstVal->GetValueInfo()->GetSymStore()->AsStackSym();
            }
        }

        // Replicate the dst's liveness and value through every block between this
        // block and the landing pad so the hoisted value is visible along the way.
        const ValueNumber dstValueNumber = dstVal->GetValueNumber();
        ValueNumber dstNewValueNumber = InvalidValueNumber;
        for(InvariantBlockBackwardIterator it(this, block, loop->landingPad, nullptr); it.IsValid(); it.MoveNext())
        {
            BasicBlock *const hoistBlock = it.Block();
            GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;
            Assert(!IsLive(dstVarSym, &hoistBlockData));
            MakeLive(dstSym, &hoistBlockData, lossy);

            Value *newDstValue;
            do
            {
                if(isTransfer)
                {
                    if(transferValueOfSym)
                    {
                        newDstValue = FindValue(hoistBlockData.symToValueMap, transferValueOfSym);
                        if(newDstValue && newDstValue->GetValueNumber() == dstValueNumber)
                        {
                            break;
                        }
                    }

                    // It's a transfer, but we don't have a sym whose value number matches in the target block. Use a new value
                    // number since we don't know if there is already a value with the current number for the target block.
                    if(dstNewValueNumber == InvalidValueNumber)
                    {
                        dstNewValueNumber = NewValueNumber();
                    }
                    newDstValue = CopyValue(dstVal, dstNewValueNumber);
                    break;
                }

                newDstValue = CopyValue(dstVal, dstValueNumber);
            } while(false);

            SetValue(&hoistBlockData, newDstValue, dstVarSym);
        }
        return;
    }

#if DBG
    if(instr->GetSrc1()->IsRegOpnd()) // Type spec conversion may load a constant into a dst sym
    {
        StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        Assert(srcSym != dstSym); // Type spec conversion must be changing the type, so the syms must be different
        StackSym *const srcVarSym = srcSym->IsTypeSpec() ? srcSym->GetVarEquivSym(nullptr) : srcSym;
        Assert(srcVarSym == dstVarSym); // Type spec conversion must be between variants of the same var sym
    }
#endif

    // Decide whether the value type should be narrowed (to int or float/simd) in
    // the blocks the conversion is being hoisted through.
    bool changeValueType = false, changeValueTypeToInt = false;
    if(dstSym->IsTypeSpec())
    {
        if(dst->IsInt32())
        {
            if(!lossy)
            {
                Assert(
                    !instr->HasBailOutInfo() ||
                    instr->GetBailOutKind() == IR::BailOutIntOnly ||
                    instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                changeValueType = changeValueTypeToInt = true;
            }
        }
        else if (dst->IsFloat64())
        {
            if(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutNumberOnly)
            {
                changeValueType = true;
            }
        }
        else
        {
            // SIMD_JS
            Assert(dst->IsSimd128());
            if (instr->HasBailOutInfo() &&
                (instr->GetBailOutKind() == IR::BailOutSimd128F4Only || instr->GetBailOutKind() == IR::BailOutSimd128I4Only))
            {
                changeValueType = true;
            }
        }
    }

    ValueInfo *previousValueInfoBeforeUpdate = nullptr, *previousValueInfoAfterUpdate = nullptr;
    for(InvariantBlockBackwardIterator it(
            this,
            block,
            loop->landingPad,
            dstVarSym,
            dstVal->GetValueNumber());
        it.IsValid();
        it.MoveNext())
    {
        BasicBlock *const hoistBlock = it.Block();
        GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

    #if DBG
        // TODO: There are some odd cases with field hoisting where the sym is invariant in only part of the loop and the info
        // does not flow through all blocks. Un-comment the verification below after PRE replaces field hoisting.

        //// Verify that the src sym is live as the required type, and that the conversion is valid
        //Assert(IsLive(dstVarSym, &hoistBlockData));
        //if(instr->GetSrc1()->IsRegOpnd())
        //{
        //    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
        //    StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        //    if(srcSym->IsTypeSpec())
        //    {
        //        if(src->IsInt32())
        //        {
        //            Assert(hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //            Assert(!hoistBlockData.liveLossyInt32Syms->Test(dstVarSym->m_id)); // shouldn't try to convert a lossy int32 to anything
        //        }
        //        else
        //        {
        //            Assert(src->IsFloat64());
        //            Assert(hoistBlockData.liveFloat64Syms->Test(dstVarSym->m_id));
        //            if(dstSym->IsTypeSpec() && dst->IsInt32())
        //            {
        //                Assert(lossy); // shouldn't try to do a lossless conversion from float64 to int32
        //            }
        //        }
        //    }
        //    else
        //    {
        //        Assert(hoistBlockData.liveVarSyms->Test(dstVarSym->m_id));
        //    }
        //}
        //if(dstSym->IsTypeSpec() && dst->IsInt32())
        //{
        //    // If the sym is already specialized as required in the block to which we are attempting to hoist the conversion,
        //    // that info should have flowed into this block
        //    if(lossy)
        //    {
        //        Assert(!hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //    }
        //    else
        //    {
        //        Assert(!IsInt32TypeSpecialized(dstVarSym, hoistBlock));
        //    }
        //}
    #endif

        MakeLive(dstSym, &hoistBlockData, lossy);

        if(!changeValueType)
        {
            continue;
        }

        // Share specialized value infos between consecutive blocks that held the
        // same value info, to avoid redundant copies.
        Value *const hoistBlockValue = it.InvariantSymValue();
        ValueInfo *const hoistBlockValueInfo = hoistBlockValue->GetValueInfo();
        if(hoistBlockValueInfo == previousValueInfoBeforeUpdate)
        {
            if(hoistBlockValueInfo != previousValueInfoAfterUpdate)
            {
                HoistInvariantValueInfo(previousValueInfoAfterUpdate, hoistBlockValue, hoistBlock);
            }
        }
        else
        {
            previousValueInfoBeforeUpdate = hoistBlockValueInfo;
            ValueInfo *const newValueInfo =
                changeValueTypeToInt
                    ? hoistBlockValueInfo->SpecializeToInt32(alloc)
                    : hoistBlockValueInfo->SpecializeToFloat64(alloc);
            previousValueInfoAfterUpdate = newValueInfo;
            ChangeValueInfo(changeValueTypeToInt ? nullptr : hoistBlock, hoistBlockValue, newValueInfo);
        }
    }
}
  17620. bool
  17621. GlobOpt::TryHoistInvariant(
  17622. IR::Instr *instr,
  17623. BasicBlock *block,
  17624. Value *dstVal,
  17625. Value *src1Val,
  17626. Value *src2Val,
  17627. bool isNotTypeSpecConv,
  17628. const bool lossy,
  17629. const bool forceInvariantHoisting,
  17630. IR::BailOutKind bailoutKind)
  17631. {
  17632. Assert(!this->IsLoopPrePass());
  17633. if (OptIsInvariant(instr, block, block->loop, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
  17634. {
  17635. #if DBG
  17636. if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::InvariantsPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
  17637. {
  17638. Output::Print(_u(" **** INVARIANT *** "));
  17639. instr->Dump();
  17640. }
  17641. #endif
  17642. #if ENABLE_DEBUG_CONFIG_OPTIONS
  17643. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::InvariantsPhase))
  17644. {
  17645. Output::Print(_u(" **** INVARIANT *** "));
  17646. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  17647. }
  17648. #endif
  17649. Loop *loop = block->loop;
  17650. // Try hoisting from to outer most loop
  17651. while (loop->parent && OptIsInvariant(instr, block, loop->parent, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
  17652. {
  17653. loop = loop->parent;
  17654. }
  17655. // Record the byte code use here since we are going to move this instruction up
  17656. if (isNotTypeSpecConv)
  17657. {
  17658. InsertNoImplicitCallUses(instr);
  17659. this->CaptureByteCodeSymUses(instr);
  17660. this->InsertByteCodeUses(instr, true);
  17661. }
  17662. #if DBG
  17663. else
  17664. {
  17665. PropertySym *propertySymUse = NULL;
  17666. NoRecoverMemoryJitArenaAllocator tempAllocator(_u("BE-GlobOpt-Temp"), this->alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  17667. BVSparse<JitArenaAllocator> * tempByteCodeUse = JitAnew(&tempAllocator, BVSparse<JitArenaAllocator>, &tempAllocator);
  17668. GlobOpt::TrackByteCodeSymUsed(instr, tempByteCodeUse, &propertySymUse);
  17669. Assert(tempByteCodeUse->Count() == 0 && propertySymUse == NULL);
  17670. }
  17671. #endif
  17672. OptHoistInvariant(instr, block, loop, dstVal, src1Val, isNotTypeSpecConv, lossy, bailoutKind);
  17673. return true;
  17674. }
  17675. return false;
  17676. }
// Iterates backward over the blocks strictly between 'exclusiveBeginBlock' (not
// visited) and 'inclusiveEndBlock' (visited last). When 'invariantSym' is given,
// blocks where that sym's value number differs from 'invariantSymValueNumber' are
// skipped; pass nullptr/InvalidValueNumber to visit every non-deleted block that
// still has glob-opt data.
InvariantBlockBackwardIterator::InvariantBlockBackwardIterator(
    GlobOpt *const globOpt,
    BasicBlock *const exclusiveBeginBlock,
    BasicBlock *const inclusiveEndBlock,
    StackSym *const invariantSym,
    const ValueNumber invariantSymValueNumber)
    : globOpt(globOpt),
    exclusiveEndBlock(inclusiveEndBlock->prev),
    invariantSym(invariantSym),
    invariantSymValueNumber(invariantSymValueNumber),
    block(exclusiveBeginBlock)
#if DBG
    ,
    inclusiveEndBlock(inclusiveEndBlock)
#endif
{
    Assert(exclusiveBeginBlock);
    Assert(inclusiveEndBlock);
    Assert(!inclusiveEndBlock->isDeleted);
    Assert(exclusiveBeginBlock != inclusiveEndBlock);
    // Either both a sym and a valid value number are supplied, or neither.
    Assert(!invariantSym == (invariantSymValueNumber == InvalidValueNumber));

    // Position the iterator on the first visitable block.
    MoveNext();
}
  17700. bool
  17701. InvariantBlockBackwardIterator::IsValid() const
  17702. {
  17703. return block != exclusiveEndBlock;
  17704. }
// Steps backward to the previous visitable block, skipping deleted blocks, blocks
// whose glob-opt data has already been merged away, and (when tracking a sym)
// blocks where the sym's value number no longer matches.
void
InvariantBlockBackwardIterator::MoveNext()
{
    Assert(IsValid());

    while(true)
    {
    #if DBG
        BasicBlock *const previouslyIteratedBlock = block;
    #endif
        block = block->prev;
        if(!IsValid())
        {
            // Walked past the end; the last visited block must have been the inclusive end block.
            Assert(previouslyIteratedBlock == inclusiveEndBlock);
            break;
        }

        if(block->isDeleted)
        {
            continue;
        }

        if(!block->globOptData.HasData())
        {
            // This block's info has already been merged with all of its successors
            continue;
        }

        if(!invariantSym)
        {
            // Not tracking a particular sym; every surviving block is visitable.
            break;
        }

        invariantSymValue = globOpt->FindValue(block->globOptData.symToValueMap, invariantSym);
        if(!invariantSymValue || invariantSymValue->GetValueNumber() != invariantSymValueNumber)
        {
            // BailOnNoProfile and throw blocks are not moved outside loops. A sym table cleanup on these paths may delete the
            // values. Field hoisting also has some odd cases where the hoisted stack sym is invariant in only part of the loop.
            continue;
        }
        break;
    }
}
  17743. BasicBlock *
  17744. InvariantBlockBackwardIterator::Block() const
  17745. {
  17746. Assert(IsValid());
  17747. return block;
  17748. }
  17749. Value *
  17750. InvariantBlockBackwardIterator::InvariantSymValue() const
  17751. {
  17752. Assert(IsValid());
  17753. Assert(invariantSym);
  17754. return invariantSymValue;
  17755. }
  17756. void
  17757. GlobOpt::HoistInvariantValueInfo(
  17758. ValueInfo *const invariantValueInfoToHoist,
  17759. Value *const valueToUpdate,
  17760. BasicBlock *const targetBlock)
  17761. {
  17762. Assert(invariantValueInfoToHoist);
  17763. Assert(valueToUpdate);
  17764. Assert(targetBlock);
  17765. // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
  17766. Assert(!invariantValueInfoToHoist->IsJsType());
  17767. Sym *const symStore = valueToUpdate->GetValueInfo()->GetSymStore();
  17768. ValueInfo *newValueInfo;
  17769. if(invariantValueInfoToHoist->GetSymStore() == symStore)
  17770. {
  17771. newValueInfo = invariantValueInfoToHoist;
  17772. }
  17773. else
  17774. {
  17775. newValueInfo = invariantValueInfoToHoist->Copy(alloc);
  17776. this->SetSymStoreDirect(newValueInfo, symStore);
  17777. }
  17778. ChangeValueInfo(targetBlock, valueToUpdate, newValueInfo);
  17779. }
  17780. // static
  17781. bool
  17782. GlobOpt::DoInlineArgsOpt(Func* func)
  17783. {
  17784. Func* topFunc = func->GetTopFunc();
  17785. Assert(topFunc != func);
  17786. bool doInlineArgsOpt =
  17787. !PHASE_OFF(Js::InlineArgsOptPhase, topFunc) &&
  17788. !func->GetHasCalls() &&
  17789. !func->GetHasUnoptimizedArgumentsAcccess() &&
  17790. func->m_canDoInlineArgsOpt;
  17791. return doInlineArgsOpt;
  17792. }
  17793. bool
  17794. GlobOpt::IsSwitchOptEnabled(Func* func)
  17795. {
  17796. Assert(func->IsTopFunc());
  17797. return !PHASE_OFF(Js::SwitchOptPhase, func) && (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsSwitchOptDisabled()) && !IsTypeSpecPhaseOff(func)
  17798. && func->DoGlobOpt() && !func->HasTry();
  17799. }
  17800. bool
  17801. GlobOpt::DoConstFold() const
  17802. {
  17803. return !PHASE_OFF(Js::ConstFoldPhase, func);
  17804. }
  17805. bool
  17806. GlobOpt::IsTypeSpecPhaseOff(Func *func)
  17807. {
  17808. return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode() || !func->DoGlobOptsForGeneratorFunc();
  17809. }
  17810. bool
  17811. GlobOpt::DoTypeSpec() const
  17812. {
  17813. return doTypeSpec;
  17814. }
  17815. bool
  17816. GlobOpt::DoAggressiveIntTypeSpec(Func* func)
  17817. {
  17818. return
  17819. !PHASE_OFF(Js::AggressiveIntTypeSpecPhase, func) &&
  17820. !IsTypeSpecPhaseOff(func) &&
  17821. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsAggressiveIntTypeSpecDisabled(func->IsLoopBody()));
  17822. }
  17823. bool
  17824. GlobOpt::DoAggressiveIntTypeSpec() const
  17825. {
  17826. return doAggressiveIntTypeSpec;
  17827. }
  17828. bool
  17829. GlobOpt::DoAggressiveMulIntTypeSpec() const
  17830. {
  17831. return doAggressiveMulIntTypeSpec;
  17832. }
  17833. bool
  17834. GlobOpt::DoDivIntTypeSpec() const
  17835. {
  17836. return doDivIntTypeSpec;
  17837. }
  17838. // static
  17839. bool
  17840. GlobOpt::DoLossyIntTypeSpec(Func* func)
  17841. {
  17842. return
  17843. !PHASE_OFF(Js::LossyIntTypeSpecPhase, func) &&
  17844. !IsTypeSpecPhaseOff(func) &&
  17845. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLossyIntTypeSpecDisabled());
  17846. }
  17847. bool
  17848. GlobOpt::DoLossyIntTypeSpec() const
  17849. {
  17850. return doLossyIntTypeSpec;
  17851. }
  17852. // static
  17853. bool
  17854. GlobOpt::DoFloatTypeSpec(Func* func)
  17855. {
  17856. return
  17857. !PHASE_OFF(Js::FloatTypeSpecPhase, func) &&
  17858. !IsTypeSpecPhaseOff(func) &&
  17859. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsFloatTypeSpecDisabled()) &&
  17860. AutoSystemInfo::Data.SSE2Available();
  17861. }
  17862. bool
  17863. GlobOpt::DoFloatTypeSpec() const
  17864. {
  17865. return doFloatTypeSpec;
  17866. }
  17867. bool
  17868. GlobOpt::DoStringTypeSpec(Func* func)
  17869. {
  17870. return !PHASE_OFF(Js::StringTypeSpecPhase, func) && !IsTypeSpecPhaseOff(func);
  17871. }
  17872. // static
  17873. bool
  17874. GlobOpt::DoTypedArrayTypeSpec(Func* func)
  17875. {
  17876. return !PHASE_OFF(Js::TypedArrayTypeSpecPhase, func) &&
  17877. !IsTypeSpecPhaseOff(func) &&
  17878. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTypedArrayTypeSpecDisabled(func->IsLoopBody()))
  17879. #if defined(_M_IX86)
  17880. && AutoSystemInfo::Data.SSE2Available()
  17881. #endif
  17882. ;
  17883. }
  17884. // static
  17885. bool
  17886. GlobOpt::DoNativeArrayTypeSpec(Func* func)
  17887. {
  17888. return !PHASE_OFF(Js::NativeArrayPhase, func) &&
  17889. !IsTypeSpecPhaseOff(func)
  17890. #if defined(_M_IX86)
  17891. && AutoSystemInfo::Data.SSE2Available()
  17892. #endif
  17893. ;
  17894. }
// True when array check hoisting may be performed for the top-level function.
// Requires the phase to be on, no profile-based disable (tracked separately
// for loop bodies), not JIT-in-debug-mode, and glob-opts allowed for
// generator functions.
bool
GlobOpt::DoArrayCheckHoist(Func *const func)
{
    // Only meaningful on the top-level function; callees inherit the decision.
    Assert(func->IsTopFunc());
    return
        !PHASE_OFF(Js::ArrayCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(func->IsLoopBody())) &&
        !func->IsJitInDebugMode() && // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
        func->DoGlobOptsForGeneratorFunc();
}
// Returns the per-pass cached result of the static DoArrayCheckHoist(Func*) query.
bool
GlobOpt::DoArrayCheckHoist() const
{
    return doArrayCheckHoist;
}
// Per-site array-check-hoist query. Returns false when hoisting is globally
// off or when the instruction is participating in the stack-arguments
// optimization. For likely JS arrays (array or object-with-array), hoisting
// additionally requires that implicit calls can be disabled in the enclosing
// loop (or function, when loop is null), because redundant JS array checks
// can only be eliminated when implicit calls cannot invalidate them.
bool
GlobOpt::DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr *const instr) const
{
    if(!DoArrayCheckHoist() || (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
    {
        return false;
    }
    if(!baseValueType.IsLikelyArrayOrObjectWithArray() ||
        (loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func)))
    {
        return true;
    }
    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant JS array checks
#if DBG_DUMP
    // Trace only when the sole blocking implicit-call flag is External.
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArrayCheckHoist disabled for JS arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return false;
}
// True when missing-value check hoisting may be performed: requires array
// check hoisting plus its own phase switch and profile-based disable flag.
bool
GlobOpt::DoArrayMissingValueCheckHoist(Func *const func)
{
    return
        DoArrayCheckHoist(func) &&
        !PHASE_OFF(Js::ArrayMissingValueCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayMissingValueCheckHoistDisabled(func->IsLoopBody()));
}
// Returns the per-pass cached result of the static query above.
bool
GlobOpt::DoArrayMissingValueCheckHoist() const
{
    return doArrayMissingValueCheckHoist;
}
// True when array segment hoisting may be performed for the given base value
// type. Typed arrays only require the generic segment-hoist phase; JS arrays
// (array or object-with-array) additionally require the JS-array-specific
// phase and no profile-based disable.
bool
GlobOpt::DoArraySegmentHoist(const ValueType baseValueType, Func *const func)
{
    Assert(baseValueType.IsLikelyAnyOptimizedArray());
    if(!DoArrayCheckHoist(func) || PHASE_OFF(Js::ArraySegmentHoistPhase, func))
    {
        return false;
    }
    if(!baseValueType.IsLikelyArrayOrObjectWithArray())
    {
        return true;
    }
    return
        !PHASE_OFF(Js::JsArraySegmentHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsJsArraySegmentHoistDisabled(func->IsLoopBody()));
}
// Per-pass cached variant: picks the JS-array or typed-array cached flag
// depending on the base value type.
bool
GlobOpt::DoArraySegmentHoist(const ValueType baseValueType) const
{
    Assert(baseValueType.IsLikelyAnyOptimizedArray());
    return baseValueType.IsLikelyArrayOrObjectWithArray() ? doJsArraySegmentHoist : doArraySegmentHoist;
}
// True when typed-array head-segment-length loads may be hoisted in the given
// loop (or the whole function when loop is null). Requires segment hoisting
// for typed arrays and the ability to disable implicit calls, since implicit
// calls could invalidate a cached segment length.
bool
GlobOpt::DoTypedArraySegmentLengthHoist(Loop *const loop) const
{
    // Int32Array stands in for "any typed array" for the segment-hoist query.
    if(!DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array)))
    {
        return false;
    }
    if(loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func))
    {
        return true;
    }
    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant typed array
    // segment length loads.
#if DBG_DUMP
    // Trace only when the sole blocking implicit-call flag is External.
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArraySegmentLengthHoist disabled for typed arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return false;
}
// True when array length hoisting may be performed: requires array check
// hoisting plus its own phase switch and profile-based disable flag.
bool
GlobOpt::DoArrayLengthHoist(Func *const func)
{
    return
        DoArrayCheckHoist(func) &&
        !PHASE_OFF(Js::Phase::ArrayLengthHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayLengthHoistDisabled(func->IsLoopBody()));
}
// Returns the per-pass cached result of the static query above.
bool
GlobOpt::DoArrayLengthHoist() const
{
    return doArrayLengthHoist;
}
// Eliminating array access helper calls is gated on the same conditions as
// array check hoisting; there is no separate switch.
bool
GlobOpt::DoEliminateArrayAccessHelperCall(Func *const func)
{
    return DoArrayCheckHoist(func);
}
// Returns the per-pass cached result of the static query above.
bool
GlobOpt::DoEliminateArrayAccessHelperCall() const
{
    return doEliminateArrayAccessHelperCall;
}
// True when an LdLen_A (length load) may be int-specialized. Rejects the
// optimization when the phase or profile disables it, when the instruction is
// doing the stack-arguments optimization, or when profile data says the
// element type is not likely int / the destination sym is known non-int.
// Otherwise requires a base that has been a string or is a likely optimized
// array other than object-with-array (whose "length" is not the array length).
// instr may be null, in which case only the phase/profile checks and the
// base value type decide.
bool
GlobOpt::DoLdLenIntSpec(IR::Instr *const instr, const ValueType baseValueType) const
{
    Assert(!instr || instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(!instr || instr->GetDst());
    Assert(!instr || instr->GetSrc1());
    if(PHASE_OFF(Js::LdLenIntSpecPhase, func) ||
        IsTypeSpecPhaseOff(func) ||
        (func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsLdLenIntSpecDisabled()) ||
        (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
    {
        return false;
    }
    if(instr &&
        instr->IsProfiledInstr() &&
        (
            !instr->AsProfiledInstr()->u.ldElemInfo->GetElementType().IsLikelyInt() ||
            instr->GetDst()->AsRegOpnd()->m_sym->m_isNotInt
        ))
    {
        return false;
    }
    Assert(!instr || baseValueType == instr->GetSrc1()->GetValueType());
    return
        baseValueType.HasBeenString() ||
        (baseValueType.IsLikelyAnyOptimizedArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray);
}
// True when path-dependent value tracking is enabled (phase not switched off).
bool
GlobOpt::DoPathDependentValues() const
{
    return !PHASE_OFF(Js::Phase::PathDependentValuesPhase, func);
}
// Per-pass cached flag: tracking of relative (symbolic) int bounds.
bool
GlobOpt::DoTrackRelativeIntBounds() const
{
    return doTrackRelativeIntBounds;
}
// Per-pass cached flag: array bound check elimination.
bool
GlobOpt::DoBoundCheckElimination() const
{
    return doBoundCheckElimination;
}
// Per-pass cached flag: hoisting of bound checks out of loops.
bool
GlobOpt::DoBoundCheckHoist() const
{
    return doBoundCheckHoist;
}
// Per-pass cached flag: bound check hoisting based on the loop trip count.
bool
GlobOpt::DoLoopCountBasedBoundCheckHoist() const
{
    return doLoopCountBasedBoundCheckHoist;
}
// Per-pass cached flag: int-int specialization of Math.pow.
bool
GlobOpt::DoPowIntIntTypeSpec() const
{
    return doPowIntIntTypeSpec;
}
// Per-pass cached flag: tag check optimization.
bool
GlobOpt::DoTagChecks() const
{
    return doTagChecks;
}
  18080. bool
  18081. GlobOpt::TrackArgumentsObject()
  18082. {
  18083. if (PHASE_OFF(Js::StackArgOptPhase, this->func))
  18084. {
  18085. this->CannotAllocateArgumentsObjectOnStack();
  18086. return false;
  18087. }
  18088. return func->GetHasStackArgs();
  18089. }
// Permanently disables stack allocation of the arguments object for this
// function (sets HasStackArgs to false) and, under test-trace builds, logs
// the decision.
void
GlobOpt::CannotAllocateArgumentsObjectOnStack()
{
    func->SetHasStackArgs(false);
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (PHASE_TESTTRACE(Js::StackArgOptPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(_u("Stack args disabled for function %s(%s)\n"), func->GetJITFunctionBody()->GetDisplayName(), func->GetDebugNumberSet(debugStringBuffer));
        Output::Flush();
    }
#endif
}
// Pre-optimization peephole for instructions whose opcode has "dead fall
// through" (control does not continue past them). Converts BailOnNoProfile to
// a bailout instruction targeting the next real bytecode offset, validates
// BailOnException placement, and removes unreachable code after such
// instructions. Returns the (possibly replaced) instruction.
IR::Instr *
GlobOpt::PreOptPeep(IR::Instr *instr)
{
    if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
            case Js::OpCode::BailOnNoProfile:
            {
                // Handle BailOnNoProfile
                if (instr->HasBailOutInfo())
                {
                    // Bailout info exists only after a previous visit; outside a
                    // loop prepass, refresh the captured state for this block.
                    if (!this->prePassLoop)
                    {
                        FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
                    }
                    // Already processed.
                    return instr;
                }
                // Convert to bailout instr
                // Find the next instruction that carries a real bytecode offset;
                // the bailout must resume interpretation there.
                IR::Instr *nextBytecodeOffsetInstr = instr->GetNextRealInstrOrLabel();
                while(nextBytecodeOffsetInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
                {
                    nextBytecodeOffsetInstr = nextBytecodeOffsetInstr->GetNextRealInstrOrLabel();
                    Assert(!nextBytecodeOffsetInstr->IsLabelInstr());
                }
                instr = instr->ConvertToBailOutInstr(nextBytecodeOffsetInstr, IR::BailOutOnNoProfile);
                instr->ClearByteCodeOffset();
                instr->SetByteCodeOffset(nextBytecodeOffsetInstr);
                if (!this->currentBlock->loop)
                {
                    FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
                }
                else
                {
                    // Inside a loop this should only happen during the prepass.
                    Assert(this->prePassLoop);
                }
                break;
            }
            case Js::OpCode::BailOnException:
            {
                // BailOnException is expected immediately after a Catch in a
                // catch-region label block.
                Assert(this->func->HasTry() && this->func->DoOptimizeTryCatch() &&
                    instr->m_prev->m_opcode == Js::OpCode::Catch &&
                    instr->m_prev->m_prev->IsLabelInstr() &&
                    instr->m_prev->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeCatch); // Should also handle RegionTypeFinally
                break;
            }
            default:
            {
                // For other dead-fall-through opcodes in a loop, only clean up
                // during the prepass.
                if(this->currentBlock->loop && !this->IsLoopPrePass())
                {
                    return instr;
                }
                break;
            }
        }
        RemoveCodeAfterNoFallthroughInstr(instr);
    }
    return instr;
}
// Removes the dead code that follows a no-fall-through instruction within the
// current block (stopping before FunctionExit), repositions the block's last
// instruction, and detaches all now-dead successor edges in the flow graph.
void
GlobOpt::RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr)
{
    if (instr != this->currentBlock->GetLastInstr())
    {
        // Remove dead code after bailout
        IR::Instr *instrDead = instr->m_next;
        IR::Instr *instrNext;
        for (; instrDead != this->currentBlock->GetLastInstr(); instrDead = instrNext)
        {
            instrNext = instrDead->m_next;
            // Preserve FunctionExit: stop just before it.
            if (instrNext->m_opcode == Js::OpCode::FunctionExit)
            {
                break;
            }
            this->func->m_fg->RemoveInstr(instrDead, this);
        }
        // Remove the final dead instruction and rewire the block's last instr.
        IR::Instr *instrNextBlock = instrDead->m_next;
        this->func->m_fg->RemoveInstr(instrDead, this);
        this->currentBlock->SetLastInstr(instrNextBlock->m_prev);
    }
    // Cleanup dead successors
    FOREACH_SUCCESSOR_BLOCK_EDITING(deadBlock, this->currentBlock, iter)
    {
        this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);
        if (this->currentBlock->GetDataUseCount() > 0)
        {
            this->currentBlock->DecrementDataUseCount();
        }
    } NEXT_SUCCESSOR_BLOCK_EDITING;
}
// At the start of a try region (instr is followed by the try-region label),
// forces all write-through symbols of the region back to var type so that
// their values are valid if an exception transfers control to the handler.
void
GlobOpt::ProcessTryCatch(IR::Instr* instr)
{
    Assert(instr->m_next->IsLabelInstr() && instr->m_next->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeTry);
    Region* tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
    BVSparse<JitArenaAllocator> * writeThroughSymbolsSet = tryRegion->writeThroughSymbolsSet;
    ToVar(writeThroughSymbolsSet, this->currentBlock);
}
// Inside a try region, after a def of a type-specialized bytecode-visible
// sym that is write-through for the region, inserts a ToVar of its var-equiv
// sym right after the def so the handler observes a valid var value.
void
GlobOpt::InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd)
{
    if (this->currentRegion->GetType() == RegionTypeTry && dstOpnd->IsRegOpnd() && dstOpnd->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
    {
        StackSym * sym = dstOpnd->AsRegOpnd()->m_sym;
        // Already a var: nothing to convert.
        if (sym->IsVar())
        {
            return;
        }
        StackSym * varSym = sym->GetVarEquivSym(nullptr);
        if (this->currentRegion->writeThroughSymbolsSet->Test(varSym->m_id))
        {
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(varSym, IRType::TyVar, instr->m_func);
            this->ToVar(instr->m_next, regOpnd, this->currentBlock, NULL, false);
        }
    }
}
// Removes the flow-graph edge into a catch block that is reached via the given
// BrOnException / BrOnNoException branch. For BrOnException the catch block is
// the branch target; for BrOnNoException it is either the fall-through label's
// block (when that label is already in a catch region) or the block a
// following unconditional branch jumps to.
void
GlobOpt::RemoveFlowEdgeToCatchBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());
    BasicBlock * catchBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        catchBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else if (instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();
        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeCatch)
        {
            catchBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            // Fall-through block is a trampoline: it must end with an
            // unconditional branch into the catch region.
            Assert(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional());
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchToCatchBlock = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * catchBlockLabel = branchToCatchBlock->GetTarget();
            Assert(catchBlockLabel->GetRegion()->GetType() == RegionTypeCatch);
            catchBlock = catchBlockLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }
    Assert(catchBlock);
    Assert(predBlock);
    if (this->func->m_fg->FindEdge(predBlock, catchBlock))
    {
        predBlock->RemoveDeadSucc(catchBlock, this->func->m_fg);
        // Only the current block's data use count is tracked here.
        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }
}
// Post-optimization peephole (skipped in the loop prepass). Turns dead
// branches into Nops when their sources are proven primitive or type
// specialized (so evaluating them cannot have side effects), and Nops
// self-assignments (Ld_A/Ld_I4 where dst == src1), preserving bytecode uses
// where needed for bailout.
IR::Instr *
GlobOpt::OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    IR::Opnd *dst, *src1, *src2;
    if (this->IsLoopPrePass())
    {
        return instr;
    }
    switch (instr->m_opcode)
    {
        case Js::OpCode::DeadBrEqual:
        case Js::OpCode::DeadBrRelational:
        case Js::OpCode::DeadBrSrEqual:
            src1 = instr->GetSrc1();
            src2 = instr->GetSrc2();
            // These branches were turned into dead branches because they were unnecessary (branch to next, ...).
            // The DeadBr are necessary in case the evaluation of the sources have side-effects.
            // If we know for sure the srcs are primitive or have been type specialized, we don't need these instructions
            if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && this->IsTypeSpecialized(src1->AsRegOpnd()->m_sym, this->currentBlock))) &&
                ((src2Val && src2Val->GetValueInfo()->IsPrimitive()) || (src2->IsRegOpnd() && this->IsTypeSpecialized(src2->AsRegOpnd()->m_sym, this->currentBlock))))
            {
                this->CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Nop;
            }
            break;
        case Js::OpCode::DeadBrOnHasProperty:
            src1 = instr->GetSrc1();
            if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && this->IsTypeSpecialized(src1->AsRegOpnd()->m_sym, this->currentBlock))))
            {
                this->CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Nop;
            }
            break;
        case Js::OpCode::Ld_A:
        case Js::OpCode::Ld_I4:
            src1 = instr->GetSrc1();
            dst = instr->GetDst();
            // Self-assignment: keep a ByteCodeUses for non-JIT-optimized regs
            // so bailout still sees the def, then Nop the load.
            if (dst->IsRegOpnd() && dst->IsEqual(src1))
            {
                dst = instr->UnlinkDst();
                if (!dst->GetIsJITOptimizedReg())
                {
                    IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(instr);
                    bytecodeUse->SetDst(dst);
                    instr->InsertAfter(bytecodeUse);
                }
                instr->FreeSrc1();
                instr->m_opcode = Js::OpCode::Nop;
            }
            break;
    }
    return instr;
}
// Runs OptSrc (value-type update + copy propagation) on an indirect operand's
// base, and, when present, its index. If indirIndexValRef is non-null it
// receives the index's Value (must come in pointing at null).
void
GlobOpt::OptimizeIndirUses(IR::IndirOpnd *indirOpnd, IR::Instr * *pInstr, Value **indirIndexValRef)
{
    IR::Instr * &instr = *pInstr;
    Assert(!indirIndexValRef || !*indirIndexValRef);
    // Update value types and copy-prop the base
    OptSrc(indirOpnd->GetBaseOpnd(), &instr, nullptr, indirOpnd);
    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (!indexOpnd)
    {
        return;
    }
    // Update value types and copy-prop the index
    Value *indexVal = OptSrc(indexOpnd, &instr, nullptr, indirOpnd);
    if(indirIndexValRef)
    {
        *indirIndexValRef = indexVal;
    }
}
// ---- ValueInfo structure-kind predicates and checked downcasts ----
// Each Is* predicate tests the structureKind tag (combined with the value
// type where the kind is only meaningful for that type); each As* cast
// asserts the predicate and static_casts to the derived value-info class.
bool
ValueInfo::IsGeneric() const
{
    return structureKind == ValueStructureKind::Generic;
}
bool
ValueInfo::IsIntConstant() const
{
    return IsInt() && structureKind == ValueStructureKind::IntConstant;
}
const IntConstantValueInfo *
ValueInfo::AsIntConstant() const
{
    Assert(IsIntConstant());
    return static_cast<const IntConstantValueInfo *>(this);
}
bool
ValueInfo::IsIntRange() const
{
    return IsInt() && structureKind == ValueStructureKind::IntRange;
}
const IntRangeValueInfo *
ValueInfo::AsIntRange() const
{
    Assert(IsIntRange());
    return static_cast<const IntRangeValueInfo *>(this);
}
bool
ValueInfo::IsIntBounded() const
{
    const bool isIntBounded = IsLikelyInt() && structureKind == ValueStructureKind::IntBounded;
    // Bounds for definitely int values should have relative bounds, otherwise those values should use one of the other value
    // infos
    Assert(!isIntBounded || static_cast<const IntBoundedValueInfo *>(this)->Bounds()->RequiresIntBoundedValueInfo(Type()));
    return isIntBounded;
}
const IntBoundedValueInfo *
ValueInfo::AsIntBounded() const
{
    Assert(IsIntBounded());
    return static_cast<const IntBoundedValueInfo *>(this);
}
bool
ValueInfo::IsFloatConstant() const
{
    return IsFloat() && structureKind == ValueStructureKind::FloatConstant;
}
FloatConstantValueInfo *
ValueInfo::AsFloatConstant()
{
    Assert(IsFloatConstant());
    return static_cast<FloatConstantValueInfo *>(this);
}
const FloatConstantValueInfo *
ValueInfo::AsFloatConstant() const
{
    Assert(IsFloatConstant());
    return static_cast<const FloatConstantValueInfo *>(this);
}
bool
ValueInfo::IsVarConstant() const
{
    return structureKind == ValueStructureKind::VarConstant;
}
VarConstantValueInfo *
ValueInfo::AsVarConstant()
{
    Assert(IsVarConstant());
    return static_cast<VarConstantValueInfo *>(this);
}
bool
ValueInfo::IsJsType() const
{
    // A JsType value info must have an uninitialized value type.
    Assert(!(structureKind == ValueStructureKind::JsType && !IsUninitialized()));
    return structureKind == ValueStructureKind::JsType;
}
JsTypeValueInfo *
ValueInfo::AsJsType()
{
    Assert(IsJsType());
    return static_cast<JsTypeValueInfo *>(this);
}
const JsTypeValueInfo *
ValueInfo::AsJsType() const
{
    Assert(IsJsType());
    return static_cast<const JsTypeValueInfo *>(this);
}
bool
ValueInfo::IsArrayValueInfo() const
{
    return IsAnyOptimizedArray() && structureKind == ValueStructureKind::Array;
}
const
ArrayValueInfo *ValueInfo::AsArrayValueInfo() const
{
    Assert(IsArrayValueInfo());
    return static_cast<const ArrayValueInfo *>(this);
}
ArrayValueInfo *
ValueInfo::AsArrayValueInfo()
{
    Assert(IsArrayValueInfo());
    return static_cast<ArrayValueInfo *>(this);
}
// Produces a value info specialized to definite int32. Already-int infos are
// returned as-is. Non-bounded infos get a generic copy typed as tagged int.
// Int-bounded infos keep their bounds when those still require the bounded
// representation; otherwise they collapse to a constant or range info.
ValueInfo *
ValueInfo::SpecializeToInt32(JitArenaAllocator *const allocator, const bool isForLoopBackEdgeCompensation)
{
    // Int specialization in some uncommon loop cases involving dependencies, needs to allow specializing values of arbitrary
    // types, even values that are definitely not int, to compensate for aggressive assumptions made by a loop prepass. In all
    // other cases, only values that are likely int may be int-specialized.
    Assert(IsUninitialized() || IsLikelyInt() || isForLoopBackEdgeCompensation);
    if(IsInt())
    {
        return this;
    }
    if(!IsIntBounded())
    {
        ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);
        newValueInfo->Type() = ValueType::GetInt(true);
        return newValueInfo;
    }
    const IntBoundedValueInfo *const boundedValueInfo = AsIntBounded();
    const IntBounds *const bounds = boundedValueInfo->Bounds();
    const IntConstantBounds constantBounds = bounds->ConstantBounds();
    if(bounds->RequiresIntBoundedValueInfo())
    {
        IntBoundedValueInfo *const newValueInfo = boundedValueInfo->Copy(allocator);
        newValueInfo->Type() = constantBounds.GetValueType();
        return newValueInfo;
    }
    // Bounds no longer needed: collapse to a constant or plain range info.
    ValueInfo *const newValueInfo =
        constantBounds.IsConstant()
            ? static_cast<ValueInfo *>(IntConstantValueInfo::New(allocator, constantBounds.LowerBound()))
            : IntRangeValueInfo::New(allocator, constantBounds.LowerBound(), constantBounds.UpperBound(), false);
    newValueInfo->SetSymStore(GetSymStore());
    return newValueInfo;
}
// Produces a value info specialized to a definite number for float64
// specialization. Already-number infos are returned unchanged.
ValueInfo *
ValueInfo::SpecializeToFloat64(JitArenaAllocator *const allocator)
{
    if(IsNumber())
    {
        return this;
    }
    ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);
    // If the value type was likely int, after float-specializing, it's preferable to use Int_Number rather than Float, as the
    // former is also likely int and allows int specialization later.
    newValueInfo->Type() = IsLikelyInt() ? Type().ToDefiniteAnyNumber() : Type().ToDefiniteAnyFloat();
    return newValueInfo;
}
// SIMD_JS
// Dispatches to the appropriate SIMD128 specialization by IR type; only
// Float32x4 and Int32x4 are supported.
ValueInfo *
ValueInfo::SpecializeToSimd128(IRType type, JitArenaAllocator *const allocator)
{
    switch (type)
    {
    case TySimd128F4:
        return SpecializeToSimd128F4(allocator);
    case TySimd128I4:
        return SpecializeToSimd128I4(allocator);
    default:
        Assert(UNREACHED);
        return nullptr;
    }
}
// Produces a value info specialized to Simd128Float32x4; returns this when
// already of that type.
ValueInfo *
ValueInfo::SpecializeToSimd128F4(JitArenaAllocator *const allocator)
{
    if (IsSimd128Float32x4())
    {
        return this;
    }
    ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);
    newValueInfo->Type() = ValueType::GetSimd128(ObjectType::Simd128Float32x4);
    return newValueInfo;
}
// Produces a value info specialized to Simd128Int32x4; returns this when
// already of that type.
ValueInfo *
ValueInfo::SpecializeToSimd128I4(JitArenaAllocator *const allocator)
{
    if (IsSimd128Int32x4())
    {
        return this;
    }
    ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);
    newValueInfo->Type() = ValueType::GetSimd128(ObjectType::Simd128Int32x4);
    return newValueInfo;
}
// Sharing state is only tracked on JsType value infos; all others report
// not shared and ignore SetIsShared.
bool
ValueInfo::GetIsShared() const
{
    return IsJsType() ? AsJsType()->GetIsShared() : false;
}
void
ValueInfo::SetIsShared()
{
    if (IsJsType()) AsJsType()->SetIsShared();
}
// Clones this value info, dispatching to the derived class's Copy for each
// structure kind; anything not matched falls back to a generic-kind copy.
// The order of checks mirrors the structure-kind predicates above.
ValueInfo *
ValueInfo::Copy(JitArenaAllocator * allocator)
{
    if(IsIntConstant())
    {
        return AsIntConstant()->Copy(allocator);
    }
    if(IsIntRange())
    {
        return AsIntRange()->Copy(allocator);
    }
    if(IsIntBounded())
    {
        return AsIntBounded()->Copy(allocator);
    }
    if(IsFloatConstant())
    {
        return AsFloatConstant()->Copy(allocator);
    }
    if(IsJsType())
    {
        return AsJsType()->Copy(allocator);
    }
    if(IsArrayValueInfo())
    {
        return AsArrayValueInfo()->Copy(allocator);
    }
    return CopyWithGenericStructureKind(allocator);
}
// Retrieves the constant int bounds of this value, if available, into
// *pMin/*pMax and returns true. Returns false (outputs untouched) when the
// value has no int constant bounds under the given aggressiveness.
bool
ValueInfo::GetIntValMinMax(int *pMin, int *pMax, bool doAggressiveIntTypeSpec)
{
    IntConstantBounds intConstantBounds;
    if (TryGetIntConstantBounds(&intConstantBounds, doAggressiveIntTypeSpec))
    {
        *pMin = intConstantBounds.LowerBound();
        *pMax = intConstantBounds.UpperBound();
        return true;
    }
    // Failure implies the value is not definitely int (nor likely int when
    // aggressive int type spec was requested).
    Assert(!IsInt());
    Assert(!doAggressiveIntTypeSpec || !IsLikelyInt());
    return false;
}
  18576. bool
  18577. GlobOpt::IsPREInstrCandidateLoad(Js::OpCode opcode)
  18578. {
  18579. switch (opcode)
  18580. {
  18581. case Js::OpCode::LdFld:
  18582. case Js::OpCode::LdFldForTypeOf:
  18583. case Js::OpCode::LdRootFld:
  18584. case Js::OpCode::LdRootFldForTypeOf:
  18585. case Js::OpCode::LdMethodFld:
  18586. case Js::OpCode::LdRootMethodFld:
  18587. case Js::OpCode::LdSlot:
  18588. case Js::OpCode::LdSlotArr:
  18589. return true;
  18590. }
  18591. return false;
  18592. }
  18593. bool
  18594. GlobOpt::IsPREInstrCandidateStore(Js::OpCode opcode)
  18595. {
  18596. switch (opcode)
  18597. {
  18598. case Js::OpCode::StFld:
  18599. case Js::OpCode::StRootFld:
  18600. case Js::OpCode::StSlot:
  18601. return true;
  18602. }
  18603. return false;
  18604. }
// True when the loop's recorded implicit call flags permit optimizations that
// require disabling implicit calls: there must be profile info (not
// HasNoInfo), and ignoring accessor calls, no other implicit calls occurred.
bool
GlobOpt::ImplicitCallFlagsAllowOpts(Loop *loop)
{
    return loop->GetImplicitCallFlags() != Js::ImplicitCall_HasNoInfo &&
        (((loop->GetImplicitCallFlags() & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
}
// Function-level variant of the same test, using the flow graph's flags.
bool
GlobOpt::ImplicitCallFlagsAllowOpts(Func *func)
{
    return func->m_fg->implicitCallFlags != Js::ImplicitCall_HasNoInfo &&
        (((func->m_fg->implicitCallFlags & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
}
  18617. #if DBG_DUMP
  18618. void ValueInfo::Dump()
  18619. {
  18620. if(!IsJsType()) // The value type is uninitialized for a type value
  18621. {
  18622. char typeStr[VALUE_TYPE_MAX_STRING_SIZE];
  18623. Type().ToString(typeStr);
  18624. Output::Print(_u("%S"), typeStr);
  18625. }
  18626. IntConstantBounds intConstantBounds;
  18627. if(TryGetIntConstantBounds(&intConstantBounds))
  18628. {
  18629. if(intConstantBounds.IsConstant())
  18630. {
  18631. Output::Print(_u(" constant:%d"), intConstantBounds.LowerBound());
  18632. return;
  18633. }
  18634. Output::Print(_u(" range:%d - %d"), intConstantBounds.LowerBound(), intConstantBounds.UpperBound());
  18635. }
  18636. else if(IsFloatConstant())
  18637. {
  18638. Output::Print(_u(" constant:%g"), AsFloatConstant()->FloatValue());
  18639. }
  18640. else if(IsJsType())
  18641. {
  18642. const JITTypeHolder type(AsJsType()->GetJsType());
  18643. type != nullptr ? Output::Print(_u("type: 0x%p, "), type->GetAddr()) : Output::Print(_u("type: null, "));
  18644. Output::Print(_u("type Set: "));
  18645. Js::EquivalentTypeSet* typeSet = AsJsType()->GetJsTypeSet();
  18646. if (typeSet != nullptr)
  18647. {
  18648. uint16 typeCount = typeSet->GetCount();
  18649. for (uint16 ti = 0; ti < typeCount - 1; ti++)
  18650. {
  18651. Output::Print(_u("0x%p, "), typeSet->GetType(ti));
  18652. }
  18653. Output::Print(_u("0x%p"), typeSet->GetType(typeCount - 1));
  18654. }
  18655. else
  18656. {
  18657. Output::Print(_u("null"));
  18658. }
  18659. }
  18660. else if(IsArrayValueInfo())
  18661. {
  18662. const ArrayValueInfo *const arrayValueInfo = AsArrayValueInfo();
  18663. if(arrayValueInfo->HeadSegmentSym())
  18664. {
  18665. Output::Print(_u(" seg: "));
  18666. arrayValueInfo->HeadSegmentSym()->Dump();
  18667. }
  18668. if(arrayValueInfo->HeadSegmentLengthSym())
  18669. {
  18670. Output::Print(_u(" segLen: "));
  18671. arrayValueInfo->HeadSegmentLengthSym()->Dump();
  18672. }
  18673. if(arrayValueInfo->LengthSym())
  18674. {
  18675. Output::Print(_u(" len: "));
  18676. arrayValueInfo->LengthSym()->Dump();
  18677. }
  18678. }
  18679. if (this->GetSymStore())
  18680. {
  18681. Output::Print(_u("\t\tsym:"));
  18682. this->GetSymStore()->Dump();
  18683. }
  18684. }
// Debug-only: dumps the current block's sym-to-value map.
void
GlobOpt::Dump()
{
    this->DumpSymToValueMap();
}
// Debug-only: dumps the given sym-to-value hash table (no-op when null).
void
GlobOpt::DumpSymToValueMap(GlobHashTable* symToValueMap)
{
    if (symToValueMap != nullptr)
    {
        symToValueMap->Dump(GlobOpt::DumpSym);
    }
}
// Debug-only: dumps the sym-to-value map of the given block, with a header.
void
GlobOpt::DumpSymToValueMap(BasicBlock *block)
{
    Output::Print(_u("\n*** SymToValueMap ***\n"));
    DumpSymToValueMap(block->globOptData.symToValueMap);
}
// Debug-only: dumps the current block's sym-to-value map.
void
GlobOpt::DumpSymToValueMap()
{
    DumpSymToValueMap(this->currentBlock);
}
// Debug-only callback used by the hash-table dump above to print one sym.
void
GlobOpt::DumpSym(Sym *sym)
{
    sym->Dump();
}
  18714. void
  18715. GlobOpt::DumpSymVal(int index)
  18716. {
  18717. SymID id = index;
  18718. extern Func *CurrentFunc;
  18719. Sym *sym = this->func->m_symTable->Find(id);
  18720. AssertMsg(sym, "Sym not found!!!");
  18721. Output::Print(_u("Sym: "));
  18722. sym->Dump();
  18723. Output::Print(_u("\t\tValueNumber: "));
  18724. Value ** pValue = this->blockData.symToValueMap->Get(sym->m_id);
  18725. (*pValue)->Dump();
  18726. Output::Print(_u("\n"));
  18727. }
  18728. void
  18729. GlobOpt::Trace(BasicBlock * block, bool before)
  18730. {
  18731. bool globOptTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18732. bool typeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::TypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18733. bool floatTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FloatTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18734. bool fieldHoistTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldHoistPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18735. bool fieldCopyPropTrace = fieldHoistTrace || Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18736. bool objTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ObjTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18737. bool valueTableTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ValueTablePhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18738. bool fieldPRETrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18739. bool anyTrace = globOptTrace || typeSpecTrace || floatTypeSpecTrace || fieldCopyPropTrace || fieldHoistTrace || objTypeSpecTrace || valueTableTrace || fieldPRETrace;
  18740. if (!anyTrace)
  18741. {
  18742. return;
  18743. }
  18744. if (fieldPRETrace && this->IsLoopPrePass())
  18745. {
  18746. if (block->isLoopHeader && before)
  18747. {
  18748. Output::Print(_u("==== Loop Prepass block header #%-3d, Visiting Loop block head #%-3d\n"),
  18749. this->prePassLoop->GetHeadBlock()->GetBlockNum(), block->GetBlockNum());
  18750. }
  18751. }
  18752. if (!typeSpecTrace && !floatTypeSpecTrace && !valueTableTrace && !Js::Configuration::Global.flags.Verbose)
  18753. {
  18754. return;
  18755. }
  18756. if (before)
  18757. {
  18758. Output::Print(_u("========================================================================\n"));
  18759. Output::Print(_u("Begin OptBlock: Block #%-3d"), block->GetBlockNum());
  18760. if (block->loop)
  18761. {
  18762. Output::Print(_u(" Loop block header:%-3d currentLoop block head:%-3d %s"),
  18763. block->loop->GetHeadBlock()->GetBlockNum(),
  18764. this->prePassLoop ? this->prePassLoop->GetHeadBlock()->GetBlockNum() : 0,
  18765. this->IsLoopPrePass() ? _u("PrePass") : _u(""));
  18766. }
  18767. Output::Print(_u("\n"));
  18768. }
  18769. else
  18770. {
  18771. Output::Print(_u("-----------------------------------------------------------------------\n"));
  18772. Output::Print(_u("After OptBlock: Block #%-3d\n"), block->GetBlockNum());
  18773. }
  18774. if ((typeSpecTrace || floatTypeSpecTrace) && !block->globOptData.liveVarSyms->IsEmpty())
  18775. {
  18776. Output::Print(_u(" Live var syms: "));
  18777. block->globOptData.liveVarSyms->Dump();
  18778. }
  18779. if (typeSpecTrace && !block->globOptData.liveInt32Syms->IsEmpty())
  18780. {
  18781. Assert(this->tempBv->IsEmpty());
  18782. this->tempBv->Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
  18783. if(!this->tempBv->IsEmpty())
  18784. {
  18785. Output::Print(_u(" Int32 type specialized (lossless) syms: "));
  18786. this->tempBv->Dump();
  18787. }
  18788. this->tempBv->ClearAll();
  18789. if(!block->globOptData.liveLossyInt32Syms->IsEmpty())
  18790. {
  18791. Output::Print(_u(" Int32 converted (lossy) syms: "));
  18792. block->globOptData.liveLossyInt32Syms->Dump();
  18793. }
  18794. }
  18795. if (floatTypeSpecTrace && !block->globOptData.liveFloat64Syms->IsEmpty())
  18796. {
  18797. Output::Print(_u(" Float64 type specialized syms: "));
  18798. block->globOptData.liveFloat64Syms->Dump();
  18799. }
  18800. if ((fieldCopyPropTrace || objTypeSpecTrace) && this->DoFieldCopyProp(block->loop) && !block->globOptData.liveFields->IsEmpty())
  18801. {
  18802. Output::Print(_u(" Live field syms: "));
  18803. block->globOptData.liveFields->Dump();
  18804. }
  18805. if ((fieldHoistTrace || objTypeSpecTrace) && this->DoFieldHoisting(block->loop) && HasHoistableFields(block))
  18806. {
  18807. Output::Print(_u(" Hoistable field sym: "));
  18808. block->globOptData.hoistableFields->Dump();
  18809. }
  18810. if (objTypeSpecTrace || valueTableTrace)
  18811. {
  18812. Output::Print(_u(" Value table:\n"));
  18813. DumpSymToValueMap(block->globOptData.symToValueMap);
  18814. }
  18815. if (before)
  18816. {
  18817. Output::Print(_u("-----------------------------------------------------------------------\n")); \
  18818. }
  18819. Output::Flush();
  18820. }
// Dumps the GlobOpt configuration in effect for the current function:
// which type-specialization optimizations are enabled, whether array check
// hoisting was disabled by profile data, and the profiled implicit-call flags
// for the function and for each loop. Debug-dump only (lives under DBG_DUMP).
void
GlobOpt::TraceSettings()
{
    Output::Print(_u("GlobOpt Settings:\r\n"));
    Output::Print(_u(" FloatTypeSpec: %s\r\n"), this->DoFloatTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u(" AggressiveIntTypeSpec: %s\r\n"), this->DoAggressiveIntTypeSpec() ? _u("enabled") : _u("disabled"));
    Output::Print(_u(" LossyIntTypeSpec: %s\r\n"), this->DoLossyIntTypeSpec() ? _u("enabled") : _u("disabled"));
    // Array check hoist state comes from the dynamic profile, so only consult it when profile info exists.
    Output::Print(_u(" ArrayCheckHoist: %s\r\n"), (this->func->HasProfileInfo() && this->func->GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(func->IsLoopBody())) ? _u("disabled") : _u("enabled"));
    Output::Print(_u(" ImplicitCallFlags: %s\r\n"), Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->func->m_fg->implicitCallFlags));
    // Per-loop implicit-call flags, walked from the flow graph's loop list.
    for (Loop * loop = this->func->m_fg->loopList; loop != NULL; loop = loop->next)
    {
        Output::Print(_u(" loop: %d, ImplicitCallFlags: %s\r\n"), loop->GetLoopNumber(),
            Js::DynamicProfileInfo::GetImplicitCallFlagsString(loop->GetImplicitCallFlags()));
    }
    Output::Flush();
}
  18837. #endif // DBG_DUMP
  18838. IR::Instr *
  18839. GlobOpt::TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrLast)
  18840. {
  18841. if (!this->func->GetHasMarkTempObjects())
  18842. {
  18843. return instrLast;
  18844. }
  18845. IR::Instr * instr = instrStart;
  18846. IR::Instr * instrEnd = instrLast->m_next;
  18847. IR::Instr * lastInstr = nullptr;
  18848. GlobOptBlockData& globOptData = this->currentBlock->globOptData;
  18849. do
  18850. {
  18851. bool mayNeedBailOnImplicitCallsPreOp = !this->IsLoopPrePass()
  18852. && instr->HasAnyImplicitCalls()
  18853. && globOptData.maybeTempObjectSyms != nullptr;
  18854. if (mayNeedBailOnImplicitCallsPreOp)
  18855. {
  18856. IR::Opnd * src1 = instr->GetSrc1();
  18857. if (src1)
  18858. {
  18859. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src1, false);
  18860. IR::Opnd * src2 = instr->GetSrc2();
  18861. if (src2)
  18862. {
  18863. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src2, false);
  18864. }
  18865. }
  18866. }
  18867. IR::Opnd *dst = instr->GetDst();
  18868. if (dst)
  18869. {
  18870. if (dst->IsRegOpnd())
  18871. {
  18872. TrackTempObjectSyms(instr, dst->AsRegOpnd());
  18873. }
  18874. else if (mayNeedBailOnImplicitCallsPreOp)
  18875. {
  18876. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, dst, true);
  18877. }
  18878. }
  18879. lastInstr = instr;
  18880. instr = instr->m_next;
  18881. }
  18882. while (instr != instrEnd);
  18883. return lastInstr;
  18884. }
// Updates the per-block tracking of which stack syms may currently hold a
// mark-temp (stack-allocated) object, and which of those are additionally safe
// to store other temps into, given that 'instr' defines 'opnd'.
// Also seeds the stack-literal InitFld tracking map for NewScObjectLiteral.
void
GlobOpt::TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd)
{
    // If it is marked as dstIsTempObject, we should have mark temped it, or type specialized it to Ld_I4.
    Assert(!instr->dstIsTempObject || ObjectTempVerify::CanMarkTemp(instr, nullptr));
    GlobOptBlockData& globOptData = this->currentBlock->globOptData;
    bool canStoreTemp = false;
    bool maybeTemp = false;
    if (OpCodeAttr::TempObjectProducing(instr->m_opcode))
    {
        // Producer: the dst is a temp iff the mark-temp analysis flagged it.
        maybeTemp = instr->dstIsTempObject;

        // We have to make sure that lower will always generate code to do stack allocation
        // before we can store any other stack instance onto it. Otherwise, we would not
        // walk object to box the stack property.
        canStoreTemp = instr->dstIsTempObject && ObjectTemp::CanStoreTemp(instr);
    }
    else if (OpCodeAttr::TempObjectTransfer(instr->m_opcode))
    {
        // Transfer: dst may be temp if any transferring source may be temp.
        // Need to check both sources, GetNewScObject has two srcs for transfer.
        // No need to get var equiv sym here as transfer of type spec value does not transfer a mark temp object.
        maybeTemp = globOptData.maybeTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            || (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id)));
        // canStoreTemp requires ALL sources to be store-safe (note && vs || above).
        canStoreTemp = globOptData.canStoreTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            && (!instr->GetSrc2() || (instr->GetSrc2()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))));
        Assert(!canStoreTemp || instr->dstIsTempObject);
        Assert(!maybeTemp || instr->dstIsTempObject);
    }
    // Need to get the var equiv sym as assignment of type specialized sym kill the var sym value anyway.
    StackSym * sym = opnd->m_sym;
    if (!sym->IsVar())
    {
        sym = sym->GetVarEquivSym(nullptr);
        if (sym == nullptr)
        {
            // No var equivalent: nothing to track or kill.
            return;
        }
    }
    SymID symId = sym->m_id;
    if (maybeTemp)
    {
        // Only var sym should be temp objects
        Assert(opnd->m_sym == sym);
        // Lazily allocate the tracking bit vector on first temp-object sym.
        if (globOptData.maybeTempObjectSyms == nullptr)
        {
            globOptData.maybeTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        }
        globOptData.maybeTempObjectSyms->Set(symId);
        if (canStoreTemp)
        {
            if (instr->m_opcode == Js::OpCode::NewScObjectLiteral && !this->IsLoopPrePass())
            {
                // For object literal, we install the final type up front.
                // If there are bailout before we finish initializing all the fields, we need to
                // zero out the rest if we stack allocate the literal, so that the boxing would not
                // try to box trash pointer in the properties.
                // Although object literal initialization can be done lexically, BailOnNoProfile may cause some
                // paths to disappear. Doing it flow-based makes it easier to stop propagating those entries.
                IR::IntConstOpnd * propertyArrayIdOpnd = instr->GetSrc1()->AsIntConstOpnd();
                const Js::PropertyIdArray * propIds = instr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());

                // Duplicates are removed by parser
                Assert(!propIds->hadDuplicates);
                if (globOptData.stackLiteralInitFldDataMap == nullptr)
                {
                    globOptData.stackLiteralInitFldDataMap = JitAnew(alloc, StackLiteralInitFldDataMap, alloc);
                }
                else
                {
                    Assert(!globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
                }
                // Start tracking InitFld progress for this literal at field 0.
                StackLiteralInitFldData data = { propIds, 0};
                globOptData.stackLiteralInitFldDataMap->AddNew(sym, data);
            }
            if (globOptData.canStoreTempObjectSyms == nullptr)
            {
                globOptData.canStoreTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
            }
            globOptData.canStoreTempObjectSyms->Set(symId);
        }
        else if (globOptData.canStoreTempObjectSyms)
        {
            // Still maybe-temp, but no longer known safe to store temps into.
            globOptData.canStoreTempObjectSyms->Clear(symId);
        }
    }
    else
    {
        // Non-temp definition kills any previous temp-object status of the sym.
        Assert(!canStoreTemp);
        if (globOptData.maybeTempObjectSyms)
        {
            if (globOptData.canStoreTempObjectSyms)
            {
                globOptData.canStoreTempObjectSyms->Clear(symId);
            }
            globOptData.maybeTempObjectSyms->Clear(symId);
        }
        else
        {
            // canStoreTempObjectSyms is always a subset of maybeTempObjectSyms.
            Assert(!globOptData.canStoreTempObjectSyms);
        }

        // The symbol is being assigned to, the sym shouldn't still be in the stackLiteralInitFldDataMap
        Assert(this->IsLoopPrePass() ||
            globOptData.stackLiteralInitFldDataMap == nullptr
            || globOptData.stackLiteralInitFldDataMap->Count() == 0
            || !globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
    }
}
// If 'opnd' (a src or the non-reg dst of 'instr') refers to a sym that may hold
// a mark-temp object, attach or insert a BailOutMarkTempObject bailout so the
// stack object is boxed before any implicit call can observe it, and mark the
// operand as can-store-temp where applicable. For InitFld on a tracked stack
// literal, advances the literal's initialization progress.
// Returns 'instr', or the replacement instruction if a pre-op bailout was inserted.
IR::Instr *
GlobOpt::GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst)
{
    Assert(opnd);
    Assert(isDst == (opnd == instr->GetDst()));
    // Reg dsts are handled by TrackTempObjectSyms, not here.
    Assert(opnd != instr->GetDst() || !opnd->IsRegOpnd());
    Assert(!this->IsLoopPrePass());
    Assert(instr->HasAnyImplicitCalls());

    // Only dst reg opnd opcode or ArgOut_A should have dstIsTempObject marked
    Assert(!isDst || !instr->dstIsTempObject || instr->m_opcode == Js::OpCode::ArgOut_A);

    // Post-op implicit call shouldn't have installed yet
    Assert(!instr->HasBailOutInfo() || (instr->GetBailOutKind() & IR::BailOutKindBits) != IR::BailOutOnImplicitCalls);

    GlobOptBlockData& globOptData = this->currentBlock->globOptData;
    Assert(globOptData.maybeTempObjectSyms != nullptr);

    IR::PropertySymOpnd * propertySymOpnd = nullptr;
    StackSym * stackSym = ObjectTemp::GetStackSym(opnd, &propertySymOpnd);

    // It is okay to not get the var equiv sym here, as use of a type specialized sym is not use of the temp object
    // so no need to add mark temp bailout.
    // TempObjectSyms doesn't contain any type spec sym, so we will get false here for all type spec syms.
    if (stackSym && globOptData.maybeTempObjectSyms->Test(stackSym->m_id))
    {
        if (instr->HasBailOutInfo())
        {
            // Reuse the existing bailout; just add the mark-temp-object bit.
            instr->SetBailOutKind(instr->GetBailOutKind() | IR::BailOutMarkTempObject);
        }
        else
        {
            // Only insert the pre-op bailout if it is not a direct (type-check protected)
            // field access; don't check the dst yet.
            // SetTypeCheckBailout will clear this out if it is direct field access.
            if (isDst
                || (instr->m_opcode == Js::OpCode::FromVar && !opnd->GetValueType().IsPrimitive())
                || propertySymOpnd == nullptr
                || !propertySymOpnd->IsTypeCheckProtected())
            {
                // May replace 'instr' with a bailout-carrying version.
                this->GenerateBailAtOperation(&instr, IR::BailOutMarkTempObject);
            }
        }

        if (!opnd->IsRegOpnd() && (!isDst || (globOptData.canStoreTempObjectSyms && globOptData.canStoreTempObjectSyms->Test(stackSym->m_id))))
        {
            // If this opnd is a dst, that means that the object pointer is a stack object,
            // and we can store temp object/number on it.
            // If the opnd is a src, that means that the object pointer may be a stack object
            // so the load may be a temp object/number and we need to track its use.

            // Don't mark start of indir as can store temp, because we don't actually know
            // what it is assigning to.
            if (!isDst || !opnd->IsIndirOpnd())
            {
                opnd->SetCanStoreTemp();
            }

            if (propertySymOpnd)
            {
                // Track initfld of stack literals
                if (isDst && instr->m_opcode == Js::OpCode::InitFld)
                {
                    const Js::PropertyId propertyId = propertySymOpnd->m_sym->AsPropertySym()->m_propertyId;

                    // We don't need to track numeric properties init
                    if (!this->func->GetThreadContextInfo()->IsNumericProperty(propertyId))
                    {
                        DebugOnly(bool found = false);
                        // Advance the literal's init-field cursor; remove the entry once
                        // all fields have been initialized (the lambda returns true).
                        globOptData.stackLiteralInitFldDataMap->RemoveIf(stackSym,
                            [&](StackSym * key, StackLiteralInitFldData & data)
                            {
                                DebugOnly(found = true);
                                Assert(key == stackSym);
                                Assert(data.currentInitFldCount < data.propIds->count);

                                if (data.propIds->elements[data.currentInitFldCount] != propertyId)
                                {
#if DBG
                                    // Out-of-order InitFld should only happen for duplicates
                                    // of a property already initialized earlier.
                                    bool duplicate = false;
                                    for (uint i = 0; i < data.currentInitFldCount; i++)
                                    {
                                        if (data.propIds->elements[i] == propertyId)
                                        {
                                            duplicate = true;
                                            break;
                                        }
                                    }
                                    Assert(duplicate);
#endif
                                    // duplicate initialization
                                    return false;
                                }
                                bool finished = (++data.currentInitFldCount == data.propIds->count);
#if DBG
                                if (finished)
                                {
                                    // We can still track the finished stack literal InitFld lexically.
                                    this->finishedStackLiteralInitFld->Set(stackSym->m_id);
                                }
#endif
                                return finished;
                            });
                        // We might still see InitFld even when we have finished with all the property Ids
                        // because of duplicate entries at the end
                        Assert(found || finishedStackLiteralInitFld->Test(stackSym->m_id));
                    }
                }
            }
        }
    }
    return instr;
}
// Kills optimizer state that cannot survive a generator yield: a yield can run
// arbitrary code before the generator resumes, so values, live fields, and all
// type-specialized liveness must be invalidated. Var liveness and bailout data
// are deliberately preserved (see notes below).
void
GlobOpt::KillStateForGeneratorYield()
{
    GlobOptBlockData* globOptData = &this->currentBlock->globOptData;

    /*
    TODO[generators][ianhall]: Do a ToVar on any typespec'd syms before the bailout so that we can enable typespec in generators without bailin having to restore typespec'd values
    FOREACH_BITSET_IN_SPARSEBV(symId, globOptData->liveInt32Syms)
    {
        this->ToVar(instr, , this->currentBlock, , );
    }
    NEXT_BITSET_IN_SPARSEBV;

    FOREACH_BITSET_IN_SPARSEBV(symId, globOptData->liveInt32Syms)
    {
        this->ToVar(instr, , this->currentBlock, , );
    }
    NEXT_BITSET_IN_SPARSEBV;
    */

    // Demote every tracked value to a fresh generic value with only a "likely"
    // version of its old type: the concrete value is unknown after the yield.
    FOREACH_GLOBHASHTABLE_ENTRY(bucket, globOptData->symToValueMap)
    {
        ValueType type = bucket.element->GetValueInfo()->Type().ToLikely();
        bucket.element = this->NewGenericValue(type);
    }
    NEXT_GLOBHASHTABLE_ENTRY;

    globOptData->exprToValueMap->ClearAll();
    globOptData->liveFields->ClearAll();
    globOptData->liveArrayValues->ClearAll();
    if (globOptData->maybeWrittenTypeSyms)
    {
        globOptData->maybeWrittenTypeSyms->ClearAll();
    }
    globOptData->isTempSrc->ClearAll();
    // All type-specialized liveness is killed; resumed values are vars.
    globOptData->liveInt32Syms->ClearAll();
    globOptData->liveLossyInt32Syms->ClearAll();
    globOptData->liveFloat64Syms->ClearAll();

    // SIMD_JS
    globOptData->liveSimd128F4Syms->ClearAll();
    globOptData->liveSimd128I4Syms->ClearAll();

    if (globOptData->hoistableFields)
    {
        globOptData->hoistableFields->ClearAll();
    }
    // Keep globOptData->liveVarSyms as is
    // Keep globOptData->argObjSyms as is

    // MarkTemp should be disabled for generator functions for now
    Assert(globOptData->maybeTempObjectSyms == nullptr || globOptData->maybeTempObjectSyms->IsEmpty());
    Assert(globOptData->canStoreTempObjectSyms == nullptr || globOptData->canStoreTempObjectSyms->IsEmpty());

    globOptData->valuesToKillOnCalls->Clear();
    if (globOptData->inductionVariables)
    {
        globOptData->inductionVariables->Clear();
    }
    if (globOptData->availableIntBoundChecks)
    {
        globOptData->availableIntBoundChecks->Clear();
    }

    // Keep bailout data as is
    globOptData->hasCSECandidates = false;
}
  19152. LoopCount *
  19153. GlobOpt::GetOrGenerateLoopCountForMemOp(Loop *loop)
  19154. {
  19155. LoopCount *loopCount = loop->loopCount;
  19156. if (loopCount && !loopCount->HasGeneratedLoopCountSym())
  19157. {
  19158. Assert(loop->bailOutInfo);
  19159. EnsureBailTarget(loop);
  19160. GenerateLoopCountPlusOne(loop, loopCount);
  19161. }
  19162. return loopCount;
  19163. }
// Computes (and caches per 'unroll' factor) the total induction-variable change
// for a memop: loopCount * unroll. Emitted instructions go before
// 'insertBeforeInstr', or at the top of the landing pad when it is null.
IR::Opnd *
GlobOpt::GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr)
{
    LoopCount *loopCount = loop->loopCount;
    IR::Opnd *sizeOpnd = nullptr;
    Assert(loopCount);
    Assert(loop->memOpInfo->inductionVariableOpndPerUnrollMap);
    // Cache hit: this unroll factor was already materialized.
    if (loop->memOpInfo->inductionVariableOpndPerUnrollMap->TryGetValue(unroll, &sizeOpnd))
    {
        return sizeOpnd;
    }
    Func *localFunc = loop->GetFunc();
    const auto InsertInstr = [&](IR::Instr *instr)
    {
        if (insertBeforeInstr == nullptr)
        {
            loop->landingPad->InsertAfter(instr);
        }
        else
        {
            insertBeforeInstr->InsertBefore(instr);
        }
    };
    if (loopCount->LoopCountMinusOneSym())
    {
        // Loop count only known at runtime: use the generated loop-count sym
        // (already count-minus-one + 1, see GenerateLoopCountPlusOne).
        IRType type = loopCount->LoopCountSym()->GetType();
        IR::RegOpnd *loopCountOpnd = IR::RegOpnd::New(loopCount->LoopCountSym(), type, localFunc);
        sizeOpnd = loopCountOpnd;
        if (unroll != 1)
        {
            // NOTE(review): this->func is used here while the surrounding code
            // uses localFunc (loop->GetFunc()) — presumably equivalent in this
            // context, but worth confirming.
            sizeOpnd = IR::RegOpnd::New(TyUint32, this->func);
            IR::Opnd *unrollOpnd = IR::IntConstOpnd::New(unroll, type, localFunc);
            InsertInstr(IR::Instr::New(Js::OpCode::Mul_I4,
                sizeOpnd,
                loopCountOpnd,
                unrollOpnd,
                localFunc));
        }
    }
    else
    {
        // Loop count is a compile-time constant; it is stored off by one, so add one.
        uint size = (loopCount->LoopCountMinusOneConstantValue() + 1) * unroll;
        sizeOpnd = IR::IntConstOpnd::New(size, IRType::TyUint32, localFunc);
    }
    // Remember the result for subsequent memops with the same unroll factor.
    loop->memOpInfo->inductionVariableOpndPerUnrollMap->Add(unroll, sizeOpnd);
    return sizeOpnd;
}
// Computes the starting index for a memop from the loop's index variable:
//   - adjusts by one when the induction direction and the "index already
//     changed before use" flag agree, and
//   - subtracts the total size when the induction variable decrements
//     (the memop always writes/reads upward from the start).
// The result is cached per (isIncremental, bIndexAlreadyChanged) combination.
IR::RegOpnd*
GlobOpt::GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr)
{
    IR::RegOpnd *startIndexOpnd = nullptr;
    Func *localFunc = loop->GetFunc();
    IRType type = indexOpnd->GetType();

    // Cache key packs both flags into a 2-bit index.
    const int cacheIndex = ((int)isInductionVariableChangeIncremental << 1) | (int)bIndexAlreadyChanged;
    if (loop->memOpInfo->startIndexOpndCache[cacheIndex])
    {
        return loop->memOpInfo->startIndexOpndCache[cacheIndex];
    }
    const auto InsertInstr = [&](IR::Instr *instr)
    {
        if (insertBeforeInstr == nullptr)
        {
            loop->landingPad->InsertAfter(instr);
        }
        else
        {
            insertBeforeInstr->InsertBefore(instr);
        }
    };
    startIndexOpnd = IR::RegOpnd::New(type, localFunc);

    // If the 2 flags are different we can simply use indexOpnd
    if (isInductionVariableChangeIncremental != bIndexAlreadyChanged)
    {
        InsertInstr(IR::Instr::New(Js::OpCode::Ld_A,
            startIndexOpnd,
            indexOpnd,
            localFunc));
    }
    else
    {
        // Otherwise add 1 to it
        InsertInstr(IR::Instr::New(Js::OpCode::Add_I4,
            startIndexOpnd,
            indexOpnd,
            IR::IntConstOpnd::New(1, type, localFunc, true),
            localFunc));
    }

    // Decrementing loop: the index ends low, so back up by the full size to
    // find where the memop region starts.
    if (!isInductionVariableChangeIncremental)
    {
        InsertInstr(IR::Instr::New(Js::OpCode::Sub_I4,
            startIndexOpnd,
            startIndexOpnd,
            sizeOpnd,
            localFunc));
    }
    loop->memOpInfo->startIndexOpndCache[cacheIndex] = startIndexOpnd;
    return startIndexOpnd;
}
  19263. IR::Instr*
  19264. GlobOpt::FindUpperBoundsCheckInstr(IR::Instr* fromInstr)
  19265. {
  19266. IR::Instr *upperBoundCheck = fromInstr;
  19267. do
  19268. {
  19269. upperBoundCheck = upperBoundCheck->m_prev;
  19270. Assert(upperBoundCheck);
  19271. Assert(!upperBoundCheck->IsLabelInstr());
  19272. } while (upperBoundCheck->m_opcode != Js::OpCode::BoundCheck);
  19273. return upperBoundCheck;
  19274. }
  19275. IR::Instr*
  19276. GlobOpt::FindArraySegmentLoadInstr(IR::Instr* fromInstr)
  19277. {
  19278. IR::Instr *headSegmentLengthLoad = fromInstr;
  19279. do
  19280. {
  19281. headSegmentLengthLoad = headSegmentLengthLoad->m_prev;
  19282. Assert(headSegmentLengthLoad);
  19283. Assert(!headSegmentLengthLoad->IsLabelInstr());
  19284. } while (headSegmentLengthLoad->m_opcode != Js::OpCode::LdIndir);
  19285. return headSegmentLengthLoad;
  19286. }
// Removes the original element-access instruction ('srcInstr', an Ld/StElemI
// that the memop replaced) together with the helper instructions that were
// emitted just for it (bound checks, head-segment loads, bail-on-not-array,
// NoImplicitCallUses). ByteCodeUses instructions are kept, and srcInstr itself
// is converted to ByteCodeUses to preserve bailout liveness.
void
GlobOpt::RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block)
{
    Assert(srcInstr && (srcInstr->m_opcode == Js::OpCode::LdElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict));
    Assert(memopInstr && (memopInstr->m_opcode == Js::OpCode::Memcopy || memopInstr->m_opcode == Js::OpCode::Memset));
    Assert(block);

    // A store maps to the memop's dst; a load maps to its src1.
    const bool isDst = srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict;
    IR::RegOpnd* opnd = (isDst ? memopInstr->GetDst() : memopInstr->GetSrc1())->AsIndirOpnd()->GetBaseOpnd();
    IR::ArrayRegOpnd* arrayOpnd = opnd->IsArrayRegOpnd() ? opnd->AsArrayRegOpnd() : nullptr;

    // 'topInstr' walks upward to the first helper instruction belonging to srcInstr;
    // everything in (topInstr, srcInstr) will then be deleted.
    IR::Instr* topInstr = srcInstr;

    if (srcInstr->extractedUpperBoundCheckWithoutHoisting)
    {
        IR::Instr *upperBoundCheck = FindUpperBoundsCheckInstr(srcInstr);
        Assert(upperBoundCheck && upperBoundCheck != srcInstr);
        topInstr = upperBoundCheck;
    }

    if (srcInstr->loadedArrayHeadSegmentLength && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        IR::Instr *arrayLoadSegmentHeadLength = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHeadLength);
        topInstr = arrayLoadSegmentHeadLength;
        arrayOpnd->RemoveHeadSegmentLengthSym();
    }

    if (srcInstr->loadedArrayHeadSegment && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        IR::Instr *arrayLoadSegmentHead = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHead);
        topInstr = arrayLoadSegmentHead;
        arrayOpnd->RemoveHeadSegmentSym();
    }

    // If no bounds check are present, simply look upward for instructions added for instrumentation
    if(topInstr == srcInstr)
    {
        bool checkPrev = true;
        while (checkPrev)
        {
            switch (topInstr->m_prev->m_opcode)
            {
            case Js::OpCode::BailOnNotArray:
            case Js::OpCode::NoImplicitCallUses:
            case Js::OpCode::ByteCodeUses:
                topInstr = topInstr->m_prev;
                checkPrev = !!topInstr->m_prev;
                break;
            default:
                checkPrev = false;
                break;
            }
        }
    }

    // Delete the helper range [topInstr, srcInstr), keeping ByteCodeUses for
    // bailout bookkeeping.
    while (topInstr != srcInstr)
    {
        IR::Instr* removeInstr = topInstr;
        topInstr = topInstr->m_next;
        Assert(
            removeInstr->m_opcode == Js::OpCode::BailOnNotArray ||
            removeInstr->m_opcode == Js::OpCode::NoImplicitCallUses ||
            removeInstr->m_opcode == Js::OpCode::ByteCodeUses ||
            removeInstr->m_opcode == Js::OpCode::LdIndir ||
            removeInstr->m_opcode == Js::OpCode::BoundCheck
        );
        if (removeInstr->m_opcode != Js::OpCode::ByteCodeUses)
        {
            block->RemoveInstr(removeInstr);
        }
    }

    // Keep srcInstr's byte-code uses alive instead of deleting it outright.
    this->ConvertToByteCodeUses(srcInstr);
}
  19355. void
  19356. GlobOpt::GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType)
  19357. {
  19358. Assert(instr && (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict));
  19359. IR::Opnd* arrayOpnd = instr->m_opcode == Js::OpCode::LdElemI_A ? instr->GetSrc1() : instr->GetDst();
  19360. Assert(arrayOpnd->IsIndirOpnd());
  19361. IR::IndirOpnd* indirArrayOpnd = arrayOpnd->AsIndirOpnd();
  19362. IR::RegOpnd* baseOpnd = (IR::RegOpnd*)indirArrayOpnd->GetBaseOpnd();
  19363. IR::RegOpnd* indexOpnd = (IR::RegOpnd*)indirArrayOpnd->GetIndexOpnd();
  19364. Assert(baseOpnd);
  19365. Assert(indexOpnd);
  19366. // Process Out Params
  19367. base = baseOpnd;
  19368. index = indexOpnd;
  19369. arrayType = indirArrayOpnd->GetType();
  19370. }
// Emits a Memset/Memcopy instruction for a validated memop candidate, inserted
// before the loop's bail target, and removes the original element-access
// instruction(s) it replaces. 'emitData' carries the candidate, the StElem
// (and for memcopy the LdElem) instruction, the bailout kind, and induction
// variable direction info.
void
GlobOpt::EmitMemop(Loop * loop, LoopCount *loopCount, const MemOpEmitData* emitData)
{
    Assert(emitData);
    Assert(emitData->candidate);
    Assert(emitData->stElemInstr);
    Assert(emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A || emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A_Strict);
    IR::BailOutKind bailOutKind = emitData->bailOutKind;

    const byte unroll = emitData->inductionVar.unroll;
    Assert(unroll == 1);
    const bool isInductionVariableChangeIncremental = emitData->inductionVar.isIncremental;
    const bool bIndexAlreadyChanged = emitData->candidate->bIndexAlreadyChanged;

    IR::RegOpnd *baseOpnd = nullptr;
    IR::RegOpnd *indexOpnd = nullptr;
    IRType dstType;
    GetMemOpSrcInfo(loop, emitData->stElemInstr, baseOpnd, indexOpnd, dstType);

    Func *localFunc = loop->GetFunc();

    // Handle bailout info
    EnsureBailTarget(loop);
    Assert(bailOutKind != IR::BailOutInvalid);

    // Keep only Array bits bailOuts. Consider handling these bailouts instead of simply ignoring them
    bailOutKind &= IR::BailOutForArrayBits;

    // Add our custom bailout to handle Op_MemCopy return value.
    bailOutKind |= IR::BailOutOnMemOpError;
    BailOutInfo *const bailOutInfo = loop->bailOutInfo;
    Assert(bailOutInfo);

    IR::Instr *insertBeforeInstr = bailOutInfo->bailOutInstr;
    Assert(insertBeforeInstr);

    // Size = loopCount * unroll; startIndex accounts for direction and pre/post increment.
    IR::Opnd *sizeOpnd = GenerateInductionVariableChangeForMemOp(loop, unroll, insertBeforeInstr);
    IR::RegOpnd *startIndexOpnd = GenerateStartIndexOpndForMemop(loop, indexOpnd, sizeOpnd, isInductionVariableChangeIncremental, bIndexAlreadyChanged, insertBeforeInstr);
    IR::IndirOpnd* dstOpnd = IR::IndirOpnd::New(baseOpnd, startIndexOpnd, dstType, localFunc);

    IR::Opnd *src1;
    const bool isMemset = emitData->candidate->IsMemSet();

    // Get the source according to the memop type
    if (isMemset)
    {
        // Memset source: either the sym holding the fill value, or an address
        // operand for a constant fill value.
        MemSetEmitData* data = (MemSetEmitData*)emitData;
        const Loop::MemSetCandidate* candidate = data->candidate->AsMemSet();
        if (candidate->srcSym)
        {
            IR::RegOpnd* regSrc = IR::RegOpnd::New(candidate->srcSym, candidate->srcSym->GetType(), func);
            regSrc->SetIsJITOptimizedReg(true);
            src1 = regSrc;
        }
        else
        {
            src1 = IR::AddrOpnd::New(candidate->constant.ToVar(localFunc), IR::AddrOpndKindConstantAddress, localFunc);
        }
    }
    else
    {
        // Memcopy source: an indir on the load's base array, sharing the same
        // start index as the destination (indices were validated to match).
        Assert(emitData->candidate->IsMemCopy());

        MemCopyEmitData* data = (MemCopyEmitData*)emitData;
        Assert(data->ldElemInstr);
        Assert(data->ldElemInstr->m_opcode == Js::OpCode::LdElemI_A);

        IR::RegOpnd *srcBaseOpnd = nullptr;
        IR::RegOpnd *srcIndexOpnd = nullptr;
        IRType srcType;
        GetMemOpSrcInfo(loop, data->ldElemInstr, srcBaseOpnd, srcIndexOpnd, srcType);
        Assert(GetVarSymID(srcIndexOpnd->GetStackSym()) == GetVarSymID(indexOpnd->GetStackSym()));

        src1 = IR::IndirOpnd::New(srcBaseOpnd, startIndexOpnd, srcType, localFunc);
    }

    // Generate memcopy
    IR::Instr* memopInstr = IR::BailOutInstr::New(isMemset ? Js::OpCode::Memset : Js::OpCode::Memcopy, bailOutKind, bailOutInfo, localFunc);
    memopInstr->SetDst(dstOpnd);
    memopInstr->SetSrc1(src1);
    memopInstr->SetSrc2(sizeOpnd);
    insertBeforeInstr->InsertBefore(memopInstr);

#if DBG_DUMP
    if (DO_MEMOP_TRACE())
    {
        char valueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseOpnd->GetValueType().ToString(valueTypeStr);
        const int loopCountBufSize = 16;
        char16 loopCountBuf[loopCountBufSize];
        if (loopCount->LoopCountMinusOneSym())
        {
            swprintf_s(loopCountBuf, _u("s%u"), loopCount->LoopCountMinusOneSym()->m_id);
        }
        else
        {
            swprintf_s(loopCountBuf, _u("%u"), loopCount->LoopCountMinusOneConstantValue() + 1);
        }
        if (isMemset)
        {
            const Loop::MemSetCandidate* candidate = emitData->candidate->AsMemSet();
            const int constBufSize = 32;
            char16 constBuf[constBufSize];
            if (candidate->srcSym)
            {
                swprintf_s(constBuf, _u("s%u"), candidate->srcSym->m_id);
            }
            else
            {
                switch (candidate->constant.type)
                {
                case TyInt8:
                case TyInt16:
                case TyInt32:
                case TyInt64:
                    swprintf_s(constBuf, sizeof(IntConstType) == 8 ? _u("%lld") : _u("%d"), candidate->constant.u.intConst.value);
                    break;
                case TyFloat32:
                case TyFloat64:
                    swprintf_s(constBuf, _u("%.4f"), candidate->constant.u.floatConst.value);
                    break;
                case TyVar:
                    swprintf_s(constBuf, sizeof(Js::Var) == 8 ? _u("0x%.16llX") : _u("0x%.8X"), candidate->constant.u.varConst.value);
                    break;
                default:
                    AssertMsg(false, "Unsupported constant type");
                    swprintf_s(constBuf, _u("Unknown"));
                    break;
                }
            }
            TRACE_MEMOP_PHASE(MemSet, loop, emitData->stElemInstr,
                _u("ValueType: %S, Base: s%u, Index: s%u, Constant: %s, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                valueTypeStr,
                candidate->base,
                candidate->index,
                constBuf,
                loopCountBuf,
                bIndexAlreadyChanged);
        }
        else
        {
            const Loop::MemCopyCandidate* candidate = emitData->candidate->AsMemCopy();
            TRACE_MEMOP_PHASE(MemCopy, loop, emitData->stElemInstr,
                _u("ValueType: %S, StBase: s%u, Index: s%u, LdBase: s%u, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                valueTypeStr,
                candidate->base,
                candidate->index,
                candidate->ldBase,
                loopCountBuf,
                bIndexAlreadyChanged);
        }
    }
#endif

    // The memop replaces the original element accesses: remove the store
    // (and the load for memcopy) along with their helper instructions.
    RemoveMemOpSrcInstr(memopInstr, emitData->stElemInstr, emitData->block);
    if (!isMemset)
    {
        RemoveMemOpSrcInstr(memopInstr, ((MemCopyEmitData*)emitData)->ldElemInstr, emitData->block);
    }
}
  19515. bool
  19516. GlobOpt::InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, MemSetEmitData* emitData, bool& errorInInstr)
  19517. {
  19518. Assert(emitData && emitData->candidate && emitData->candidate->IsMemSet());
  19519. Loop::MemSetCandidate* candidate = (Loop::MemSetCandidate*)emitData->candidate;
  19520. if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
  19521. {
  19522. if (instr->GetDst()->IsIndirOpnd()
  19523. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base)
  19524. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  19525. )
  19526. {
  19527. Assert(instr->IsProfiledInstr());
  19528. emitData->stElemInstr = instr;
  19529. emitData->bailOutKind = instr->GetBailOutKind();
  19530. return true;
  19531. }
  19532. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan StElemI_A detected"));
  19533. errorInInstr = true;
  19534. }
  19535. else if (instr->m_opcode == Js::OpCode::LdElemI_A)
  19536. {
  19537. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan LdElemI_A detected"));
  19538. errorInInstr = true;
  19539. }
  19540. return false;
  19541. }
  19542. bool
  19543. GlobOpt::InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, MemCopyEmitData* emitData, bool& errorInInstr)
  19544. {
  19545. Assert(emitData && emitData->candidate && emitData->candidate->IsMemCopy());
  19546. Loop::MemCopyCandidate* candidate = (Loop::MemCopyCandidate*)emitData->candidate;
  19547. if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
  19548. {
  19549. if (
  19550. instr->GetDst()->IsIndirOpnd() &&
  19551. (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base) &&
  19552. (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  19553. )
  19554. {
  19555. Assert(instr->IsProfiledInstr());
  19556. emitData->stElemInstr = instr;
  19557. emitData->bailOutKind = instr->GetBailOutKind();
  19558. // Still need to find the LdElem
  19559. return false;
  19560. }
  19561. TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan StElemI_A detected"));
  19562. errorInInstr = true;
  19563. }
  19564. else if (instr->m_opcode == Js::OpCode::LdElemI_A)
  19565. {
  19566. if (
  19567. emitData->stElemInstr &&
  19568. instr->GetSrc1()->IsIndirOpnd() &&
  19569. (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->ldBase) &&
  19570. (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  19571. )
  19572. {
  19573. Assert(instr->IsProfiledInstr());
  19574. emitData->ldElemInstr = instr;
  19575. ValueType stValueType = emitData->stElemInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
  19576. ValueType ldValueType = emitData->ldElemInstr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
  19577. if (stValueType != ldValueType)
  19578. {
  19579. #if DBG_DUMP
  19580. char16 stValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  19581. stValueType.ToString(stValueTypeStr);
  19582. char16 ldValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  19583. ldValueType.ToString(ldValueTypeStr);
  19584. TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("for mismatch in Load(%s) and Store(%s) value type"), ldValueTypeStr, stValueTypeStr);
  19585. #endif
  19586. errorInInstr = true;
  19587. return false;
  19588. }
  19589. // We found both instruction for this candidate
  19590. return true;
  19591. }
  19592. TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan LdElemI_A detected"));
  19593. errorInInstr = true;
  19594. }
  19595. return false;
  19596. }
// The caller is responsible to free the memory allocated between inOrderEmitData[iEmitData -> end]
// Walks the loop body backward, matching each recorded memop candidate against the
// actual StElem/LdElem instructions, and fills inOrderEmitData (from the end toward
// the front) with one emit-data entry per validated candidate.
// On entry iEmitData is the candidate count; on a successful return it has been
// decremented to 0. Returns false as soon as any candidate fails validation; in that
// case iEmitData marks the first slot of inOrderEmitData that was actually written.
bool
GlobOpt::ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) MemOpEmitData** inOrderEmitData, int& iEmitData)
{
    AnalysisAssert(iEmitData == (int)loop->memOpInfo->candidates->Count());
    // We iterate over the second block of the loop only. MemOp Works only if the loop has exactly 2 blocks
    Assert(loop->blockList.HasTwo());
    Loop::MemOpList::Iterator iter(loop->memOpInfo->candidates);
    BasicBlock* bblock = loop->blockList.Head()->next;
    Loop::MemOpCandidate* candidate = nullptr;
    MemOpEmitData* emitData = nullptr;
    // Iterate backward because the list of candidate is reversed
    FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, bblock)
    {
        if (!candidate)
        {
            // Time to check next candidate
            if (!iter.Next())
            {
                // We have been through the whole list of candidates, finish
                break;
            }
            candidate = iter.Data();
            if (!candidate)
            {
                continue;
            }
            // Common check for memset and memcopy
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
            // Get the inductionVariable changeInfo
            if (!loop->memOpInfo->inductionVariableChangeInfoMap->TryGetValue(candidate->index, &inductionVariableChangeInfo))
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): no induction variable"), candidate->base);
                return false;
            }
            // The index must advance exactly as often as the candidate was observed,
            // otherwise the element count would not match the loop count.
            if (inductionVariableChangeInfo.unroll != candidate->count)
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): not matching unroll count"), candidate->base);
                return false;
            }
            if (candidate->IsMemSet())
            {
                Assert(!PHASE_OFF(Js::MemSetPhase, this->func));
                emitData = JitAnew(this->alloc, MemSetEmitData);
            }
            else
            {
                Assert(!PHASE_OFF(Js::MemCopyPhase, this->func));
                // Specific check for memcopy
                Assert(candidate->IsMemCopy());
                Loop::MemCopyCandidate* memcopyCandidate = candidate->AsMemCopy();
                // A memcopy needs a valid store base, a valid load base, and the load
                // must have been seen exactly as many times as the store.
                if (memcopyCandidate->base == Js::Constants::InvalidSymID
                    || memcopyCandidate->ldBase == Js::Constants::InvalidSymID
                    || (memcopyCandidate->ldCount != memcopyCandidate->count))
                {
                    TRACE_MEMOP_PHASE(MemCopy, loop, nullptr, _u("(s%d): not matching ldElem and stElem"), candidate->base);
                    return false;
                }
                emitData = JitAnew(this->alloc, MemCopyEmitData);
            }
            Assert(emitData);
            emitData->block = bblock;
            emitData->inductionVar = inductionVariableChangeInfo;
            emitData->candidate = candidate;
        }
        // Match the current instruction against the pending candidate.
        bool errorInInstr = false;
        bool candidateFound = candidate->IsMemSet() ?
            InspectInstrForMemSetCandidate(loop, instr, (MemSetEmitData*)emitData, errorInInstr)
            : InspectInstrForMemCopyCandidate(loop, instr, (MemCopyEmitData*)emitData, errorInInstr);
        if (errorInInstr)
        {
            // The loop body contains an instruction incompatible with this candidate;
            // discard the partially-filled emit data and give up on the whole loop.
            JitAdelete(this->alloc, emitData);
            return false;
        }
        if (candidateFound)
        {
            AnalysisAssert(iEmitData > 0);
            if (iEmitData == 0)
            {
                // Explicit for OACR
                break;
            }
            // Candidates are visited in reverse, so fill the output array back-to-front
            // to restore source order, then move on to the next candidate.
            inOrderEmitData[--iEmitData] = emitData;
            candidate = nullptr;
            emitData = nullptr;
        }
    } NEXT_INSTR_BACKWARD_IN_BLOCK;
    // If the candidate iterator is still valid, some candidates never matched any
    // instruction in the block.
    if (iter.IsValid())
    {
        TRACE_MEMOP(loop, nullptr, _u("Candidates not found in loop while validating"));
        return false;
    }
    return true;
}
  19691. void
  19692. GlobOpt::ProcessMemOp()
  19693. {
  19694. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  19695. {
  19696. if (HasMemOp(loop))
  19697. {
  19698. const int candidateCount = loop->memOpInfo->candidates->Count();
  19699. Assert(candidateCount > 0);
  19700. LoopCount * loopCount = GetOrGenerateLoopCountForMemOp(loop);
  19701. // If loopCount is not available we can not continue with memop
  19702. if (!loopCount || !(loopCount->LoopCountMinusOneSym() || loopCount->LoopCountMinusOneConstantValue()))
  19703. {
  19704. TRACE_MEMOP(loop, nullptr, _u("MemOp skipped for no loop count"));
  19705. loop->doMemOp = false;
  19706. loop->memOpInfo->candidates->Clear();
  19707. continue;
  19708. }
  19709. // The list is reversed, check them and place them in order in the following array
  19710. MemOpEmitData** inOrderCandidates = JitAnewArray(this->alloc, MemOpEmitData*, candidateCount);
  19711. int i = candidateCount;
  19712. if (ValidateMemOpCandidates(loop, inOrderCandidates, i))
  19713. {
  19714. Assert(i == 0);
  19715. // Process the valid MemOp candidate in order.
  19716. for (; i < candidateCount; ++i)
  19717. {
  19718. // Emit
  19719. EmitMemop(loop, loopCount, inOrderCandidates[i]);
  19720. JitAdelete(this->alloc, inOrderCandidates[i]);
  19721. }
  19722. }
  19723. else
  19724. {
  19725. Assert(i != 0);
  19726. for (; i < candidateCount; ++i)
  19727. {
  19728. JitAdelete(this->alloc, inOrderCandidates[i]);
  19729. }
  19730. // One of the memop candidates did not validate. Do not emit for this loop.
  19731. loop->doMemOp = false;
  19732. loop->memOpInfo->candidates->Clear();
  19733. }
  19734. // Free memory
  19735. JitAdeleteArray(this->alloc, candidateCount, inOrderCandidates);
  19736. }
  19737. } NEXT_LOOP_EDITING;
  19738. }
// Specialization letting Value* be stored in value-number-keyed JsUtil dictionaries:
// the key of a Value is simply its value number.
template<>
ValueNumber JsUtil::ValueToKey<ValueNumber, Value *>::ToKey(Value *const &value)
{
    Assert(value);
    return value->GetValueNumber();
}