GlobOpt.cpp 827 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
1822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
  6. #if ENABLE_DEBUG_CONFIG_OPTIONS
  7. #define TESTTRACE_PHASE_INSTR(phase, instr, ...) \
  8. if(PHASE_TESTTRACE(phase, this->func)) \
  9. { \
  10. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
  11. Output::Print( \
  12. _u("Testtrace: %s function %s (%s): "), \
  13. Js::PhaseNames[phase], \
  14. instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
  15. instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
  16. Output::Print(__VA_ARGS__); \
  17. Output::Flush(); \
  18. }
  19. #else // ENABLE_DEBUG_CONFIG_OPTIONS
  20. #define TESTTRACE_PHASE_INSTR(phase, instr, ...)
  21. #endif // ENABLE_DEBUG_CONFIG_OPTIONS
  22. #if ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  23. #define GOPT_TRACE_OPND(opnd, ...) \
  24. if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
  25. { \
  26. Output::Print(_u("TRACE: ")); \
  27. opnd->Dump(); \
  28. Output::Print(_u(" : ")); \
  29. Output::Print(__VA_ARGS__); \
  30. Output::Flush(); \
  31. }
  32. #define GOPT_TRACE(...) \
  33. if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
  34. { \
  35. Output::Print(_u("TRACE: ")); \
  36. Output::Print(__VA_ARGS__); \
  37. Output::Flush(); \
  38. }
  39. #define GOPT_TRACE_INSTRTRACE(instr) \
  40. if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
  41. { \
  42. instr->Dump(); \
  43. Output::Flush(); \
  44. }
  45. #define GOPT_TRACE_INSTR(instr, ...) \
  46. if (PHASE_TRACE(Js::GlobOptPhase, this->func) && !this->IsLoopPrePass()) \
  47. { \
  48. Output::Print(_u("TRACE: ")); \
  49. Output::Print(__VA_ARGS__); \
  50. instr->Dump(); \
  51. Output::Flush(); \
  52. }
  53. #define GOPT_TRACE_BLOCK(block, before) \
  54. this->Trace(block, before); \
  55. Output::Flush();
  56. // TODO: OOP JIT, add back line number
  57. #define TRACE_PHASE_INSTR(phase, instr, ...) \
  58. if(PHASE_TRACE(phase, this->func)) \
  59. { \
  60. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
  61. Output::Print( \
  62. _u("Function %s (%s)"), \
  63. this->func->GetJITFunctionBody()->GetDisplayName(), \
  64. this->func->GetDebugNumberSet(debugStringBuffer)); \
  65. if(this->func->IsLoopBody()) \
  66. { \
  67. Output::Print(_u(", loop %u"), this->func->GetWorkItem()->GetLoopNumber()); \
  68. } \
  69. if(instr->m_func != this->func) \
  70. { \
  71. Output::Print( \
  72. _u(", Inlinee %s (%s)"), \
  73. instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
  74. instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
  75. } \
  76. Output::Print(_u(" - %s\n "), Js::PhaseNames[phase]); \
  77. instr->Dump(); \
  78. Output::Print(_u(" ")); \
  79. Output::Print(__VA_ARGS__); \
  80. Output::Flush(); \
  81. }
  82. #define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...) \
  83. if(CONFIG_FLAG(Verbose)) \
  84. { \
  85. TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
  86. }
  87. #define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) \
  88. TRACE_PHASE_INSTR(phase, instr, __VA_ARGS__); \
  89. TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
  90. #else // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  91. #define GOPT_TRACE(...)
  92. #define GOPT_TRACE_OPND(opnd, ...)
  93. #define GOPT_TRACE_INSTRTRACE(instr)
  94. #define GOPT_TRACE_INSTR(instr, ...)
  95. #define GOPT_TRACE_BLOCK(block, before)
  96. #define TRACE_PHASE_INSTR(phase, instr, ...)
  97. #define TRACE_PHASE_INSTR_VERBOSE(phase, instr, ...)
  98. #define TRACE_TESTTRACE_PHASE_INSTR(phase, instr, ...) TESTTRACE_PHASE_INSTR(phase, instr, __VA_ARGS__);
  99. #endif // ENABLE_DEBUG_CONFIG_OPTIONS && DBG_DUMP
  100. #if DBG_DUMP
  101. #define DO_MEMOP_TRACE() (PHASE_TRACE(Js::MemOpPhase, this->func) ||\
  102. PHASE_TRACE(Js::MemSetPhase, this->func) ||\
  103. PHASE_TRACE(Js::MemCopyPhase, this->func))
  104. #define DO_MEMOP_TRACE_PHASE(phase) (PHASE_TRACE(Js::MemOpPhase, this->func) || PHASE_TRACE(Js::phase ## Phase, this->func))
  105. #define OUTPUT_MEMOP_TRACE(loop, instr, ...) {\
  106. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];\
  107. Output::Print(15, _u("Function: %s%s, Loop: %u: "), this->func->GetJITFunctionBody()->GetDisplayName(), this->func->GetDebugNumberSet(debugStringBuffer), loop->GetLoopNumber());\
  108. Output::Print(__VA_ARGS__);\
  109. IR::Instr* __instr__ = instr;\
  110. if(__instr__) __instr__->DumpByteCodeOffset();\
  111. if(__instr__) Output::Print(_u(" (%s)"), Js::OpCodeUtil::GetOpCodeName(__instr__->m_opcode));\
  112. Output::Print(_u("\n"));\
  113. Output::Flush(); \
  114. }
  115. #define TRACE_MEMOP(loop, instr, ...) \
  116. if (DO_MEMOP_TRACE()) {\
  117. Output::Print(_u("TRACE MemOp:"));\
  118. OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
  119. }
  120. #define TRACE_MEMOP_VERBOSE(loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP(loop, instr, __VA_ARGS__)}
  121. #define TRACE_MEMOP_PHASE(phase, loop, instr, ...) \
  122. if (DO_MEMOP_TRACE_PHASE(phase))\
  123. {\
  124. Output::Print(_u("TRACE ") _u(#phase) _u(":"));\
  125. OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
  126. }
  127. #define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP_PHASE(phase, loop, instr, __VA_ARGS__)}
  128. #else
  129. #define DO_MEMOP_TRACE()
  130. #define DO_MEMOP_TRACE_PHASE(phase)
  131. #define OUTPUT_MEMOP_TRACE(loop, instr, ...)
  132. #define TRACE_MEMOP(loop, instr, ...)
  133. #define TRACE_MEMOP_VERBOSE(loop, instr, ...)
  134. #define TRACE_MEMOP_PHASE(phase, loop, instr, ...)
  135. #define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...)
  136. #endif
  137. class AutoRestoreVal
  138. {
  139. private:
  140. Value *const originalValue;
  141. Value *const tempValue;
  142. Value * *const valueRef;
  143. public:
  144. AutoRestoreVal(Value *const originalValue, Value * *const tempValueRef)
  145. : originalValue(originalValue), tempValue(*tempValueRef), valueRef(tempValueRef)
  146. {
  147. }
  148. ~AutoRestoreVal()
  149. {
  150. if(*valueRef == tempValue)
  151. {
  152. *valueRef = originalValue;
  153. }
  154. }
  155. PREVENT_COPY(AutoRestoreVal);
  156. };
// GlobOpt constructor.
// Records the function being optimized and decides, up front, which
// optimizations are enabled for this compilation. Each do* flag below
// combines the static phase switches with dynamic profile information
// (optimizations that previously caused bailouts may have been disabled
// in the profile). Note the dependency chains: most specific flags
// require their more general parent flag (e.g. doDivIntTypeSpec requires
// doAggressiveIntTypeSpec, which requires doTypeSpec).
GlobOpt::GlobOpt(Func * func)
    : func(func),
    intConstantToStackSymMap(nullptr),
    intConstantToValueMap(nullptr),
    currentValue(FirstNewValueNumber),
    prePassLoop(nullptr),
    alloc(nullptr),
    isCallHelper(false),
    inInlinedBuiltIn(false),
    rootLoopPrePass(nullptr),
    noImplicitCallUsesToInsert(nullptr),
    valuesCreatedForClone(nullptr),
    valuesCreatedForMerge(nullptr),
    blockData(func),
    instrCountSinceLastCleanUp(0),
    isRecursiveCallOnLandingPad(false),
    updateInductionVariableValueNumber(false),
    isPerformingLoopBackEdgeCompensation(false),
    currentRegion(nullptr),
    changedSymsAfterIncBailoutCandidate(nullptr),
    // Master switch for all type specialization below.
    doTypeSpec(
        !IsTypeSpecPhaseOff(func)),
    doAggressiveIntTypeSpec(
        doTypeSpec &&
        DoAggressiveIntTypeSpec(func)),
    // Profile can disable aggressive int-mul specialization per function/loop body.
    doAggressiveMulIntTypeSpec(
        doTypeSpec &&
        !PHASE_OFF(Js::AggressiveMulIntTypeSpecPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsAggressiveMulIntTypeSpecDisabled(func->IsLoopBody()))),
    doDivIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsDivIntTypeSpecDisabled(func->IsLoopBody()))),
    doLossyIntTypeSpec(
        doTypeSpec &&
        DoLossyIntTypeSpec(func)),
    doFloatTypeSpec(
        doTypeSpec &&
        DoFloatTypeSpec(func)),
    // Array optimizations: all of the following hoists require the basic
    // array check hoist to be enabled.
    doArrayCheckHoist(
        DoArrayCheckHoist(func)),
    doArrayMissingValueCheckHoist(
        doArrayCheckHoist &&
        DoArrayMissingValueCheckHoist(func)),
    doArraySegmentHoist(
        doArrayCheckHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array), func)),
    doJsArraySegmentHoist(
        doArraySegmentHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array), func)),
    doArrayLengthHoist(
        doArrayCheckHoist &&
        DoArrayLengthHoist(func)),
    doEliminateArrayAccessHelperCall(
        doArrayCheckHoist &&
        !PHASE_OFF(Js::EliminateArrayAccessHelperCallPhase, func)),
    // Bound-check elimination/hoisting builds on relative int bounds tracking.
    doTrackRelativeIntBounds(
        doAggressiveIntTypeSpec &&
        DoPathDependentValues() &&
        !PHASE_OFF(Js::Phase::TrackRelativeIntBoundsPhase, func)),
    doBoundCheckElimination(
        doTrackRelativeIntBounds &&
        !PHASE_OFF(Js::Phase::BoundCheckEliminationPhase, func)),
    doBoundCheckHoist(
        doEliminateArrayAccessHelperCall &&
        doBoundCheckElimination &&
        DoConstFold() &&
        !PHASE_OFF(Js::Phase::BoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsBoundCheckHoistDisabled(func->IsLoopBody()))),
    doLoopCountBasedBoundCheckHoist(
        doBoundCheckHoist &&
        !PHASE_OFF(Js::Phase::LoopCountBasedBoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLoopCountBasedBoundCheckHoistDisabled(func->IsLoopBody()))),
    doPowIntIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsPowIntIntTypeSpecDisabled())),
    doTagChecks(
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTagCheckDisabled())),
    isAsmJSFunc(func->GetJITFunctionBody()->IsAsmJsMode())
{
}
// Runs one backward pass (Js::BackwardPhase or Js::DeadStorePhase) over the
// function, bracketed by the codegen phase begin/end tracing macros.
void
GlobOpt::BackwardPass(Js::Phase tag)
{
    BEGIN_CODEGEN_PHASE(this->func, tag);

    // ::BackwardPass is the global-namespace backward-pass driver; it takes a
    // back-pointer to this GlobOpt so it can share state with the forward pass.
    ::BackwardPass backwardPass(this->func, this, tag);
    backwardPass.Optimize();

    END_CODEGEN_PHASE(this->func, tag);
}
// Top-level driver for the global optimizer:
//   BackwardPhase -> ForwardPass -> DeadStorePhase -> TailDupPass.
// When glob opt is disabled for this function, only the dead-store pass runs
// (still required to compute live registers on back edges).
void
GlobOpt::Optimize()
{
    this->objectTypeSyms = nullptr;
    this->func->argInsCount = this->func->GetInParamsCount() - 1; //Don't include "this" pointer in the count.

    if (!func->DoGlobOpt())
    {
        this->lengthEquivBv = nullptr;
        this->argumentsEquivBv = nullptr;
        this->callerEquivBv = nullptr;

        // Still need to run the dead store phase to calculate the live reg on back edge
        this->BackwardPass(Js::DeadStorePhase);
        CannotAllocateArgumentsObjectOnStack();
        return;
    }

    // Inner scope: the arena allocators below are stack-local, so everything
    // allocated from this->alloc / this->tempAlloc is released when the scope
    // closes — i.e. before the final DeadStorePhase runs.
    {
        this->lengthEquivBv = this->func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::length, nullptr); // Used to kill live "length" properties
        this->argumentsEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::arguments, nullptr); // Used to kill live "arguments" properties
        this->callerEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::caller, nullptr); // Used to kill live "caller" properties

        // The backward phase needs the glob opt's allocator to allocate the propertyTypeValueMap
        // in GlobOpt::EnsurePropertyTypeValue and ranges of instructions where int overflow may be ignored.
        // (see BackwardPass::TrackIntUsage)
        PageAllocator * pageAllocator = this->func->m_alloc->GetPageAllocator();
        NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-GlobOpt"), pageAllocator, Js::Throw::OutOfMemory);
        this->alloc = &localAlloc;
        NoRecoverMemoryJitArenaAllocator localTempAlloc(_u("BE-GlobOpt temp"), pageAllocator, Js::Throw::OutOfMemory);
        this->tempAlloc = &localTempAlloc;

        // The forward passes use info (upwardExposedUses) from the backward pass. This info
        // isn't available for some of the symbols created during the backward pass, or the forward pass.
        // Keep track of the last symbol for which we're guaranteed to have data.
        this->maxInitialSymID = this->func->m_symTable->GetMaxSymID();
        this->BackwardPass(Js::BackwardPhase);
        this->ForwardPass();
    }
    this->BackwardPass(Js::DeadStorePhase);
    this->TailDupPass();
}
  282. bool GlobOpt::ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd)
  283. {
  284. Assert(indirOpnd);
  285. if(!indirOpnd->GetIndexOpnd())
  286. {
  287. return indirOpnd->GetOffset() >= 0;
  288. }
  289. IR::RegOpnd *const indexOpnd = indirOpnd->GetIndexOpnd();
  290. if(indexOpnd->m_sym->m_isNotInt)
  291. {
  292. // Typically, single-def or any sym-specific information for type-specialized syms should not be used because all of
  293. // their defs will not have been accounted for until after the forward pass. But m_isNotInt is only ever changed from
  294. // false to true, so it's okay in this case.
  295. return false;
  296. }
  297. StackSym *indexVarSym = indexOpnd->m_sym;
  298. if(indexVarSym->IsTypeSpec())
  299. {
  300. indexVarSym = indexVarSym->GetVarEquivSym(nullptr);
  301. Assert(indexVarSym);
  302. }
  303. else if(!IsLoopPrePass())
  304. {
  305. // Don't use single-def info or const flags for type-specialized syms, as all of their defs will not have been accounted
  306. // for until after the forward pass. Also, don't use the const flags in a loop prepass because the const flags may not
  307. // be up-to-date.
  308. StackSym *const indexSym = indexOpnd->m_sym;
  309. if(indexSym->IsIntConst())
  310. {
  311. return indexSym->GetIntConstValue() >= 0;
  312. }
  313. }
  314. Value *const indexValue = FindValue(indexVarSym);
  315. if(!indexValue)
  316. {
  317. // Treat it as Uninitialized, assume it's going to be valid
  318. return true;
  319. }
  320. ValueInfo *const indexValueInfo = indexValue->GetValueInfo();
  321. int32 indexConstantValue;
  322. if(indexValueInfo->TryGetIntConstantValue(&indexConstantValue))
  323. {
  324. return indexConstantValue >= 0;
  325. }
  326. if(indexValueInfo->IsUninitialized())
  327. {
  328. // Assume it's going to be valid
  329. return true;
  330. }
  331. return indexValueInfo->HasBeenNumber() && !indexValueInfo->HasBeenFloat();
  332. }
  333. //
  334. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  335. //
  336. ValueType GlobOpt::GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize)
  337. {
  338. ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
  339. ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
  340. if (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo())
  341. {
  342. ValueType resultType = instr->m_func->GetReadOnlyProfileInfo()->GetDivProfileInfo(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));
  343. if (resultType.IsLikelyInt())
  344. {
  345. if (specialize && src1ValueInfo && src2ValueInfo
  346. && ((src1ValueInfo->IsInt() && src2ValueInfo->IsInt()) ||
  347. (this->DoDivIntTypeSpec() && src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())))
  348. {
  349. return ValueType::GetInt(true);
  350. }
  351. return resultType;
  352. }
  353. // Consider: Checking that the sources are numbers.
  354. if (resultType.IsLikelyFloat())
  355. {
  356. return ValueType::Float;
  357. }
  358. return resultType;
  359. }
  360. int32 src1IntConstantValue;
  361. if(!src1ValueInfo || !src1ValueInfo->TryGetIntConstantValue(&src1IntConstantValue))
  362. {
  363. return ValueType::Number;
  364. }
  365. if (src1IntConstantValue == 1)
  366. {
  367. return ValueType::Float;
  368. }
  369. int32 src2IntConstantValue;
  370. if(!src2Val || !src2ValueInfo->TryGetIntConstantValue(&src2IntConstantValue))
  371. {
  372. return ValueType::Number;
  373. }
  374. if (src2IntConstantValue // Avoid divide by zero
  375. && !(src1IntConstantValue == 0x80000000 && src2IntConstantValue == -1) // Avoid integer overflow
  376. && (src1IntConstantValue % src2IntConstantValue) != 0)
  377. {
  378. return ValueType::Float;
  379. }
  380. return ValueType::Number;
  381. }
// The forward pass: walks every basic block in order, running OptBlock on
// each, then runs the MemOp transformation. All per-pass lookup tables are
// stack-allocated here and published to the instance through member pointers,
// which are nulled out again before returning (the tables die with this frame).
void
GlobOpt::ForwardPass()
{
    BEGIN_CODEGEN_PHASE(this->func, Js::ForwardPhase);

#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
    {
        this->func->DumpHeader();
    }
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::GlobOptPhase))
    {
        this->TraceSettings();
    }
#endif

    // GetConstantCount() gives us the right size to pick for the SparseArray, but we may need more if we've inlined
    // functions with constants. There will be a gap in the symbol numbering between the main constants and
    // the inlined ones, so we'll most likely need a new array chunk. Make the min size of the array chunks be 64
    // in case we have a main function with very few constants and a bunch of constants from inlined functions.
    this->byteCodeConstantValueArray = SparseArray<Value>::New(this->alloc, max(this->func->GetJITFunctionBody()->GetConstCount(), 64U));
    this->byteCodeConstantValueNumbersBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->tempBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->prePassCopyPropSym = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->byteCodeUses = nullptr;
    this->propertySymUse = nullptr;

    // changedSymsAfterIncBailoutCandidate helps track building incremental bailout in ForwardPass
    this->changedSymsAfterIncBailoutCandidate = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);

#if DBG
    this->byteCodeUsesBeforeOpt = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) && this->DoFunctionFieldCopyProp())
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Func: "));
        this->func->DumpFullFunctionName();
        Output::Print(_u("\n"));
    }
#endif

    // Stack-local tables, published via member pointers for the duration of
    // the pass (see the matching nulling-out below).
    OpndList localNoImplicitCallUsesToInsert(alloc);
    this->noImplicitCallUsesToInsert = &localNoImplicitCallUsesToInsert;
    IntConstantToStackSymMap localIntConstantToStackSymMap(alloc);
    this->intConstantToStackSymMap = &localIntConstantToStackSymMap;
    IntConstantToValueMap localIntConstantToValueMap(alloc);
    this->intConstantToValueMap = &localIntConstantToValueMap;
    AddrConstantToValueMap localAddrConstantToValueMap(alloc);
    this->addrConstantToValueMap = &localAddrConstantToValueMap;
    StringConstantToValueMap localStringConstantToValueMap(alloc);
    this->stringConstantToValueMap = &localStringConstantToValueMap;
    SymIdToInstrMap localPrePassInstrMap(alloc);
    this->prePassInstrMap = &localPrePassInstrMap;
    ValueSetByValueNumber localValuesCreatedForClone(alloc, 64);
    this->valuesCreatedForClone = &localValuesCreatedForClone;
    ValueNumberPairToValueMap localValuesCreatedForMerge(alloc, 64);
    this->valuesCreatedForMerge = &localValuesCreatedForMerge;

#if DBG
    BVSparse<JitArenaAllocator> localFinishedStackLiteralInitFld(alloc);
    this->finishedStackLiteralInitFld = &localFinishedStackLiteralInitFld;
#endif

    // Main loop: optimize each block (editing iteration, since OptBlock may
    // remove unreachable blocks).
    FOREACH_BLOCK_IN_FUNC_EDITING(block, this->func)
    {
        this->OptBlock(block);
    } NEXT_BLOCK_IN_FUNC_EDITING;

    if (!PHASE_OFF(Js::MemOpPhase, this->func))
    {
        ProcessMemOp();
    }

    // The local tables above go out of scope when this function returns;
    // clear the member pointers so nothing dangles.
    this->noImplicitCallUsesToInsert = nullptr;
    this->intConstantToStackSymMap = nullptr;
    this->intConstantToValueMap = nullptr;
    this->addrConstantToValueMap = nullptr;
    this->stringConstantToValueMap = nullptr;

#if DBG
    this->finishedStackLiteralInitFld = nullptr;
    uint freedCount = 0;
    uint spilledCount = 0;
#endif

    // Release per-block data; in DBG builds, count how many blocks had their
    // data freed vs. still referenced ("spilled").
    FOREACH_BLOCK_IN_FUNC(block, this->func)
    {
#if DBG
        if (block->GetDataUseCount() == 0)
        {
            freedCount++;
        }
        else
        {
            spilledCount++;
        }
#endif
        block->SetDataUseCount(0);
        if (block->cloneStrCandidates)
        {
            JitAdelete(this->alloc, block->cloneStrCandidates);
            block->cloneStrCandidates = nullptr;
        }
    } NEXT_BLOCK_IN_FUNC;

    // Make sure we free most of them.
    Assert(freedCount >= spilledCount);

    // this->alloc will be freed right after return, no need to free it here
    this->changedSymsAfterIncBailoutCandidate = nullptr;

    END_CODEGEN_PHASE(this->func, Js::ForwardPhase);
}
// Optimizes a single basic block:
//  - removes it if unreachable;
//  - for loop headers, runs the loop pre-pass (OptLoops), field PRE, and
//    re-optimizes the landing pad;
//  - merges predecessor value maps, optimizes each instruction, and finally
//    performs loop back-edge compensation so the type-specialization state on
//    the back edge matches the loop header's expectations.
void
GlobOpt::OptBlock(BasicBlock *block)
{
    if (this->func->m_fg->RemoveUnreachableBlock(block, this))
    {
        GOPT_TRACE(_u("Removing unreachable block #%d\n"), block->GetBlockNum());
        return;
    }

    Loop * loop = block->loop;
    if (loop && block->isLoopHeader)
    {
        // Run the loop pre-pass unless we're already inside it for this loop.
        if (loop != this->prePassLoop)
        {
            OptLoops(loop);
            if (!this->IsLoopPrePass() && DoFieldPRE(loop))
            {
                // Note: !IsLoopPrePass means this was a root loop pre-pass. FieldPre() is called once per loop.
                this->FieldPRE(loop);

                // Re-optimize the landing pad
                BasicBlock *landingPad = loop->landingPad;
                this->isRecursiveCallOnLandingPad = true;

                this->OptBlock(landingPad);

                this->isRecursiveCallOnLandingPad = false;
                // The recursive call changed currentBlock; point it back here.
                this->currentBlock = block;
            }
        }
    }

    this->currentBlock = block;
    PrepareLoopArrayCheckHoist();

    this->MergePredBlocksValueMaps(block);

    this->intOverflowCurrentlyMattersInRange = true;
    this->intOverflowDoesNotMatterRange = this->currentBlock->intOverflowDoesNotMatterRange;

    if (loop && DoFieldHoisting(loop))
    {
        if (block->isLoopHeader)
        {
            if (!this->IsLoopPrePass())
            {
                this->PrepareFieldHoisting(loop);
            }
            else if (loop == this->rootLoopPrePass)
            {
                this->PreparePrepassFieldHoisting(loop);
            }
        }
    }
    else
    {
        Assert(!TrackHoistableFields() || !HasHoistableFields(&this->blockData));
        if (!DoFieldCopyProp() && !DoFieldRefOpts())
        {
            this->KillAllFields(blockData.liveFields);
        }
    }

    this->tempAlloc->Reset();

    if(loop && block->isLoopHeader)
    {
        // Remember where this loop's value numbers start, for later queries.
        loop->firstValueNumberInLoop = this->currentValue;
    }

    GOPT_TRACE_BLOCK(block, true);

    // Optimize each instruction. Editing iteration: OptInstr returns the next
    // instruction to visit (it may remove or insert instructions).
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        GOPT_TRACE_INSTRTRACE(instr);

        // In debug-mode JIT, remember the aux-only bailout info before OptInstr
        // so we can tell afterwards whether it survived unchanged.
        BailOutInfo* oldBailOutInfo = nullptr;
        bool isCheckAuxBailoutNeeded = this->func->IsJitInDebugMode() && !this->IsLoopPrePass();
        if (isCheckAuxBailoutNeeded && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            oldBailOutInfo = instr->GetBailOutInfo();
            Assert(oldBailOutInfo);
        }
        bool isInstrRemoved = false;
        instrNext = this->OptInstr(instr, &isInstrRemoved);

        // If we still have instrs with only aux bail out, convert aux bail out back to regular bail out and fill it.
        // During OptInstr some instr can be moved out to a different block, in this case bailout info is going to be replaced
        // with e.g. loop bailout info which is filled as part of processing that block, thus we don't need to fill it here.
        if (isCheckAuxBailoutNeeded && !isInstrRemoved && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            if (instr->GetBailOutInfo() == oldBailOutInfo)
            {
                instr->PromoteAuxBailOut();
                FillBailOutInfo(block, instr->GetBailOutInfo());
            }
            else
            {
                AssertMsg(instr->GetBailOutInfo(), "With aux bailout, the bailout info should not be removed by OptInstr.");
            }
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;

    GOPT_TRACE_BLOCK(block, false);
    if (block->loop)
    {
        if (IsLoopPrePass())
        {
            if (DoBoundCheckHoist())
            {
                DetectUnknownChangesToInductionVariables(&block->globOptData);
            }
        }
        else
        {
            isPerformingLoopBackEdgeCompensation = true;

            Assert(this->tempBv->IsEmpty());
            BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);

            // On loop back-edges, we need to restore the state of the type specialized
            // symbols to that of the loop header.
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                if (succ->isLoopHeader && succ->loop->IsDescendentOrSelf(block->loop))
                {
                    BVSparse<JitArenaAllocator> *liveOnBackEdge = block->loop->regAlloc.liveOnBackEdgeSyms;

                    // Var in the loop header, but not live as var on the back-edge - convert back to var.
                    this->tempBv->Minus(block->loop->varSymsOnEntry, block->globOptData.liveVarSyms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToVar(this->tempBv, block);

                    // Lossy int in the loop header, and no int on the back-edge - need a lossy conversion to int
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int on the back-edge - need a lossless conversion to int
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, false /* lossy */);

                    // Float in the loop header, and no float on the back-edge - convert to float.
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block);

                    // SIMD_JS
                    // Compensate on backedge if sym is live on loop entry but not on backedge
                    this->tempBv->Minus(block->loop->simd128F4SymsOnEntry, block->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block, TySimd128F4, IR::BailOutSimd128F4Only);

                    this->tempBv->Minus(block->loop->simd128I4SymsOnEntry, block->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block, TySimd128I4, IR::BailOutSimd128I4Only);

                    // For ints and floats, go aggressive and type specialize in the landing pad any symbol which was specialized on
                    // entry to the loop body (in the loop header), and is still specialized on this tail, but wasn't specialized in
                    // the landing pad.

                    // Lossy int in the loop header and no int in the landing pad - need a lossy conversion to int
                    // (entry.lossyInt32 - landingPad.int32)
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->loop->landingPad->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int in the landing pad - need a lossless conversion to int
                    // ((entry.int32 - entry.lossyInt32) - (landingPad.int32 - landingPad.lossyInt32))
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(
                        block->loop->landingPad->globOptData.liveInt32Syms,
                        block->loop->landingPad->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, false /* lossy */);

                    // ((entry.float64 - landingPad.float64) & block.float64)
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->loop->landingPad->globOptData.liveFloat64Syms);
                    this->tempBv->And(block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block->loop->landingPad);

                    // SIMD_JS
                    // compensate on landingpad if live on loopEntry and Backedge.
                    this->tempBv->Minus(block->loop->simd128F4SymsOnEntry, block->loop->landingPad->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(block->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block->loop->landingPad, TySimd128F4, IR::BailOutSimd128F4Only);

                    this->tempBv->Minus(block->loop->simd128I4SymsOnEntry, block->loop->landingPad->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(block->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block->loop->landingPad, TySimd128I4, IR::BailOutSimd128I4Only);

                    // Now that we're done with the liveFields within this loop, trim the set to those syms
                    // that the backward pass told us were live out of the loop.
                    // This assumes we have no further need of the liveFields within the loop.
                    if (block->loop->liveOutFields)
                    {
                        block->globOptData.liveFields->And(block->loop->liveOutFields);
                    }
                }
            } NEXT_SUCCESSOR_BLOCK;

            this->tempBv->ClearAll();
            isPerformingLoopBackEdgeCompensation = false;
        }
    }

    block->globOptData.hasCSECandidates = this->blockData.hasCSECandidates;

#if DBG
    // The set of live lossy int32 syms should be a subset of all live int32 syms
    this->tempBv->And(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    // The set of live lossy int32 syms should be a subset of live var or float syms (var or float sym containing the lossless
    // value of the sym should be live)
    this->tempBv->Or(block->globOptData.liveVarSyms, block->globOptData.liveFloat64Syms);
    this->tempBv->And(block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    this->tempBv->ClearAll();
#endif
}
// Optimistic pre-pass over a loop (and, recursively via OptBlock, its nested loops).
// Saves/restores the pre-pass state (prePassLoop / rootLoopPrePass), resets or
// allocates the loop's per-pass bit-vectors, then walks every block in the loop.
// Note: a loop may be pre-passed multiple times (once per enclosing loop level),
// which is why the bit-vectors are cleared rather than re-allocated on re-entry.
void
GlobOpt::OptLoops(Loop *loop)
{
    Assert(loop != nullptr);
#if DBG
    // Trace-only diagnostic: report loops where field copy-prop is enabled per-loop
    // even though it is disabled for the function as a whole.
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) &&
        !DoFunctionFieldCopyProp() && DoFieldCopyProp(loop))
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Loop: "));
        this->func->DumpFullFunctionName();
        uint loopNumber = loop->GetLoopNumber();
        Assert(loopNumber != Js::LoopHeader::NoLoop);
        Output::Print(_u(" Loop: %d\n"), loopNumber);
    }
#endif
    // Save the enclosing pre-pass loop so nested calls restore it on exit.
    Loop *previousLoop = this->prePassLoop;
    this->prePassLoop = loop;
    if (previousLoop == nullptr)
    {
        // Entering the outermost pre-pass: this loop is the root of the pre-pass.
        Assert(this->rootLoopPrePass == nullptr);
        this->rootLoopPrePass = loop;
        this->prePassInstrMap->Clear();
        if (loop->parent == nullptr)
        {
            // Outer most loop...
            this->prePassCopyPropSym->ClearAll();
        }
    }
    // First pre-pass of this loop allocates its tracking bit-vectors; subsequent
    // passes just reset them.
    if (loop->symsUsedBeforeDefined == nullptr)
    {
        loop->symsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyIntSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelyNumberSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelySimd128F4SymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->likelySimd128I4SymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceFloat64SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceSimd128F4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->forceSimd128I4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->symsDefInLoop = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldKilled = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->fieldPRESymStore = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->allFieldsKilled = false;
    }
    else
    {
        loop->symsUsedBeforeDefined->ClearAll();
        loop->likelyIntSymsUsedBeforeDefined->ClearAll();
        loop->likelyNumberSymsUsedBeforeDefined->ClearAll();
        loop->likelySimd128F4SymsUsedBeforeDefined->ClearAll();
        loop->likelySimd128I4SymsUsedBeforeDefined->ClearAll();
        loop->forceFloat64SymsOnEntry->ClearAll();
        loop->forceSimd128F4SymsOnEntry->ClearAll();
        loop->forceSimd128I4SymsOnEntry->ClearAll();
        loop->symsDefInLoop->ClearAll();
        loop->fieldKilled->ClearAll();
        loop->allFieldsKilled = false;
        loop->initialValueFieldMap.Reset();
    }
    // Walk every block in the loop. Each block's data is reference-counted by its
    // successor count so predecessor data can be freed as soon as it is merged.
    FOREACH_BLOCK_IN_LOOP(block, loop)
    {
        block->SetDataUseCount(block->GetSuccList()->Count());
        OptBlock(block);
    } NEXT_BLOCK_IN_LOOP;
    if (previousLoop == nullptr)
    {
        // Leaving the outermost pre-pass.
        Assert(this->rootLoopPrePass == loop);
        this->rootLoopPrePass = nullptr;
    }
    this->prePassLoop = previousLoop;
}
  742. void
  743. GlobOpt::TailDupPass()
  744. {
  745. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  746. {
  747. BasicBlock* header = loop->GetHeadBlock();
  748. BasicBlock* loopTail = nullptr;
  749. FOREACH_PREDECESSOR_BLOCK(pred, header)
  750. {
  751. if (loop->IsDescendentOrSelf(pred->loop))
  752. {
  753. loopTail = pred;
  754. break;
  755. }
  756. } NEXT_PREDECESSOR_BLOCK;
  757. if (loopTail)
  758. {
  759. AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
  760. if (!loopTail->GetPredList()->HasOne())
  761. {
  762. TryTailDup(loopTail->GetLastInstr()->AsBranchInstr());
  763. }
  764. }
  765. } NEXT_LOOP_IN_FUNC_EDITING;
  766. }
// Attempt to duplicate the small "tail" (the instructions between a merge label
// and an unconditional branch) into each predecessor that jumps to the merge
// label, redirecting those predecessors straight to the branch's target.
// Returns false if tail duplication is not applicable (phase off, conditional
// branch, or tail too large/unsafe to copy); returns true once duplication has
// been performed for the eligible predecessors.
bool
GlobOpt::TryTailDup(IR::BranchInstr *tailBranch)
{
    if (PHASE_OFF(Js::TailDupPhase, tailBranch->m_func->GetTopFunc()))
    {
        return false;
    }
    // Only unconditional tails are duplicated; a conditional branch would
    // require duplicating both outcomes.
    if (tailBranch->IsConditional())
    {
        return false;
    }
    IR::Instr *instr;
    uint instrCount = 0;
    // Walk backwards from the branch to the merge label, validating that every
    // instruction in between is safe and cheap to copy.
    for (instr = tailBranch->GetPrevRealInstrOrLabel(); !instr->IsLabelInstr(); instr = instr->GetPrevRealInstrOrLabel())
    {
        if (instr->HasBailOutInfo())
        {
            // Bail-out-carrying instructions can't be blindly copied.
            break;
        }
        if (!OpCodeAttr::CanCSE(instr->m_opcode))
        {
            // Consider: We could be more aggressive here
            break;
        }
        instrCount++;
        if (instrCount > 1)
        {
            // Consider: If copy handled single-def tmps renaming, we could do more instrs
            break;
        }
    }
    // If the walk stopped before reaching a label, the tail is not duplicable.
    if (!instr->IsLabelInstr())
    {
        return false;
    }
    IR::LabelInstr *mergeLabel = instr->AsLabelInstr();
    IR::Instr *mergeLabelPrev = mergeLabel->m_prev;
    // Skip unreferenced labels
    while (mergeLabelPrev->IsLabelInstr() && mergeLabelPrev->AsLabelInstr()->labelRefs.Empty())
    {
        mergeLabelPrev = mergeLabelPrev->m_prev;
    }
    BasicBlock* labelBlock = mergeLabel->GetBasicBlock();
    uint origPredCount = labelBlock->GetPredList()->Count();
    uint dupCount = 0;
    // We are good to go. Let's do the tail duplication.
    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr*, branchEntry, &mergeLabel->labelRefs, iter)
    {
        // Only redirect unconditional, single-target branches; the fall-through
        // predecessor (mergeLabelPrev) and the tail branch itself are excluded.
        if (branchEntry->IsUnconditional() && !branchEntry->IsMultiBranch() && branchEntry != mergeLabelPrev && branchEntry != tailBranch)
        {
            // Copy the tail instructions in front of the predecessor's branch...
            for (instr = mergeLabel->m_next; instr != tailBranch; instr = instr->m_next)
            {
                branchEntry->InsertBefore(instr->Copy());
            }
            instr = branchEntry;
            // ...and retarget the predecessor past the merge block.
            branchEntry->ReplaceTarget(mergeLabel, tailBranch->GetTarget());
            // Find the label that starts the predecessor's block so the flow
            // graph edges can be updated to match the new branch target.
            while(!instr->IsLabelInstr())
            {
                instr = instr->m_prev;
            }
            BasicBlock* branchBlock = instr->AsLabelInstr()->GetBasicBlock();
            labelBlock->RemovePred(branchBlock, func->m_fg);
            func->m_fg->AddEdge(branchBlock, tailBranch->GetTarget()->GetBasicBlock());
            dupCount++;
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;
    // If we've duplicated everywhere, tail block is dead and should be removed.
    if (dupCount == origPredCount)
    {
        AssertMsg(mergeLabel->IsUnreferenced(), "Should not remove block with referenced label.");
        func->m_fg->RemoveBlock(labelBlock, nullptr, true);
    }
    return true;
}
// Merge the GlobOptBlockData (value maps, liveness and type-specialization
// bit-vectors, call-sequence bookkeeping) of all already-processed predecessors
// of 'block' into this->blockData, then insert any compensation code
// (ToVar/ToInt32/ToFloat64/SIMD unbox, possibly in a new airlock block) needed
// where predecessors disagree on how a sym is specialized. For loop headers it
// also snapshots the entry-state bit-vectors used later to fix up back edges.
void
GlobOpt::MergePredBlocksValueMaps(BasicBlock *block)
{
    Assert(!this->isCallHelper);
    if (!this->isRecursiveCallOnLandingPad)
    {
        this->NulloutBlockData(&this->blockData);
    }
    else
    {
        // If we are going over the landing pad again after field PRE, just start again
        // with the value table where we left off.
        this->CopyBlockData(&this->blockData, &block->globOptData);
        return;
    }
    // Syms whose merged value info (e.g. array value info) differs from a
    // predecessor's and will need per-predecessor compensation below.
    BVSparse<JitArenaAllocator> symsRequiringCompensation(tempAlloc);
    {
        BVSparse<JitArenaAllocator> symsCreatedForMerge(tempAlloc);
        bool forceTypeSpecOnLoopHeader = true;
        FOREACH_PREDECESSOR_BLOCK(pred, block)
        {
            // Drop an empty call sequence so it isn't needlessly merged/kept alive.
            if (pred->globOptData.callSequence && pred->globOptData.callSequence->Empty())
            {
                JitAdelete(this->alloc, pred->globOptData.callSequence);
                pred->globOptData.callSequence = nullptr;
            }
            if (block->isLoopHeader && this->IsLoopPrePass() && this->prePassLoop == block->loop && block->loop->IsDescendentOrSelf(pred->loop))
            {
                // Loop back-edge.
                // First pass on loop runs optimistically, without doing transforms.
                // Skip this edge for now.
                continue;
            }
            // '__edge' is provided by the FOREACH_PREDECESSOR_BLOCK macro: the
            // edge from 'pred' to 'block'.
            PathDependentInfo *const pathDependentInfo = __edge->GetPathDependentInfo();
            PathDependentInfoToRestore pathDependentInfoToRestore;
            if (pathDependentInfo)
            {
                pathDependentInfoToRestore = UpdatePathDependentInfo(pathDependentInfo);
            }
            Assert(pred->GetDataUseCount());
            // First pred?
            if (this->blockData.symToValueMap == nullptr)
            {
                // Only one edge?
                if (pred->GetSuccList()->HasOne() && block->GetPredList()->HasOne() && block->loop == nullptr)
                {
                    // The pred's data is dead after this; adopt it instead of cloning.
                    this->ReuseBlockData(&this->blockData, &pred->globOptData);
                    // Don't need to restore the old value info
                    pathDependentInfoToRestore.Clear();
                }
                else
                {
                    this->CloneBlockData(currentBlock, &this->blockData, pred);
                }
            }
            else
            {
                // Subsequent preds: merge into the accumulated data. Compensation
                // tracking is only meaningful outside the pre-pass.
                const bool isLoopPrePass = IsLoopPrePass();
                this->MergeBlockData(
                    &this->blockData,
                    block,
                    pred,
                    isLoopPrePass ? nullptr : &symsRequiringCompensation,
                    isLoopPrePass ? nullptr : &symsCreatedForMerge,
                    forceTypeSpecOnLoopHeader);
                forceTypeSpecOnLoopHeader = false; // can force type-spec on the loop header only for the first back edge.
            }
            // Restore the value for the next edge
            if (pathDependentInfo)
            {
                RestorePathDependentInfo(pathDependentInfo, pathDependentInfoToRestore);
                __edge->ClearPathDependentInfo(this->alloc);
            }
        } NEXT_PREDECESSOR_BLOCK;
    }
    // Consider: We can recreate values for hoisted field so it can copy prop out of the loop
    if (this->blockData.symToValueMap == nullptr)
    {
        // No predecessor contributed data (e.g. function entry): start fresh.
        Assert(this->blockData.hoistableFields == nullptr);
        this->InitBlockData();
    }
    else if (this->blockData.hoistableFields)
    {
        // Only fields still live after the merge remain hoistable.
        Assert(TrackHoistableFields());
        this->blockData.hoistableFields->And(this->blockData.liveFields);
    }
    if (!this->DoObjTypeSpec())
    {
        // Object type specialization is off, but if copy prop is on (e.g., /force:fieldhoist) we're not clearing liveFields,
        // so we may be letting type syms slip through this block.
        this->KillAllObjectTypes();
    }
    this->CopyBlockData(&block->globOptData, &this->blockData);
    if (this->IsLoopPrePass())
    {
        Assert(block->loop);
        if(DoBoundCheckHoist())
        {
            SetInductionVariableValueNumbers(&blockData);
        }
        if (block->isLoopHeader && this->rootLoopPrePass == block->loop)
        {
            // Capture bail out info in case we have optimization that needs it
            Assert(block->loop->bailOutInfo == nullptr);
            IR::Instr * firstInstr = block->GetFirstInstr();
            block->loop->bailOutInfo = JitAnew(this->func->m_alloc, BailOutInfo,
                firstInstr->GetByteCodeOffset(), firstInstr->m_func);
            this->FillBailOutInfo(block, block->loop->bailOutInfo);
#if ENABLE_DEBUG_CONFIG_OPTIONS
            block->loop->bailOutInfo->bailOutOpcode = Js::OpCode::LoopBodyStart;
#endif
        }
        // If loop pre-pass, don't insert convert from type-spec to var
        return;
    }
    this->CleanUpValueMaps();
    // NOTE(review): symIV is never assigned a non-null value anywhere in this
    // function, so symIVNeedsSpecializing below is always false — looks like
    // leftover scaffolding; confirm before relying on it.
    Sym *symIV = nullptr;
    // Clean up the syms requiring compensation by checking the final value in the merged block to see if the sym still requires
    // compensation. All the while, create a mapping from sym to value info in the merged block. This dictionary helps avoid a
    // value lookup in the merged block per predecessor.
    SymToValueInfoMap symsRequiringCompensationToMergedValueInfoMap(tempAlloc);
    if(!symsRequiringCompensation.IsEmpty())
    {
        const SymTable *const symTable = func->m_symTable;
        GlobHashTable *const symToValueMap = blockData.symToValueMap;
        FOREACH_BITSET_IN_SPARSEBV(id, &symsRequiringCompensation)
        {
            Sym *const sym = symTable->Find(id);
            Assert(sym);
            Value *const value = FindValue(symToValueMap, sym);
            if(!value)
            {
                continue;
            }
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsArrayValueInfo())
            {
                continue;
            }
            // At least one new sym was created while merging and associated with the merged value info, so those syms will
            // require compensation in predecessors. For now, the dead store phase is relied upon to remove compensation that is
            // dead due to no further uses of the new sym.
            symsRequiringCompensationToMergedValueInfoMap.Add(sym, valueInfo);
        } NEXT_BITSET_IN_SPARSEBV;
        symsRequiringCompensation.ClearAll();
    }
    if (block->isLoopHeader)
    {
        // Values on the back-edge in the prepass may be conservative for syms defined in the loop, and type specialization in
        // the prepass is not reflective of the value, but rather, is used to determine whether the sym should be specialized
        // around the loop. Additionally, some syms that are used before defined in the loop may be specialized in the loop
        // header despite not being specialized in the landing pad. Now that the type specialization bit-vectors are merged,
        // specialize the corresponding value infos in the loop header too.
        Assert(tempBv->IsEmpty());
        Loop *const loop = block->loop;
        SymTable *const symTable = func->m_symTable;
        GlobHashTable *const symToValueMap = blockData.symToValueMap;
        JitArenaAllocator *const alloc = this->alloc;
        // Int-specialized syms
        tempBv->Or(loop->likelyIntSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->And(blockData.liveInt32Syms);
        tempBv->Minus(blockData.liveLossyInt32Syms);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);
            Value *const value = FindValue(symToValueMap, varSym);
            Assert(value);
            ValueInfo *const valueInfo = value->GetValueInfo();
            if(!valueInfo->IsInt())
            {
                ChangeValueInfo(nullptr, value, valueInfo->SpecializeToInt32(alloc));
            }
        } NEXT_BITSET_IN_SPARSEBV;
        // Float-specialized syms
        tempBv->Or(loop->likelyNumberSymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->forceFloat64SymsOnEntry);
        tempBv->And(blockData.liveFloat64Syms);
        GlobOptBlockData &landingPadBlockData = loop->landingPad->globOptData;
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);
            // If the type-spec sym is null or if the sym is not float-specialized in the loop landing pad, the sym may have
            // been merged to float on a loop back-edge when it was live as float on the back-edge, and live as int in the loop
            // header. In this case, compensation inserted in the loop landing pad will use BailOutNumberOnly, and so it is
            // guaranteed that the value will be float. Otherwise, if the type-spec sym exists, its field can be checked to see
            // if it's prevented from being anything but a number.
            StackSym *const typeSpecSym = varSym->GetFloat64EquivSym(nullptr);
            if(!typeSpecSym ||
                typeSpecSym->m_requiresBailOnNotNumber ||
                !IsFloat64TypeSpecialized(varSym, &landingPadBlockData))
            {
                Value *const value = FindValue(symToValueMap, varSym);
                if(value)
                {
                    ValueInfo *const valueInfo = value->GetValueInfo();
                    if(!valueInfo->IsNumber())
                    {
                        ChangeValueInfo(block, value, valueInfo->SpecializeToFloat64(alloc));
                    }
                }
                else
                {
                    SetValue(&block->globOptData, NewGenericValue(ValueType::Float), varSym);
                }
            }
        } NEXT_BITSET_IN_SPARSEBV;
        // SIMD_JS
        // Simd128 type-spec syms
        BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
        // For syms we made alive in loop header because of hoisting, use-before-def, or def in Loop body, set their valueInfo to definite.
        // Make live on header AND in one of forceSimd128* or likelySimd128* vectors.
        tempBv->Or(loop->likelySimd128F4SymsUsedBeforeDefined, loop->symsDefInLoop);
        tempBv->Or(loop->likelySimd128I4SymsUsedBeforeDefined);
        tempBv->Or(loop->forceSimd128F4SymsOnEntry);
        tempBv->Or(loop->forceSimd128I4SymsOnEntry);
        tempBv2.Or(blockData.liveSimd128F4Syms, blockData.liveSimd128I4Syms);
        tempBv->And(&tempBv2);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym * typeSpecSym = nullptr;
            StackSym *const varSym = symTable->FindStackSym(id);
            Assert(varSym);
            if (blockData.liveSimd128F4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128F4EquivSym(nullptr);
                if (!typeSpecSym || !IsSimd128F4TypeSpecialized(varSym, &landingPadBlockData))
                {
                    Value *const value = FindValue(symToValueMap, varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Float32x4())
                        {
                            ChangeValueInfo(block, value, valueInfo->SpecializeToSimd128F4(alloc));
                        }
                    }
                    else
                    {
                        SetValue(&block->globOptData, NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Float32x4), varSym), varSym);
                    }
                }
            }
            else if (blockData.liveSimd128I4Syms->Test(id))
            {
                typeSpecSym = varSym->GetSimd128I4EquivSym(nullptr);
                if (!typeSpecSym || !IsSimd128I4TypeSpecialized(varSym, &landingPadBlockData))
                {
                    Value *const value = FindValue(symToValueMap, varSym);
                    if (value)
                    {
                        ValueInfo *const valueInfo = value->GetValueInfo();
                        if (!valueInfo->IsSimd128Int32x4())
                        {
                            ChangeValueInfo(block, value, valueInfo->SpecializeToSimd128I4(alloc));
                        }
                    }
                    else
                    {
                        SetValue(&block->globOptData, NewGenericValue(ValueType::GetSimd128(ObjectType::Simd128Int32x4), varSym), varSym);
                    }
                }
            }
            else
            {
                // The sym was selected by intersecting with (liveF4 | liveI4),
                // so one of the two tests above must hit.
                Assert(UNREACHED);
            }
        } NEXT_BITSET_IN_SPARSEBV;
        tempBv->ClearAll();
    }
    // We need to handle the case where a symbol is type-spec'd coming from some predecessors,
    // but not from others.
    //
    // We can do this by inserting the right conversion in the predecessor block, but we
    // can only do this if we are the first successor of that block, since the previous successors
    // would have already been processed. Instead, we'll need to break the edge and insert a block
    // (airlock block) to put in the conversion code.
    Assert(this->tempBv->IsEmpty());
    BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv3(this->tempAlloc);
    BVSparse<JitArenaAllocator> tempBv4(this->tempAlloc);
    // SIMD_JS
    BVSparse<JitArenaAllocator> simd128F4SymsToUnbox(this->tempAlloc);
    BVSparse<JitArenaAllocator> simd128I4SymsToUnbox(this->tempAlloc);
    FOREACH_PREDECESSOR_EDGE_EDITING(edge, block, iter)
    {
        BasicBlock *pred = edge->GetPred();
        if (pred->loop && pred->loop->GetHeadBlock() == block)
        {
            pred->DecrementDataUseCount();
            // Skip loop back-edges. We will handle these when we get to the exit blocks.
            continue;
        }
        // If the pred is an existing airlock compensation block, compute the
        // conversions against its single real predecessor instead.
        BasicBlock *orgPred = nullptr;
        if (pred->isAirLockCompensationBlock)
        {
            Assert(pred->GetPredList()->HasOne());
            orgPred = pred;
            pred = (pred->GetPredList()->Head())->GetPred();
        }
        // Lossy int in the merged block, and no int in the predecessor - need a lossy conversion to int
        tempBv2.Minus(this->blockData.liveLossyInt32Syms, pred->globOptData.liveInt32Syms);
        // Lossless int in the merged block, and no lossless int in the predecessor - need a lossless conversion to int
        tempBv3.Minus(this->blockData.liveInt32Syms, this->blockData.liveLossyInt32Syms);
        this->tempBv->Minus(pred->globOptData.liveInt32Syms, pred->globOptData.liveLossyInt32Syms);
        tempBv3.Minus(this->tempBv);
        // Var live in the merged block but not in the predecessor - need a conversion to var.
        this->tempBv->Minus(this->blockData.liveVarSyms, pred->globOptData.liveVarSyms);
        // Float live in the merged block but not in the predecessor - need a conversion to float.
        tempBv4.Minus(this->blockData.liveFloat64Syms, pred->globOptData.liveFloat64Syms);
        bool symIVNeedsSpecializing = (symIV && !pred->globOptData.liveInt32Syms->Test(symIV->m_id) && !tempBv3.Test(symIV->m_id));
        // SIMD_JS
        simd128F4SymsToUnbox.Minus(this->blockData.liveSimd128F4Syms, pred->globOptData.liveSimd128F4Syms);
        simd128I4SymsToUnbox.Minus(this->blockData.liveSimd128I4Syms, pred->globOptData.liveSimd128I4Syms);
        if (!this->tempBv->IsEmpty() ||
            !tempBv2.IsEmpty() ||
            !tempBv3.IsEmpty() ||
            !tempBv4.IsEmpty() ||
            !simd128F4SymsToUnbox.IsEmpty() ||
            !simd128I4SymsToUnbox.IsEmpty() ||
            symIVNeedsSpecializing ||
            symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
        {
            // We can't un-specialize a symbol in a predecessor if we've already processed
            // a successor of that block. Instead, insert a new block on the flow edge
            // (an airlock block) and do the un-specialization there.
            //
            // Alternatively, the current block could be an exit block out of this loop, and so the predecessor may exit the
            // loop. In that case, if the predecessor may continue into the loop without exiting, then we need an airlock block
            // to do the appropriate conversions only on the exit path (preferring not to do the conversions inside the loop).
            // If, on the other hand, the predecessor always flows into the current block, then it always exits, so we don't need
            // an airlock block and can just do the conversions in the predecessor.
            if (pred->GetSuccList()->Head()->GetSucc() != block ||
                (pred->loop && pred->loop->parent == block->loop && pred->GetSuccList()->Count() > 1))
            {
                BasicBlock *airlockBlock = nullptr;
                if (!orgPred)
                {
                    GOPT_TRACE(_u("Inserting airlock block to convert syms to var between block %d and %d\n"),
                        pred->GetBlockNum(), block->GetBlockNum());
                    airlockBlock = this->func->m_fg->InsertAirlockBlock(edge);
                }
                else
                {
                    // Reuse the existing compensation block as the airlock.
                    Assert(orgPred->isAirLockCompensationBlock);
                    airlockBlock = orgPred;
                    pred->DecrementDataUseCount();
                    airlockBlock->isAirLockCompensationBlock = false; // This is airlock block now. So remove the attribute.
                }
                this->CloneBlockData(airlockBlock, pred);
                pred = airlockBlock;
            }
            // Emit the conversions computed above into the (possibly airlock) pred.
            if (!this->tempBv->IsEmpty())
            {
                this->ToVar(this->tempBv, pred);
            }
            if (!tempBv2.IsEmpty())
            {
                this->ToInt32(&tempBv2, pred, true /* lossy */);
            }
            if (!tempBv3.IsEmpty())
            {
                this->ToInt32(&tempBv3, pred, false /* lossy */);
            }
            if (!tempBv4.IsEmpty())
            {
                this->ToFloat64(&tempBv4, pred);
            }
            if (symIVNeedsSpecializing)
            {
                this->tempBv->ClearAll();
                this->tempBv->Set(symIV->m_id);
                this->ToInt32(this->tempBv, pred, false /* lossy */);
            }
            if(symsRequiringCompensationToMergedValueInfoMap.Count() != 0)
            {
                InsertValueCompensation(pred, symsRequiringCompensationToMergedValueInfoMap);
            }
            // SIMD_JS
            if (!simd128F4SymsToUnbox.IsEmpty())
            {
                this->ToTypeSpec(&simd128F4SymsToUnbox, pred, TySimd128F4, IR::BailOutSimd128F4Only);
            }
            if (!simd128I4SymsToUnbox.IsEmpty())
            {
                this->ToTypeSpec(&simd128I4SymsToUnbox, pred, TySimd128I4, IR::BailOutSimd128I4Only);
            }
        }
    } NEXT_PREDECESSOR_EDGE_EDITING;
    FOREACH_PREDECESSOR_EDGE(edge, block)
    {
        // Peak Memory optimization:
        // These are in an arena, but putting them on the free list greatly reduces
        // the peak memory used by the global optimizer for complex flow graphs.
        BasicBlock *pred = edge->GetPred();
        if (!block->isLoopHeader || block->loop != pred->loop)
        {
            // Skip airlock compensation block as we are not going to walk this block.
            if (pred->isAirLockCompensationBlock)
            {
                pred->DecrementDataUseCount();
                Assert(pred->GetPredList()->HasOne());
                pred = (pred->GetPredList()->Head())->GetPred();
            }
            if (pred->DecrementDataUseCount() == 0 && (!block->loop || block->loop->landingPad != pred))
            {
                if (!(pred->GetSuccList()->HasOne() && block->GetPredList()->HasOne() && block->loop == nullptr))
                {
                    this->DeleteBlockData(&pred->globOptData);
                }
                else
                {
                    // Data was adopted via ReuseBlockData above; just detach it.
                    this->NulloutBlockData(&pred->globOptData);
                }
            }
        }
    } NEXT_PREDECESSOR_EDGE;
    this->tempBv->ClearAll();
    Assert(!this->IsLoopPrePass());   // We already early return if we are in prepass
    if (block->isLoopHeader)
    {
        Loop *const loop = block->loop;
        // Save values live on loop entry, such that we can adjust the state of the
        // values on the back-edge to match.
        loop->varSymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->varSymsOnEntry->Copy(block->globOptData.liveVarSyms);
        loop->int32SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->int32SymsOnEntry->Copy(block->globOptData.liveInt32Syms);
        loop->lossyInt32SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->lossyInt32SymsOnEntry->Copy(block->globOptData.liveLossyInt32Syms);
        loop->float64SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->float64SymsOnEntry->Copy(block->globOptData.liveFloat64Syms);
        // SIMD_JS
        loop->simd128F4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->simd128F4SymsOnEntry->Copy(block->globOptData.liveSimd128F4Syms);
        loop->simd128I4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->simd128I4SymsOnEntry->Copy(block->globOptData.liveSimd128I4Syms);
        loop->liveFieldsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        loop->liveFieldsOnEntry->Copy(block->globOptData.liveFields);
        if(DoBoundCheckHoist() && loop->inductionVariables)
        {
            FinalizeInductionVariables(loop, &blockData);
            if(DoLoopCountBasedBoundCheckHoist())
            {
                DetermineDominatingLoopCountableBlock(loop, block);
            }
        }
    }
    else if (!block->loop)
    {
        block->SetDataUseCount(block->GetSuccList()->Count());
    }
    else if(block == block->loop->dominatingLoopCountableBlock)
    {
        DetermineLoopCount(block->loop);
    }
}
  1297. void
  1298. GlobOpt::NulloutBlockData(GlobOptBlockData *data)
  1299. {
  1300. data->symToValueMap = nullptr;
  1301. data->exprToValueMap = nullptr;
  1302. data->liveFields = nullptr;
  1303. data->maybeWrittenTypeSyms = nullptr;
  1304. data->isTempSrc = nullptr;
  1305. data->liveVarSyms = nullptr;
  1306. data->liveInt32Syms = nullptr;
  1307. data->liveLossyInt32Syms = nullptr;
  1308. data->liveFloat64Syms = nullptr;
  1309. // SIMD_JS
  1310. data->liveSimd128F4Syms = nullptr;
  1311. data->liveSimd128I4Syms = nullptr;
  1312. data->hoistableFields = nullptr;
  1313. data->argObjSyms = nullptr;
  1314. data->maybeTempObjectSyms = nullptr;
  1315. data->canStoreTempObjectSyms = nullptr;
  1316. data->valuesToKillOnCalls = nullptr;
  1317. data->inductionVariables = nullptr;
  1318. data->availableIntBoundChecks = nullptr;
  1319. data->callSequence = nullptr;
  1320. data->startCallCount = 0;
  1321. data->argOutCount = 0;
  1322. data->totalOutParamCount = 0;
  1323. data->inlinedArgOutCount = 0;
  1324. data->hasCSECandidates = false;
  1325. data->curFunc = this->func;
  1326. data->stackLiteralInitFldDataMap = nullptr;
  1327. data->capturedValues = nullptr;
  1328. data->changedSyms = nullptr;
  1329. data->OnDataUnreferenced();
  1330. }
  1331. void
  1332. GlobOpt::InitBlockData()
  1333. {
  1334. GlobOptBlockData *const data = &this->blockData;
  1335. JitArenaAllocator *const alloc = this->alloc;
  1336. data->symToValueMap = GlobHashTable::New(alloc, 64);
  1337. data->exprToValueMap = ExprHashTable::New(alloc, 64);
  1338. data->liveFields = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1339. data->liveArrayValues = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1340. data->isTempSrc = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1341. data->liveVarSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1342. data->liveInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1343. data->liveLossyInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1344. data->liveFloat64Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1345. // SIMD_JS
  1346. data->liveSimd128F4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1347. data->liveSimd128I4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1348. data->hoistableFields = nullptr;
  1349. data->argObjSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1350. data->maybeTempObjectSyms = nullptr;
  1351. data->canStoreTempObjectSyms = nullptr;
  1352. data->valuesToKillOnCalls = JitAnew(alloc, ValueSet, alloc);
  1353. if(DoBoundCheckHoist())
  1354. {
  1355. data->inductionVariables = IsLoopPrePass() ? JitAnew(alloc, InductionVariableSet, alloc) : nullptr;
  1356. data->availableIntBoundChecks = JitAnew(alloc, IntBoundCheckSet, alloc);
  1357. }
  1358. data->maybeWrittenTypeSyms = nullptr;
  1359. data->callSequence = nullptr;
  1360. data->startCallCount = 0;
  1361. data->argOutCount = 0;
  1362. data->totalOutParamCount = 0;
  1363. data->inlinedArgOutCount = 0;
  1364. data->hasCSECandidates = false;
  1365. data->curFunc = this->func;
  1366. data->stackLiteralInitFldDataMap = nullptr;
  1367. data->changedSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
  1368. data->OnDataInitialized(alloc);
  1369. }
  1370. void
  1371. GlobOpt::ReuseBlockData(GlobOptBlockData *toData, GlobOptBlockData *fromData)
  1372. {
  1373. // Reuse dead map
  1374. toData->symToValueMap = fromData->symToValueMap;
  1375. toData->exprToValueMap = fromData->exprToValueMap;
  1376. toData->liveFields = fromData->liveFields;
  1377. toData->liveArrayValues = fromData->liveArrayValues;
  1378. toData->maybeWrittenTypeSyms = fromData->maybeWrittenTypeSyms;
  1379. toData->isTempSrc = fromData->isTempSrc;
  1380. toData->liveVarSyms = fromData->liveVarSyms;
  1381. toData->liveInt32Syms = fromData->liveInt32Syms;
  1382. toData->liveLossyInt32Syms = fromData->liveLossyInt32Syms;
  1383. toData->liveFloat64Syms = fromData->liveFloat64Syms;
  1384. // SIMD_JS
  1385. toData->liveSimd128F4Syms = fromData->liveSimd128F4Syms;
  1386. toData->liveSimd128I4Syms = fromData->liveSimd128I4Syms;
  1387. if (TrackHoistableFields())
  1388. {
  1389. toData->hoistableFields = fromData->hoistableFields;
  1390. }
  1391. if (TrackArgumentsObject())
  1392. {
  1393. toData->argObjSyms = fromData->argObjSyms;
  1394. }
  1395. toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms;
  1396. toData->canStoreTempObjectSyms = fromData->canStoreTempObjectSyms;
  1397. toData->curFunc = fromData->curFunc;
  1398. toData->valuesToKillOnCalls = fromData->valuesToKillOnCalls;
  1399. toData->inductionVariables = fromData->inductionVariables;
  1400. toData->availableIntBoundChecks = fromData->availableIntBoundChecks;
  1401. toData->callSequence = fromData->callSequence;
  1402. toData->startCallCount = fromData->startCallCount;
  1403. toData->argOutCount = fromData->argOutCount;
  1404. toData->totalOutParamCount = fromData->totalOutParamCount;
  1405. toData->inlinedArgOutCount = fromData->inlinedArgOutCount;
  1406. toData->hasCSECandidates = fromData->hasCSECandidates;
  1407. toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap;
  1408. toData->changedSyms = fromData->changedSyms;
  1409. toData->changedSyms->ClearAll();
  1410. toData->OnDataReused(fromData);
  1411. }
  1412. void
  1413. GlobOpt::CopyBlockData(GlobOptBlockData *toData, GlobOptBlockData *fromData)
  1414. {
  1415. toData->symToValueMap = fromData->symToValueMap;
  1416. toData->exprToValueMap = fromData->exprToValueMap;
  1417. toData->liveFields = fromData->liveFields;
  1418. toData->liveArrayValues = fromData->liveArrayValues;
  1419. toData->maybeWrittenTypeSyms = fromData->maybeWrittenTypeSyms;
  1420. toData->isTempSrc = fromData->isTempSrc;
  1421. toData->liveVarSyms = fromData->liveVarSyms;
  1422. toData->liveInt32Syms = fromData->liveInt32Syms;
  1423. toData->liveLossyInt32Syms = fromData->liveLossyInt32Syms;
  1424. toData->liveFloat64Syms = fromData->liveFloat64Syms;
  1425. // SIMD_JS
  1426. toData->liveSimd128F4Syms = fromData->liveSimd128F4Syms;
  1427. toData->liveSimd128I4Syms = fromData->liveSimd128I4Syms;
  1428. toData->hoistableFields = fromData->hoistableFields;
  1429. toData->argObjSyms = fromData->argObjSyms;
  1430. toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms;
  1431. toData->canStoreTempObjectSyms = fromData->canStoreTempObjectSyms;
  1432. toData->curFunc = fromData->curFunc;
  1433. toData->valuesToKillOnCalls = fromData->valuesToKillOnCalls;
  1434. toData->inductionVariables = fromData->inductionVariables;
  1435. toData->availableIntBoundChecks = fromData->availableIntBoundChecks;
  1436. toData->callSequence = fromData->callSequence;
  1437. toData->startCallCount = fromData->startCallCount;
  1438. toData->argOutCount = fromData->argOutCount;
  1439. toData->totalOutParamCount = fromData->totalOutParamCount;
  1440. toData->inlinedArgOutCount = fromData->inlinedArgOutCount;
  1441. toData->hasCSECandidates = fromData->hasCSECandidates;
  1442. toData->changedSyms = fromData->changedSyms;
  1443. toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap;
  1444. toData->OnDataReused(fromData);
  1445. }
// Convenience overload: deep-clone fromBlock's GlobOptBlockData directly into
// toBlock's own globOptData.
void GlobOpt::CloneBlockData(BasicBlock *const toBlock, BasicBlock *const fromBlock)
{
    CloneBlockData(toBlock, &toBlock->globOptData, fromBlock);
}
// Deep-clones fromBlock's GlobOptBlockData into toData (which will describe
// toBlock). Every map and bit-vector is copied into fresh arena allocations so
// the two blocks can diverge independently; values themselves are cloned via
// CloneValues to allow flow-sensitive ValueInfo. Optional members that are
// null/absent in fromData are left unset in toData.
void GlobOpt::CloneBlockData(BasicBlock *const toBlock, GlobOptBlockData *const toData, BasicBlock *const fromBlock)
{
    GlobOptBlockData *const fromData = &fromBlock->globOptData;
    JitArenaAllocator *const alloc = this->alloc;

    toData->symToValueMap = fromData->symToValueMap->Copy();
    toData->exprToValueMap = fromData->exprToValueMap->Copy();

    // Clone the values as well to allow for flow-sensitive ValueInfo
    this->CloneValues(toBlock, toData, fromData);

    if(DoBoundCheckHoist())
    {
        CloneBoundCheckHoistBlockData(toBlock, toData, fromBlock, fromData);
    }

    // Copy each liveness/state bit-vector into a fresh allocation.
    toData->liveFields = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveFields->Copy(fromData->liveFields);

    toData->liveArrayValues = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveArrayValues->Copy(fromData->liveArrayValues);

    // maybeWrittenTypeSyms is lazily allocated; only clone when present.
    if (fromData->maybeWrittenTypeSyms)
    {
        toData->maybeWrittenTypeSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
        toData->maybeWrittenTypeSyms->Copy(fromData->maybeWrittenTypeSyms);
    }

    toData->isTempSrc = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->isTempSrc->Copy(fromData->isTempSrc);

    toData->liveVarSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveVarSyms->Copy(fromData->liveVarSyms);

    toData->liveInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveInt32Syms->Copy(fromData->liveInt32Syms);

    toData->liveLossyInt32Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveLossyInt32Syms->Copy(fromData->liveLossyInt32Syms);

    toData->liveFloat64Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveFloat64Syms->Copy(fromData->liveFloat64Syms);

    // SIMD_JS
    toData->liveSimd128F4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveSimd128F4Syms->Copy(fromData->liveSimd128F4Syms);

    toData->liveSimd128I4Syms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->liveSimd128I4Syms->Copy(fromData->liveSimd128I4Syms);

    if (TrackHoistableFields())
    {
        if (fromData->hoistableFields)
        {
            toData->hoistableFields = fromData->hoistableFields->CopyNew(alloc);
        }
    }

    if (TrackArgumentsObject() && fromData->argObjSyms)
    {
        toData->argObjSyms = fromData->argObjSyms->CopyNew(alloc);
    }

    // canStoreTempObjectSyms is meaningful only when maybeTempObjectSyms is
    // non-empty; the else-branch assert documents that invariant.
    if (fromData->maybeTempObjectSyms && !fromData->maybeTempObjectSyms->IsEmpty())
    {
        toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms->CopyNew(alloc);
        if (fromData->canStoreTempObjectSyms && !fromData->canStoreTempObjectSyms->IsEmpty())
        {
            toData->canStoreTempObjectSyms = fromData->canStoreTempObjectSyms->CopyNew(alloc);
        }
    }
    else
    {
        Assert(fromData->canStoreTempObjectSyms == nullptr || fromData->canStoreTempObjectSyms->IsEmpty());
    }

    toData->curFunc = fromData->curFunc;

    if (fromData->callSequence != nullptr)
    {
        toData->callSequence = JitAnew(alloc, SListBase<IR::Opnd *>);
        fromData->callSequence->CopyTo(alloc, *(toData->callSequence));
    }
    else
    {
        toData->callSequence = nullptr;
    }

    toData->startCallCount = fromData->startCallCount;
    toData->argOutCount = fromData->argOutCount;
    toData->totalOutParamCount = fromData->totalOutParamCount;
    toData->inlinedArgOutCount = fromData->inlinedArgOutCount;
    toData->hasCSECandidates = fromData->hasCSECandidates;

    // Although we don't need the data on loop pre pass, we need to do it for the loop header
    // because we capture the loop header bailout on loop prepass
    if (fromData->stackLiteralInitFldDataMap != nullptr &&
        (!this->IsLoopPrePass() || (toBlock->isLoopHeader && toBlock->loop == this->rootLoopPrePass)))
    {
        toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap->Clone();
    }
    else
    {
        toData->stackLiteralInitFldDataMap = nullptr;
    }

    toData->changedSyms = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);
    toData->changedSyms->Copy(fromData->changedSyms);

    Assert(fromData->HasData());
    toData->OnDataInitialized(alloc);
}
// Replaces every Value in toData's sym-to-value map with a freshly copied
// Value, so that toBlock's ValueInfo can be refined independently of the
// block it was cloned from. Values sharing a ValueNumber are cloned once and
// shared via the valuesCreatedForClone cache. Also rebuilds toData's
// valuesToKillOnCalls set and applies pending value kills for toBlock.
void
GlobOpt::CloneValues(BasicBlock *const toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    ValueSet *const valuesToKillOnCalls = JitAnew(this->alloc, ValueSet, this->alloc);
    toData->valuesToKillOnCalls = valuesToKillOnCalls;

    // Values are shared between symbols with the same ValueNumber.
    // Use a dictionary to share the clone values.
    ValueSetByValueNumber *const valuesCreatedForClone = this->valuesCreatedForClone;
    Assert(valuesCreatedForClone);
    Assert(valuesCreatedForClone->Count() == 0);
    DebugOnly(ValueSetByValueNumber originalValues(tempAlloc, 64));

    // Walk every bucket of the hash table directly and swap each bucket's
    // Value for its clone in place.
    const uint tableSize = toData->symToValueMap->tableSize;
    SListBase<GlobHashBucket> *const table = toData->symToValueMap->table;
    for (uint i = 0; i < tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &table[i])
        {
            Value *value = bucket.element;
            ValueNumber valueNum = value->GetValueNumber();
#if DBG
            // Ensure that the set of values in fromData contains only one value per value number. Byte-code constant values
            // are reused in multiple blocks without cloning, so exclude those value numbers.
            {
                Value *const previouslyClonedOriginalValue = originalValues.Lookup(valueNum);
                if (previouslyClonedOriginalValue)
                {
                    if (!byteCodeConstantValueNumbersBv->Test(valueNum))
                    {
                        Assert(value == previouslyClonedOriginalValue);
                    }
                }
                else
                {
                    originalValues.Add(value);
                }
            }
#endif

            Value *newValue = valuesCreatedForClone->Lookup(valueNum);
            if (!newValue)
            {
                newValue = CopyValue(value, valueNum);
                TrackMergedValueForKills(newValue, toData, nullptr);
                valuesCreatedForClone->Add(newValue);
            }
            bucket.element = newValue;
        } NEXT_SLISTBASE_ENTRY;
    }

    // The cache is a member shared across clones; leave it empty for the next caller.
    valuesCreatedForClone->Clear();

    ProcessValueKills(toBlock, toData);
}
// Merges a "from" captured-value list into toData's changed-syms tracking.
// Walks both lists in lock-step by sym id (this two-pointer walk assumes both
// lists are sorted ascending by m_id — NOTE(review): confirm against the list
// builders). Syms present in only one list, and common syms whose captured
// items differ per itemsAreEqual, are marked in toData->changedSyms so they
// will be re-captured; only identical common entries are left untouched.
template <typename CapturedList, typename CapturedItemsAreEqual>
void
GlobOpt::MergeCapturedValues(
    GlobOptBlockData * toData,
    SListBase<CapturedList> * toList,
    SListBase<CapturedList> * fromList,
    CapturedItemsAreEqual itemsAreEqual)
{
    typename SListBase<CapturedList>::Iterator iterTo(toList);
    typename SListBase<CapturedList>::Iterator iterFrom(fromList);
    bool hasTo = iterTo.Next();
    // fromList may legitimately be null (no captured values on that side).
    bool hasFrom = fromList == nullptr ? false : iterFrom.Next();

    // to be conservative, only copy the captured value for common sym Ids
    // in from and to CapturedList, mark all non-common sym Ids for re-capture
    while (hasFrom && hasTo)
    {
        Sym * symFrom = iterFrom.Data().Key();
        Sym * symTo = iterTo.Data().Key();

        if (symFrom->m_id < symTo->m_id)
        {
            // Present only in fromList: re-capture.
            toData->changedSyms->Set(symFrom->m_id);
            hasFrom = iterFrom.Next();
        }
        else if(symFrom->m_id > symTo->m_id)
        {
            // Present only in toList: re-capture.
            toData->changedSyms->Set(symTo->m_id);
            hasTo = iterTo.Next();
        }
        else
        {
            // Common sym: keep only if the captured items agree.
            if (!itemsAreEqual(&iterFrom.Data(), &iterTo.Data()))
            {
                toData->changedSyms->Set(symTo->m_id);
            }

            hasFrom = iterFrom.Next();
            hasTo = iterTo.Next();
        }
    }

    // Whichever list has a tail left over contributes only non-common syms;
    // mark them all for re-capture.
    bool hasRemain = hasFrom || hasTo;
    if (hasRemain)
    {
        typename SListBase<CapturedList>::Iterator iterRemain(hasFrom ? iterFrom : iterTo);
        do
        {
            Sym * symRemain = iterRemain.Data().Key();
            toData->changedSyms->Set(symRemain->m_id);
            hasRemain = iterRemain.Next();
        } while (hasRemain);
    }
}
// Merges fromBlock's GlobOptBlockData into toData (the data being built for
// toBlock) at a control-flow join. Liveness bit-vectors are intersected or
// combined per the algebra documented inline; captured values, temp-object
// syms, hoistable fields, and stack-literal init data are unioned or
// invalidated conservatively. When toBlock is a loop header and
// forceTypeSpecOnLoopHeader is set, syms used-before-defined in the loop are
// forced into their expected type-spec form on entry to avoid repeated
// conversions on the back-edge.
void
GlobOpt::MergeBlockData(
    GlobOptBlockData *toData,
    BasicBlock *toBlock,
    BasicBlock *fromBlock,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge,
    bool forceTypeSpecOnLoopHeader)
{
    GlobOptBlockData *fromData = &(fromBlock->globOptData);

    if(DoBoundCheckHoist())
    {
        // Do this before merging values so that it can see whether a sym's value was changed on one side or the other
        MergeBoundCheckHoistBlockData(toBlock, toData, fromBlock, fromData);
    }

    bool isLoopBackEdge = toBlock->isLoopHeader;
    this->MergeValueMaps(toData, toBlock, fromBlock, symsRequiringCompensation, symsCreatedForMerge);

    this->InsertCloneStrs(toBlock, toData, fromData);

    // Conservative intersections: a fact must hold on both sides to survive.
    toData->liveFields->And(fromData->liveFields);
    toData->liveArrayValues->And(fromData->liveArrayValues);
    toData->isTempSrc->And(fromData->isTempSrc);
    toData->hasCSECandidates &= fromData->hasCSECandidates;

    if (toData->capturedValues == nullptr)
    {
        // First predecessor merged into this block: adopt its captured values.
        toData->capturedValues = fromData->capturedValues;
        toData->changedSyms->Or(fromData->changedSyms);
    }
    else
    {
        // Keep captured constants only where both sides captured equal values.
        MergeCapturedValues(
            toData,
            &toData->capturedValues->constantValues,
            fromData->capturedValues == nullptr ? nullptr : &fromData->capturedValues->constantValues,
            [&](ConstantStackSymValue * symValueFrom, ConstantStackSymValue * symValueTo)
            {
                return symValueFrom->Value().IsEqual(symValueTo->Value());
            });

        // Keep copy-prop candidates only where both sides agree on the copied
        // sym and the copied values share a value number.
        MergeCapturedValues(
            toData,
            &toData->capturedValues->copyPropSyms,
            fromData->capturedValues == nullptr ? nullptr : &fromData->capturedValues->copyPropSyms,
            [&](CopyPropSyms * copyPropSymFrom, CopyPropSyms * copyPropSymTo)
            {
                if (copyPropSymFrom->Value()->m_id == copyPropSymTo->Value()->m_id)
                {
                    Value * val = FindValue(copyPropSymFrom->Key());
                    Value * copyVal = FindValue(copyPropSymTo->Key());
                    return (val != nullptr && copyVal != nullptr &&
                        val->GetValueNumber() == copyVal->GetValueNumber());
                }
                return false;
            });
    }

    // maybeWrittenTypeSyms is a may-set: union it.
    if (fromData->maybeWrittenTypeSyms)
    {
        if (toData->maybeWrittenTypeSyms == nullptr)
        {
            toData->maybeWrittenTypeSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
            toData->maybeWrittenTypeSyms->Copy(fromData->maybeWrittenTypeSyms);
        }
        else
        {
            toData->maybeWrittenTypeSyms->Or(fromData->maybeWrittenTypeSyms);
        }
    }

    {
        // - Keep the var sym live if any of the following is true:
        //     - The var sym is live on both sides
        //     - The var sym is the only live sym that contains the lossless value of the sym on a side (that is, the lossless
        //       int32 sym is not live, and the float64 sym is not live on that side), and the sym of any type is live on the
        //       other side
        //     - On a side, the var and float64 syms are live, the lossless int32 sym is not live, the sym's merged value is
        //       likely int, and the sym of any type is live on the other side. Since the value is likely int, it may be
        //       int-specialized (with lossless conversion) later. Keeping only the float64 sym live requires doing a lossless
        //       conversion from float64 to int32, with bailout if the value of the float is not a true 32-bit integer. Checking
        //       that is costly, and if the float64 sym is converted back to var, it does not become a tagged int, causing a
        //       guaranteed bailout if a lossless conversion to int happens later. Keep the var sym live to preserve its
        //       tagged-ness so that it can be int-specialized while avoiding unnecessary bailouts.
        // - Keep the int32 sym live if it's live on both sides
        //     - Mark the sym as lossy if it's lossy on any side
        // - Keep the float64 sym live if it's live on a side and the sym of a specialized lossless type is live on the other
        //   side
        //
        // fromData.temp =
        //     (fromData.var - (fromData.int32 - fromData.lossyInt32)) &
        //     (toData.var | toData.int32 | toData.float64)
        // toData.temp =
        //     (toData.var - (toData.int32 - toData.lossyInt32)) &
        //     (fromData.var | fromData.int32 | fromData.float64)
        // toData.var =
        //     (fromData.var & toData.var) |
        //     (fromData.temp - fromData.float64) |
        //     (toData.temp - toData.float64) |
        //     (fromData.temp & fromData.float64 | toData.temp & toData.float64) & (value ~ int)
        //
        // toData.float64 =
        //     fromData.float64 & ((toData.int32 - toData.lossyInt32) | toData.float64) |
        //     toData.float64 & ((fromData.int32 - fromData.lossyInt32) | fromData.float64)
        // toData.int32 &= fromData.int32
        // toData.lossyInt32 = (fromData.lossyInt32 | toData.lossyInt32) & toData.int32
        BVSparse<JitArenaAllocator> tempBv1(this->tempAlloc);
        BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);

        if (isLoopBackEdge && forceTypeSpecOnLoopHeader)
        {
            Loop *const loop = toBlock->loop;

            // Force to lossless int32:
            // forceLosslessInt32 =
            //     ((fromData.int32 - fromData.lossyInt32) - (toData.int32 - toData.lossyInt32)) &
            //     loop.likelyIntSymsUsedBeforeDefined &
            //     toData.var
            tempBv1.Minus(fromData->liveInt32Syms, fromData->liveLossyInt32Syms);
            tempBv2.Minus(toData->liveInt32Syms, toData->liveLossyInt32Syms);
            tempBv1.Minus(&tempBv2);
            tempBv1.And(loop->likelyIntSymsUsedBeforeDefined);
            tempBv1.And(toData->liveVarSyms);
            toData->liveInt32Syms->Or(&tempBv1);
            toData->liveLossyInt32Syms->Minus(&tempBv1);

            if(DoLossyIntTypeSpec())
            {
                // Force to lossy int32:
                // forceLossyInt32 = (fromData.int32 - toData.int32) & loop.symsUsedBeforeDefined & toData.var
                tempBv1.Minus(fromData->liveInt32Syms, toData->liveInt32Syms);
                tempBv1.And(loop->symsUsedBeforeDefined);
                tempBv1.And(toData->liveVarSyms);
                toData->liveInt32Syms->Or(&tempBv1);
                toData->liveLossyInt32Syms->Or(&tempBv1);
            }

            // Force to float64:
            // forceFloat64 =
            //     fromData.float64 & loop.forceFloat64 |
            //     (fromData.float64 - toData.float64) & loop.likelyNumberSymsUsedBeforeDefined
            tempBv1.And(fromData->liveFloat64Syms, loop->forceFloat64SymsOnEntry);
            toData->liveFloat64Syms->Or(&tempBv1);

            tempBv1.Minus(fromData->liveFloat64Syms, toData->liveFloat64Syms);
            tempBv1.And(loop->likelyNumberSymsUsedBeforeDefined);
            toData->liveFloat64Syms->Or(&tempBv1);

            // Force to Simd128 type:
            // if live on the backedge and we are hoisting the operand.
            // or if live on the backedge only and used before def in the loop.
            tempBv1.And(fromData->liveSimd128F4Syms, loop->forceSimd128F4SymsOnEntry);
            toData->liveSimd128F4Syms->Or(&tempBv1);

            tempBv1.Minus(fromData->liveSimd128F4Syms, toData->liveSimd128F4Syms);
            tempBv1.And(loop->likelySimd128F4SymsUsedBeforeDefined);
            toData->liveSimd128F4Syms->Or(&tempBv1);

            tempBv1.And(fromData->liveSimd128I4Syms, loop->forceSimd128I4SymsOnEntry);
            toData->liveSimd128I4Syms->Or(&tempBv1);

            tempBv1.Minus(fromData->liveSimd128I4Syms, toData->liveSimd128I4Syms);
            tempBv1.And(loop->likelySimd128I4SymsUsedBeforeDefined);
            toData->liveSimd128I4Syms->Or(&tempBv1);
        }

        BVSparse<JitArenaAllocator> simdSymsToVar(this->tempAlloc);
        {
            // SIMD_JS
            // If we have simd128 type-spec sym live as one type on one side, but not of same type on the other, we look at the merged ValueType.
            // If it's Likely the simd128 type, we choose to keep the type-spec sym (compensate with a FromVar), if the following is true:
            //     - We are not in jitLoopBody. Introducing a FromVar for compensation extends bytecode syms lifetime. If the value
            //       is actually dead, and we enter the loop-body after bailing out from SimpleJit, the value will not be restored in
            //       the bailout code.
            //     - Value was never Undefined/Null. Avoid unboxing of possibly uninitialized values.
            //     - Not loop back-edge. To keep unboxed value, the value has to be used-before def in the loop-body. This is done
            //       separately in forceSimd128*SymsOnEntry and included in loop-header.

            // Live syms as F4 on one edge only
            tempBv1.Xor(fromData->liveSimd128F4Syms, toData->liveSimd128F4Syms);
            FOREACH_BITSET_IN_SPARSEBV(id, &tempBv1)
            {
                StackSym *const stackSym = this->func->m_symTable->FindStackSym(id);
                Assert(stackSym);
                Value *const value = this->FindValue(toData->symToValueMap, stackSym);
                ValueInfo * valueInfo = value ? value->GetValueInfo() : nullptr;

                // There are two possible representations for Simd128F4 Value: F4 or Var.
                // If the merged ValueType is LikelySimd128F4, then on the edge where F4 is dead, Var must be alive.
                // Unbox to F4 type-spec sym.
                if (
                    valueInfo && valueInfo->IsLikelySimd128Float32x4() &&
                    !valueInfo->HasBeenUndefined() && !valueInfo->HasBeenNull() &&
                    !isLoopBackEdge && !func->IsLoopBody()
                   )
                {
                    toData->liveSimd128F4Syms->Set(id);
                }
                else
                {
                    // If live on both edges, box it.
                    if (IsLive(stackSym, fromData) && IsLive(stackSym, toData))
                    {
                        simdSymsToVar.Set(id);
                    }
                    // kill F4 sym
                    toData->liveSimd128F4Syms->Clear(id);
                }
            } NEXT_BITSET_IN_SPARSEBV;

            // Same for I4
            tempBv1.Xor(fromData->liveSimd128I4Syms, toData->liveSimd128I4Syms);
            FOREACH_BITSET_IN_SPARSEBV(id, &tempBv1)
            {
                StackSym *const stackSym = this->func->m_symTable->FindStackSym(id);
                Assert(stackSym);
                Value *const value = this->FindValue(toData->symToValueMap, stackSym);
                ValueInfo * valueInfo = value ? value->GetValueInfo() : nullptr;
                if (
                    valueInfo && valueInfo->IsLikelySimd128Int32x4() &&
                    !valueInfo->HasBeenUndefined() && !valueInfo->HasBeenNull() &&
                    !isLoopBackEdge && !func->IsLoopBody()
                   )
                {
                    toData->liveSimd128I4Syms->Set(id);
                }
                else
                {
                    if (IsLive(stackSym, fromData) && IsLive(stackSym, toData))
                    {
                        simdSymsToVar.Set(id);
                    }
                    toData->liveSimd128I4Syms->Clear(id);
                }
            } NEXT_BITSET_IN_SPARSEBV;
        }

        {
            BVSparse<JitArenaAllocator> tempBv3(this->tempAlloc);

            // fromData.temp =
            //     (fromData.var - (fromData.int32 - fromData.lossyInt32)) &
            //     (toData.var | toData.int32 | toData.float64)
            tempBv2.Minus(fromData->liveInt32Syms, fromData->liveLossyInt32Syms);
            tempBv1.Minus(fromData->liveVarSyms, &tempBv2);
            tempBv2.Or(toData->liveVarSyms, toData->liveInt32Syms);
            tempBv2.Or(toData->liveFloat64Syms);
            tempBv1.And(&tempBv2);

            // toData.temp =
            //     (toData.var - (toData.int32 - toData.lossyInt32)) &
            //     (fromData.var | fromData.int32 | fromData.float64)
            tempBv3.Minus(toData->liveInt32Syms, toData->liveLossyInt32Syms);
            tempBv2.Minus(toData->liveVarSyms, &tempBv3);
            tempBv3.Or(fromData->liveVarSyms, fromData->liveInt32Syms);
            tempBv3.Or(fromData->liveFloat64Syms);
            tempBv2.And(&tempBv3);

            {
                BVSparse<JitArenaAllocator> tempBv4(this->tempAlloc);

                // fromData.temp & fromData.float64 | toData.temp & toData.float64
                tempBv3.And(&tempBv1, fromData->liveFloat64Syms);
                tempBv4.And(&tempBv2, toData->liveFloat64Syms);
                tempBv3.Or(&tempBv4);
            }

            // (fromData.temp - fromData.float64) |
            // (toData.temp - toData.float64)
            tempBv1.Minus(fromData->liveFloat64Syms);
            tempBv2.Minus(toData->liveFloat64Syms);
            tempBv1.Or(&tempBv2);

            // toData.var =
            //     (fromData.var & toData.var) |
            //     (fromData.temp - fromData.float64) |
            //     (toData.temp - toData.float64)
            toData->liveVarSyms->And(fromData->liveVarSyms);
            toData->liveVarSyms->Or(&tempBv1);

            // toData.var |=
            //     (fromData.temp & fromData.float64 | toData.temp & toData.float64) & (value ~ int)
            FOREACH_BITSET_IN_SPARSEBV(id, &tempBv3)
            {
                StackSym *const stackSym = this->func->m_symTable->FindStackSym(id);
                Assert(stackSym);
                Value *const value = this->FindValue(toData->symToValueMap, stackSym);
                if(value)
                {
                    ValueInfo *const valueInfo = value->GetValueInfo();
                    if(valueInfo->IsInt() || (valueInfo->IsLikelyInt() && DoAggressiveIntTypeSpec()))
                    {
                        toData->liveVarSyms->Set(id);
                    }
                }
            } NEXT_BITSET_IN_SPARSEBV;

            // SIMD_JS
            // Simd syms that need boxing
            toData->liveVarSyms->Or(&simdSymsToVar);
        }

        // fromData.float64 & ((toData.int32 - toData.lossyInt32) | toData.float64)
        tempBv1.Minus(toData->liveInt32Syms, toData->liveLossyInt32Syms);
        tempBv1.Or(toData->liveFloat64Syms);
        tempBv1.And(fromData->liveFloat64Syms);

        // toData.float64 & ((fromData.int32 - fromData.lossyInt32) | fromData.float64)
        tempBv2.Minus(fromData->liveInt32Syms, fromData->liveLossyInt32Syms);
        tempBv2.Or(fromData->liveFloat64Syms);
        tempBv2.And(toData->liveFloat64Syms);

        // toData.float64 =
        //     fromData.float64 & ((toData.int32 - toData.lossyInt32) | toData.float64) |
        //     toData.float64 & ((fromData.int32 - fromData.lossyInt32) | fromData.float64)
        toData->liveFloat64Syms->Or(&tempBv1, &tempBv2);

        // toData.int32 &= fromData.int32
        // toData.lossyInt32 = (fromData.lossyInt32 | toData.lossyInt32) & toData.int32
        toData->liveInt32Syms->And(fromData->liveInt32Syms);
        toData->liveLossyInt32Syms->Or(fromData->liveLossyInt32Syms);
        toData->liveLossyInt32Syms->And(toData->liveInt32Syms);
    }

    // Hoistable fields are a may-set: union across predecessors.
    if (TrackHoistableFields() && HasHoistableFields(fromData))
    {
        if (toData->hoistableFields)
        {
            toData->hoistableFields->Or(fromData->hoistableFields);
        }
        else
        {
            toData->hoistableFields = fromData->hoistableFields->CopyNew(this->alloc);
        }
    }

    if (TrackArgumentsObject())
    {
        // Disagreement between the two sides about which syms hold the
        // arguments object defeats the stack-allocation optimization.
        if (!toData->argObjSyms->Equal(fromData->argObjSyms))
        {
            CannotAllocateArgumentsObjectOnStack();
        }
    }

    if (fromData->maybeTempObjectSyms && !fromData->maybeTempObjectSyms->IsEmpty())
    {
        if (toData->maybeTempObjectSyms)
        {
            toData->maybeTempObjectSyms->Or(fromData->maybeTempObjectSyms);
        }
        else
        {
            toData->maybeTempObjectSyms = fromData->maybeTempObjectSyms->CopyNew(this->alloc);
        }

        if (fromData->canStoreTempObjectSyms && !fromData->canStoreTempObjectSyms->IsEmpty())
        {
            if (toData->canStoreTempObjectSyms)
            {
                // Both need to be temp object
                toData->canStoreTempObjectSyms->And(fromData->canStoreTempObjectSyms);
            }
        }
        else if (toData->canStoreTempObjectSyms)
        {
            toData->canStoreTempObjectSyms->ClearAll();
        }
    }
    else
    {
        Assert(!fromData->canStoreTempObjectSyms || fromData->canStoreTempObjectSyms->IsEmpty());
        if (toData->canStoreTempObjectSyms)
        {
            toData->canStoreTempObjectSyms->ClearAll();
        }
    }

    // Call-sequence state must already agree between the two sides.
    Assert(toData->curFunc == fromData->curFunc);
    Assert((toData->callSequence == nullptr && fromData->callSequence == nullptr) || toData->callSequence->Equals(*(fromData->callSequence)));
    Assert(toData->startCallCount == fromData->startCallCount);
    Assert(toData->argOutCount == fromData->argOutCount);
    Assert(toData->totalOutParamCount == fromData->totalOutParamCount);
    Assert(toData->inlinedArgOutCount == fromData->inlinedArgOutCount);

    // stackLiteralInitFldDataMap is a union of the stack literal from two path.
    // Although we don't need the data on loop prepass, we need to do it for the loop header
    // because we capture the loop header bailout on loop prepass.
    if (fromData->stackLiteralInitFldDataMap != nullptr &&
        (!this->IsLoopPrePass() || (toBlock->isLoopHeader && toBlock->loop == this->rootLoopPrePass)))
    {
        if (toData->stackLiteralInitFldDataMap == nullptr)
        {
            toData->stackLiteralInitFldDataMap = fromData->stackLiteralInitFldDataMap->Clone();
        }
        else
        {
            StackLiteralInitFldDataMap * toMap = toData->stackLiteralInitFldDataMap;
            fromData->stackLiteralInitFldDataMap->Map([toMap](StackSym * stackSym, StackLiteralInitFldData const& data)
            {
                if (toMap->AddNew(stackSym, data) == -1)
                {
                    // If there is an existing data for the stackSym, both path should match
                    DebugOnly(StackLiteralInitFldData const * currentData);
                    Assert(toMap->TryGetReference(stackSym, &currentData));
                    Assert(currentData->currentInitFldCount == data.currentInitFldCount);
                    Assert(currentData->propIds == data.propIds);
                }
            });
        }
    }
}
// Frees every arena allocation owned by a block's GlobOptBlockData, mirroring
// the allocations made at init/clone time. Optional members are deleted only
// when present; the canStoreTempObjectSyms/maybeTempObjectSyms pairing
// invariant is asserted on the way out.
void
GlobOpt::DeleteBlockData(GlobOptBlockData *data)
{
    JitArenaAllocator *const alloc = this->alloc;

    // The hash tables manage their own storage.
    data->symToValueMap->Delete();
    data->exprToValueMap->Delete();

    JitAdelete(alloc, data->liveFields);
    JitAdelete(alloc, data->liveArrayValues);
    if (data->maybeWrittenTypeSyms)
    {
        JitAdelete(alloc, data->maybeWrittenTypeSyms);
    }
    JitAdelete(alloc, data->isTempSrc);
    JitAdelete(alloc, data->liveVarSyms);
    JitAdelete(alloc, data->liveInt32Syms);
    JitAdelete(alloc, data->liveLossyInt32Syms);
    JitAdelete(alloc, data->liveFloat64Syms);

    // SIMD_JS
    JitAdelete(alloc, data->liveSimd128F4Syms);
    JitAdelete(alloc, data->liveSimd128I4Syms);

    if (data->hoistableFields)
    {
        JitAdelete(alloc, data->hoistableFields);
    }
    if (data->argObjSyms)
    {
        JitAdelete(alloc, data->argObjSyms);
    }
    if (data->maybeTempObjectSyms)
    {
        JitAdelete(alloc, data->maybeTempObjectSyms);
        if (data->canStoreTempObjectSyms)
        {
            JitAdelete(alloc, data->canStoreTempObjectSyms);
        }
    }
    else
    {
        // canStoreTempObjectSyms is only ever allocated alongside maybeTempObjectSyms.
        Assert(!data->canStoreTempObjectSyms);
    }

    JitAdelete(alloc, data->valuesToKillOnCalls);

    if(data->inductionVariables)
    {
        JitAdelete(alloc, data->inductionVariables);
    }
    if(data->availableIntBoundChecks)
    {
        JitAdelete(alloc, data->availableIntBoundChecks);
    }

    if (data->stackLiteralInitFldDataMap)
    {
        JitAdelete(alloc, data->stackLiteralInitFldDataMap);
    }

    JitAdelete(alloc, data->changedSyms);
    data->changedSyms = nullptr;

    data->OnDataDeleted();
}
  2070. void
  2071. GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
  2072. {
  2073. FOREACH_BITSET_IN_SPARSEBV(id, bv)
  2074. {
  2075. StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
  2076. IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, TyVar, this->func);
  2077. IR::Instr *lastInstr = block->GetLastInstr();
  2078. if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
  2079. {
  2080. // If branch is using this symbol, hoist the operand as the ToVar load will get
  2081. // inserted right before the branch.
  2082. IR::Opnd *src1 = lastInstr->GetSrc1();
  2083. if (src1)
  2084. {
  2085. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
  2086. {
  2087. lastInstr->HoistSrc1(Js::OpCode::Ld_A);
  2088. }
  2089. IR::Opnd *src2 = lastInstr->GetSrc2();
  2090. if (src2)
  2091. {
  2092. if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
  2093. {
  2094. lastInstr->HoistSrc2(Js::OpCode::Ld_A);
  2095. }
  2096. }
  2097. }
  2098. this->ToVar(lastInstr, newOpnd, block, nullptr, false);
  2099. }
  2100. else
  2101. {
  2102. IR::Instr *lastNextInstr = lastInstr->m_next;
  2103. this->ToVar(lastNextInstr, newOpnd, block, nullptr, false);
  2104. }
  2105. } NEXT_BITSET_IN_SPARSEBV;
  2106. }
  2107. void
  2108. GlobOpt::ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr)
  2109. {
  2110. return this->ToTypeSpec(bv, block, TyInt32, IR::BailOutIntOnly, lossy, insertBeforeInstr);
  2111. }
  2112. void
  2113. GlobOpt::ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
  2114. {
  2115. return this->ToTypeSpec(bv, block, TyFloat64, IR::BailOutNumberOnly);
  2116. }
void
GlobOpt::ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
{
    // Convert every sym in 'bv' to representation 'toType' in 'block'. For each
    // sym, determine which representation it is currently live in (var, int32,
    // float64, or one of the SIMD128 forms), switch to the matching equivalent
    // sym, and emit the conversion via ToTypeSpecUse with 'bailOutKind' to cover
    // runtime values that don't fit the target type. The conversion is placed
    // before 'insertBeforeInstr' when supplied, otherwise at the end of 'block'.
    FOREACH_BITSET_IN_SPARSEBV(id, bv)
    {
        StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
        IRType fromType;

        // Win8 bug: 757126. If we are trying to type specialize the arguments object,
        // let's make sure stack args optimization is not enabled. This is a problem, particularly,
        // if the instruction comes from an unreachable block. In other cases, the pass on the
        // instruction itself should disable arguments object optimization.
        if(block->globOptData.argObjSyms && IsArgumentsSymID(id, block->globOptData))
        {
            CannotAllocateArgumentsObjectOnStack();
        }

        // Pick the source representation in preference order: var, then
        // non-lossy int32, then float64; otherwise the sym must be live in a
        // SIMD128 form (asserted below).
        if (block->globOptData.liveVarSyms->Test(id))
        {
            fromType = TyVar;
        }
        else if (block->globOptData.liveInt32Syms->Test(id) && !block->globOptData.liveLossyInt32Syms->Test(id))
        {
            fromType = TyInt32;
            stackSym = stackSym->GetInt32EquivSym(this->func);
        }
        else if (block->globOptData.liveFloat64Syms->Test(id))
        {
            fromType = TyFloat64;
            stackSym = stackSym->GetFloat64EquivSym(this->func);
        }
        else
        {
            Assert(IsLiveAsSimd128(stackSym, &block->globOptData));
            if (IsLiveAsSimd128F4(stackSym, &block->globOptData))
            {
                fromType = TySimd128F4;
                stackSym = stackSym->GetSimd128F4EquivSym(this->func);
            }
            else
            {
                fromType = TySimd128I4;
                stackSym = stackSym->GetSimd128I4EquivSym(this->func);
            }
        }

        IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, fromType, this->func);
        IR::Instr *lastInstr = block->GetLastInstr();

        if (!insertBeforeInstr && lastInstr->IsBranchInstr())
        {
            // If branch is using this symbol, hoist the operand as the ToInt32 load will get
            // inserted right before the branch.
            IR::Instr *instrPrev = lastInstr->m_prev;
            IR::Opnd *src1 = lastInstr->GetSrc1();
            if (src1)
            {
                if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
                {
                    lastInstr->HoistSrc1(Js::OpCode::Ld_A);
                }
                IR::Opnd *src2 = lastInstr->GetSrc2();
                if (src2)
                {
                    if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
                    {
                        lastInstr->HoistSrc2(Js::OpCode::Ld_A);
                    }
                }

                // Did we insert anything?
                if (lastInstr->m_prev != instrPrev)
                {
                    // If we had ByteCodeUses right before the branch, move them back down
                    // (below the hoisted Ld_A) so they stay adjacent to the branch.
                    IR::Instr *insertPoint = lastInstr;
                    for (IR::Instr *instrBytecode = instrPrev; instrBytecode->m_opcode == Js::OpCode::ByteCodeUses; instrBytecode = instrBytecode->m_prev)
                    {
                        instrBytecode->Unlink();
                        insertPoint->InsertBefore(instrBytecode);
                        insertPoint = instrBytecode;
                    }
                }
            }
        }

        this->ToTypeSpecUse(nullptr, newOpnd, block, nullptr, nullptr, toType, bailOutKind, lossy, insertBeforeInstr);
    } NEXT_BITSET_IN_SPARSEBV;
}
void
GlobOpt::CleanUpValueMaps()
{
    // Prune symToValueMap and exprToValueMap of entries for syms the backward
    // pass proved dead, so the per-block hash tables don't grow without bound.
    // Runs only after 'cleanupCount' instructions have been processed, except
    // in landing pads, which are always cleaned so that a landing pad optimized
    // twice produces the same info both times.
    //
    // NOTE(review): this function reads both 'this->blockData' and
    // 'this->currentBlock->globOptData' — presumably aliases of the same block
    // data; confirm before relying on the distinction.

    // Don't do cleanup if it's been done recently.
    // Landing pad could get optimized twice...
    // We want the same info out the first and second time. So always cleanup.
    // Increasing the cleanup threshold count for asmjs to 500
    uint cleanupCount = (!GetIsAsmJSFunc()) ? CONFIG_FLAG(GoptCleanupThreshold) : CONFIG_FLAG(AsmGoptCleanupThreshold);
    if (!this->currentBlock->IsLandingPad() && this->instrCountSinceLastCleanUp < cleanupCount)
    {
        return;
    }
    this->instrCountSinceLastCleanUp = 0;

    GlobHashTable *thisTable = this->blockData.symToValueMap;
    BVSparse<JitArenaAllocator> deadSymsBv(this->tempAlloc);            // syms whose table entries will be dropped
    BVSparse<JitArenaAllocator> keepAliveSymsBv(this->tempAlloc);       // symStores referenced by surviving entries; must not be dropped
    BVSparse<JitArenaAllocator> availableValueNumbers(this->tempAlloc); // value numbers still reachable after pruning
    availableValueNumbers.Copy(byteCodeConstantValueNumbersBv);
    BVSparse<JitArenaAllocator> *upwardExposedUses = this->currentBlock->upwardExposedUses;
    BVSparse<JitArenaAllocator> *upwardExposedFields = this->currentBlock->upwardExposedFields;
    bool isInLoop = !!this->currentBlock->loop;

    // Collect syms referenced by the in-flight call sequence (StartCall..Call);
    // their entries must survive even if backward liveness says they're dead.
    BVSparse<JitArenaAllocator> symsInCallSequence(this->tempAlloc);
    SListBase<IR::Opnd *> * callSequence = this->currentBlock->globOptData.callSequence;
    if (callSequence && !callSequence->Empty())
    {
        FOREACH_SLISTBASE_ENTRY(IR::Opnd *, opnd, callSequence)
        {
            StackSym * sym = opnd->GetStackSym();
            symsInCallSequence.Set(sym->m_id);
        }
    }
    // NOTE(review): unusual brace placement — the closing brace above ends the
    // macro's internal loop and NEXT_SLISTBASE_ENTRY's braces close the macro
    // scope together with the 'if'. The tokens balance, but confirm against the
    // FOREACH/NEXT macro definitions before reformatting.
    NEXT_SLISTBASE_ENTRY;

    // Pass 1: walk every bucket of the sym->value table, removing or marking
    // entries for syms that are no longer live.
    for (uint i = 0; i < thisTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(GlobHashBucket, bucket, &thisTable->table[i], iter)
        {
            bool isSymUpwardExposed = upwardExposedUses->Test(bucket.value->m_id) || upwardExposedFields->Test(bucket.value->m_id);
            if (!isSymUpwardExposed && symsInCallSequence.Test(bucket.value->m_id))
            {
                // Don't remove/shrink sym-value pair if the sym is referenced in callSequence even if the sym is dead according to backward data flow.
                // This is possible in some edge cases that an infinite loop is involved when evaluating parameter for a function (between StartCall and Call),
                // there is no backward data flow into the infinite loop block, but non empty callSequence still populates to it in this (forward) pass
                // which causes error when looking up value for the syms in callSequence (cannot find the value).
                // It would cause error to fill out the bailout information for the loop blocks.
                // Remove dead syms from callSequence has some risk because there are various associated counters which need to be consistent.
                continue;
            }

            // Make sure symbol was created before backward pass.
            // If symbols isn't upward exposed, mark it as dead.
            // If a symbol was copy-prop'd in a loop prepass, the upwardExposedUses info could be wrong. So wait until we are out of the loop before clearing it.
            if ((SymID)bucket.value->m_id <= this->maxInitialSymID && !isSymUpwardExposed
                && (!isInLoop || !this->prePassCopyPropSym->Test(bucket.value->m_id)))
            {
                Value *val = bucket.element;
                ValueInfo *valueInfo = val->GetValueInfo();
                Sym * sym = bucket.value;
                Sym *symStore = valueInfo->GetSymStore();

                if (symStore && symStore == bucket.value)
                {
                    // The sym is its own symStore: it may still be a copy-prop
                    // source for other syms, so defer the deletion to pass 2.

                    // Keep constants around, as we don't know if there will be further uses
                    if (!bucket.element->GetValueInfo()->IsVarConstant() && !bucket.element->GetValueInfo()->HasIntConstantValue())
                    {
                        // Symbol may still be a copy-prop candidate. Wait before deleting it.
                        deadSymsBv.Set(bucket.value->m_id);

                        // Make sure the type sym is added to the dead syms vector as well, because type syms are
                        // created in backward pass and so their symIds > maxInitialSymID.
                        if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                        {
                            deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
                else
                {
                    // Make sure the type sym is added to the dead syms vector as well, because type syms are
                    // created in backward pass and so their symIds > maxInitialSymID. Perhaps we could remove
                    // it explicitly here, but would it work alright with the iterator?
                    if (sym->IsStackSym() && sym->AsStackSym()->HasObjectTypeSym())
                    {
                        deadSymsBv.Set(sym->AsStackSym()->GetObjectTypeSym()->m_id);
                    }

                    // Not a copy-prop candidate; delete it right away.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->blockData.liveInt32Syms->Clear(sym->m_id);
                    this->blockData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->blockData.liveFloat64Syms->Clear(sym->m_id);
                }
            }
            else
            {
                Sym * sym = bucket.value;

                if (sym->IsPropertySym() && !this->blockData.liveFields->Test(sym->m_id))
                {
                    // Remove propertySyms which are not live anymore.
                    iter.RemoveCurrent(thisTable->alloc);
                    this->blockData.liveInt32Syms->Clear(sym->m_id);
                    this->blockData.liveLossyInt32Syms->Clear(sym->m_id);
                    this->blockData.liveFloat64Syms->Clear(sym->m_id);
                }
                else
                {
                    // Look at the copy-prop candidate. We don't want to get rid of the data for a symbol which is
                    // a copy-prop candidate.
                    Value *val = bucket.element;
                    ValueInfo *valueInfo = val->GetValueInfo();

                    Sym *symStore = valueInfo->GetSymStore();

                    if (symStore && symStore != bucket.value)
                    {
                        // Another sym's value is stored in symStore: keep it
                        // (and its type sym) alive even if marked dead above.
                        keepAliveSymsBv.Set(symStore->m_id);
                        if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                        {
                            keepAliveSymsBv.Set(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                        }
                    }
                    availableValueNumbers.Set(val->GetValueNumber());
                }
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    deadSymsBv.Minus(&keepAliveSymsBv);

    // Now cleanup exprToValueMap table: drop CSE candidates whose source value
    // numbers are no longer available; keep the symStore of any survivor alive.
    ExprHashTable *thisExprTable = this->blockData.exprToValueMap;
    bool oldHasCSECandidatesValue = this->currentBlock->globOptData.hasCSECandidates;  // Could be false if none need bailout.
    this->currentBlock->globOptData.hasCSECandidates = false;

    for (uint i = 0; i < thisExprTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY_EDITING(ExprHashBucket, bucket, &thisExprTable->table[i], iter)
        {
            ExprHash hash = bucket.value;
            ValueNumber src1ValNum = hash.GetSrc1ValueNumber();
            ValueNumber src2ValNum = hash.GetSrc2ValueNumber();

            // If src1Val or src2Val are not available anymore, no point keeping this CSE candidate
            bool removeCurrent = false;
            if ((src1ValNum && !availableValueNumbers.Test(src1ValNum))
                || (src2ValNum && !availableValueNumbers.Test(src2ValNum)))
            {
                removeCurrent = true;
            }
            else
            {
                // If we are keeping this value, make sure we also keep the symStore in the value table
                removeCurrent = true; // Remove by default, unless it's set to false later below.
                Value *val = bucket.element;
                if (val)
                {
                    Sym *symStore = val->GetValueInfo()->GetSymStore();
                    if (symStore)
                    {
                        Value *symStoreVal = this->FindValue(this->currentBlock->globOptData.symToValueMap, symStore);

                        if (symStoreVal && symStoreVal->GetValueNumber() == val->GetValueNumber())
                        {
                            removeCurrent = false;
                            deadSymsBv.Clear(symStore->m_id);
                            if (symStore->IsStackSym() && symStore->AsStackSym()->HasObjectTypeSym())
                            {
                                deadSymsBv.Clear(symStore->AsStackSym()->GetObjectTypeSym()->m_id);
                            }
                        }
                    }
                }
            }

            if(removeCurrent)
            {
                iter.RemoveCurrent(thisExprTable->alloc);
            }
            else
            {
                // At least one candidate survived: restore the previous flag.
                this->currentBlock->globOptData.hasCSECandidates = oldHasCSECandidatesValue;
            }
        } NEXT_SLISTBASE_ENTRY_EDITING;
    }

    // Finally drop the deferred (pass-1) dead syms from the value table...
    FOREACH_BITSET_IN_SPARSEBV(dead_id, &deadSymsBv)
    {
        thisTable->Clear(dead_id);
    }
    NEXT_BITSET_IN_SPARSEBV;

    // ...and from the liveness bit-vectors.
    if (!deadSymsBv.IsEmpty())
    {
        if (this->func->IsJitInDebugMode())
        {
            // Do not remove non-temp local vars from liveVarSyms (i.e. do not let them become dead).
            // We will need to restore all initialized/used so far non-temp local during bail out.
            // (See BackwardPass::ProcessBailOutInfo)
            Assert(this->func->m_nonTempLocalVars);
            BVSparse<JitArenaAllocator> tempBv(this->tempAlloc);
            tempBv.Minus(&deadSymsBv, this->func->m_nonTempLocalVars);
            this->blockData.liveVarSyms->Minus(&tempBv);
#if DBG
            tempBv.And(this->blockData.liveInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveInt32Syms?");
            tempBv.And(this->blockData.liveLossyInt32Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveLossyInt32Syms?");
            tempBv.And(this->blockData.liveFloat64Syms, this->func->m_nonTempLocalVars);
            AssertMsg(tempBv.IsEmpty(), "Type spec is disabled under debugger. How come did we get a non-temp local in liveFloat64Syms?");
#endif
        }
        else
        {
            this->blockData.liveVarSyms->Minus(&deadSymsBv);
        }

        this->blockData.liveInt32Syms->Minus(&deadSymsBv);
        this->blockData.liveLossyInt32Syms->Minus(&deadSymsBv);
        this->blockData.liveFloat64Syms->Minus(&deadSymsBv);
    }

    // The backward-pass bit-vectors for this block are no longer needed.
    JitAdelete(this->alloc, upwardExposedUses);
    this->currentBlock->upwardExposedUses = nullptr;
    JitAdelete(this->alloc, upwardExposedFields);
    this->currentBlock->upwardExposedFields = nullptr;
    if (this->currentBlock->cloneStrCandidates)
    {
        JitAdelete(this->alloc, this->currentBlock->cloneStrCandidates);
        this->currentBlock->cloneStrCandidates = nullptr;
    }
}
// Scan the value table of one loop back-edge block for propertySyms that are
// viable field-PRE candidates (live on the back-edge and in the landing pad,
// with a stable stack-sym symStore and a recorded prepass load). Returns a
// lazily-allocated list of matching buckets, or nullptr when there are none or
// when a landing-pad value is unexpectedly missing.
PRECandidatesList * GlobOpt::FindBackEdgePRECandidates(BasicBlock *block, JitArenaAllocator *alloc)
{
    // Iterate over the value table looking for propertySyms which are candidates to
    // pre-load in the landing pad for field PRE
    GlobHashTable *valueTable = block->globOptData.symToValueMap;
    Loop *loop = block->loop;
    PRECandidatesList *candidates = nullptr;

    for (uint i = 0; i < valueTable->tableSize; i++)
    {
        FOREACH_SLISTBASE_ENTRY(GlobHashBucket, bucket, &valueTable->table[i])
        {
            Sym *sym = bucket.value;
            if (!sym->IsPropertySym())
            {
                continue;
            }

            PropertySym *propertySym = sym->AsPropertySym();

            // Field should be live on the back-edge
            if (!block->globOptData.liveFields->Test(propertySym->m_id))
            {
                continue;
            }

            // Field should be live in the landing pad as well
            if (!loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
            {
                continue;
            }

            Value *value = bucket.element;
            Sym *symStore = value->GetValueInfo()->GetSymStore();
            if (!symStore || !symStore->IsStackSym())
            {
                continue;
            }

            // Check upwardExposed in case of:
            //  s1 = 0;
            // loop:
            //        = o.x;
            //        foo();
            //    o.x = s1;
            // Can't thrash s1 in loop top.
            if (!symStore->AsStackSym()->IsSingleDef() || loop->GetHeadBlock()->upwardExposedUses->Test(symStore->m_id))
            {
                // If symStore isn't singleDef, we need to make sure it still has the same value.
                // This usually fails if we are not aggressive at transferring values in the prepass.
                Value **pSymStoreFromValue = valueTable->Get(symStore->m_id);

                // Consider: We should be fine if symStore isn't live in landing pad...
                if (!pSymStoreFromValue || (*pSymStoreFromValue)->GetValueNumber() != value->GetValueNumber())
                {
                    continue;
                }
            }

            BasicBlock *landingPad = loop->landingPad;
            Value *landingPadValue = this->FindValue(landingPad->globOptData.symToValueMap, propertySym);
            if (!landingPadValue)
            {
                // Value should be added as initial value or already be there.
                return nullptr;
            }

            // A load for this field must have been recorded during the prepass;
            // otherwise there is nothing to clone into the landing pad.
            IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
            if (!ldInstr)
            {
                continue;
            }

            if (!candidates)
            {
                // Lazily allocate the list on the first candidate found.
                candidates = Anew(alloc, PRECandidatesList, alloc);
            }

            candidates->Prepend(&bucket);

        } NEXT_SLISTBASE_ENTRY;
    }

    return candidates;
}
  2486. PRECandidatesList * GlobOpt::RemoveUnavailableCandidates(BasicBlock *block, PRECandidatesList *candidates, JitArenaAllocator *alloc)
  2487. {
  2488. // In case of multiple back-edges to the loop, make sure the candidates are still valid.
  2489. FOREACH_SLIST_ENTRY_EDITING(GlobHashBucket*, candidate, (SList<GlobHashBucket*>*)candidates, iter)
  2490. {
  2491. Value *candidateValue = candidate->element;
  2492. PropertySym *candidatePropertySym = candidate->value->AsPropertySym();
  2493. ValueNumber valueNumber = candidateValue->GetValueNumber();
  2494. Sym *symStore = candidateValue->GetValueInfo()->GetSymStore();
  2495. Value *blockValue = this->FindValue(block->globOptData.symToValueMap, candidatePropertySym);
  2496. if (blockValue && blockValue->GetValueNumber() == valueNumber
  2497. && blockValue->GetValueInfo()->GetSymStore() == symStore)
  2498. {
  2499. Value *symStoreValue = this->FindValue(block->globOptData.symToValueMap, symStore);
  2500. if (symStoreValue && symStoreValue->GetValueNumber() == valueNumber)
  2501. {
  2502. continue;
  2503. }
  2504. }
  2505. iter.RemoveCurrent();
  2506. } NEXT_SLIST_ENTRY_EDITING;
  2507. return candidates;
  2508. }
  2509. PRECandidatesList * GlobOpt::FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc)
  2510. {
  2511. // Find the set of PRE candidates
  2512. BasicBlock *loopHeader = loop->GetHeadBlock();
  2513. PRECandidatesList *candidates = nullptr;
  2514. bool firstBackEdge = true;
  2515. FOREACH_PREDECESSOR_BLOCK(blockPred, loopHeader)
  2516. {
  2517. if (!loop->IsDescendentOrSelf(blockPred->loop))
  2518. {
  2519. // Not a loop back-edge
  2520. continue;
  2521. }
  2522. if (firstBackEdge)
  2523. {
  2524. candidates = this->FindBackEdgePRECandidates(blockPred, alloc);
  2525. }
  2526. else
  2527. {
  2528. candidates = this->RemoveUnavailableCandidates(blockPred, candidates, alloc);
  2529. }
  2530. } NEXT_PREDECESSOR_BLOCK;
  2531. return candidates;
  2532. }
// Try to materialize one field-PRE candidate: clone its prepass load into the
// loop's landing pad, retargeted at the candidate's symStore. Returns true when
// the load was inserted; false when the candidate must be retried later or
// abandoned (objPtr not yet live in the landing pad, symStore already live, or
// mismatched value numbers).
BOOL GlobOpt::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
{
    // Insert a load for each field PRE candidate.
    PropertySym *propertySym = candidate->value->AsPropertySym();
    StackSym *objPtrSym = propertySym->m_stackSym;

    // If objPtr isn't live, we'll retry later.
    // Another PRE candidate may insert a load for it.
    if (!this->IsLive(objPtrSym, loop->landingPad))
    {
        return false;
    }
    BasicBlock *landingPad = loop->landingPad;
    Value *value = candidate->element;
    Sym *symStore = value->GetValueInfo()->GetSymStore();

    // The symStore can't be live into the loop
    // The symStore needs to still have the same value
    Assert(symStore && symStore->IsStackSym());

    if (this->IsLive(symStore, loop->landingPad))
    {
        // May have already been hoisted:
        //  o.x = t1;
        //  o.y = t1;
        return false;
    }
    Value *landingPadValue = this->FindValue(landingPad->globOptData.symToValueMap, propertySym);

    // Value should be added as initial value or already be there.
    Assert(landingPadValue);

    IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
    Assert(ldInstr);

    // NOTE(review): 'propertyType' is captured below for mono obj-type-spec
    // candidates but never read in this function — possibly vestigial; confirm.
    JITTypeHolder propertyType(nullptr);

    // Create instr to put in landing pad for compensation
    Assert(IsPREInstrCandidateLoad(ldInstr->m_opcode));
    IR::SymOpnd *ldSrc = ldInstr->GetSrc1()->AsSymOpnd();

    if (ldSrc->m_sym != propertySym)
    {
        // It's possible that the property syms differ but have equivalent objPtrs. Verify their values.
        Value *val1 = this->FindValue(ldSrc->m_sym->AsPropertySym()->m_stackSym);
        Value *val2 = this->FindValue(propertySym->m_stackSym);
        if (!val1 || !val2 || val1->GetValueNumber() != val2->GetValueNumber())
        {
            return false;
        }
    }

    // Clone the prepass load; the original stays in the loop body.
    ldInstr = ldInstr->Copy();

    // Consider: Shouldn't be necessary once we have copy-prop in prepass...
    ldInstr->GetSrc1()->AsSymOpnd()->m_sym = propertySym;
    ldSrc = ldInstr->GetSrc1()->AsSymOpnd();

    if (ldSrc->IsPropertySymOpnd())
    {
        IR::PropertySymOpnd *propSymOpnd = ldSrc->AsPropertySymOpnd();
        IR::PropertySymOpnd *newPropSymOpnd;
        if (propSymOpnd->IsMonoObjTypeSpecCandidate())
        {
            propertyType = propSymOpnd->GetType();
        }
        // Drop flow-sensitive info: it was computed for the loop body, not the landing pad.
        newPropSymOpnd = propSymOpnd->AsPropertySymOpnd()->CopyWithoutFlowSensitiveInfo(this->func);
        ldInstr->ReplaceSrc1(newPropSymOpnd);
    }

    // Retarget the cloned load's destination at the symStore.
    if (ldInstr->GetDst()->AsRegOpnd()->m_sym != symStore)
    {
        ldInstr->ReplaceDst(IR::RegOpnd::New(symStore->AsStackSym(), TyVar, this->func));
    }
    ldInstr->GetSrc1()->SetIsJITOptimizedReg(true);
    ldInstr->GetDst()->SetIsJITOptimizedReg(true);

    landingPad->globOptData.liveVarSyms->Set(symStore->m_id);
    loop->fieldPRESymStore->Set(symStore->m_id);

    ValueType valueType(ValueType::Uninitialized);
    Value *initialValue;

    // Fix up the profile value type on the cloned load when the loop has an
    // initial value recorded for this field.
    if (loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue))
    {
        if (ldInstr->IsProfiledInstr())
        {
            if (initialValue->GetValueNumber() == value->GetValueNumber())
            {
                if (value->GetValueInfo()->IsUninitialized())
                {
                    valueType = ldInstr->AsProfiledInstr()->u.FldInfo().valueType;
                }
                else
                {
                    valueType = value->GetValueInfo()->Type();
                }
            }
            else
            {
                valueType = ValueType::Uninitialized;
            }
            ldInstr->AsProfiledInstr()->u.FldInfo().valueType = valueType;
        }
    }
    else
    {
        valueType = landingPadValue->GetValueInfo()->Type();
    }

    loop->symsUsedBeforeDefined->Set(symStore->m_id);

    if (valueType.IsLikelyNumber())
    {
        loop->likelyNumberSymsUsedBeforeDefined->Set(symStore->m_id);
        if (DoAggressiveIntTypeSpec() ? valueType.IsLikelyInt() : valueType.IsInt())
        {
            // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
            // specialization is enabled
            loop->likelyIntSymsUsedBeforeDefined->Set(symStore->m_id);
        }
    }

    // Insert in landing pad
    if (ldInstr->HasAnyImplicitCalls())
    {
        // The load may trigger implicit calls; place it inside the loop's
        // disable-implicit-call region so such a call bails out instead.
        IR::Instr * bailInstr = EnsureDisableImplicitCallRegion(loop);

        bailInstr->InsertBefore(ldInstr);
    }
    else if (loop->endDisableImplicitCall)
    {
        loop->endDisableImplicitCall->InsertBefore(ldInstr);
    }
    else
    {
        loop->landingPad->InsertAfter(ldInstr);
    }

    ldInstr->ClearByteCodeOffset();
    ldInstr->SetByteCodeOffset(landingPad->GetFirstInstr());

#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
    {
        Output::Print(_u("** TRACE: Field PRE: field pre-loaded in landing pad of loop head #%-3d: "), loop->GetHeadBlock()->GetBlockNum());
        ldInstr->Dump();
        Output::Print(_u("\n"));
    }
#endif

    return true;
}
  2664. void GlobOpt::PreloadPRECandidates(Loop *loop, PRECandidatesList *candidates)
  2665. {
  2666. // Insert loads in landing pad for field PRE candidates. Iterate while(changed)
  2667. // for the o.x.y cases.
  2668. BOOL changed = true;
  2669. if (!candidates)
  2670. {
  2671. return;
  2672. }
  2673. Assert(loop->landingPad->GetFirstInstr() == loop->landingPad->GetLastInstr());
  2674. while (changed)
  2675. {
  2676. changed = false;
  2677. FOREACH_SLIST_ENTRY_EDITING(GlobHashBucket*, candidate, (SList<GlobHashBucket*>*)candidates, iter)
  2678. {
  2679. if (this->PreloadPRECandidate(loop, candidate))
  2680. {
  2681. changed = true;
  2682. iter.RemoveCurrent();
  2683. }
  2684. } NEXT_SLIST_ENTRY_EDITING;
  2685. }
  2686. }
  2687. void GlobOpt::FieldPRE(Loop *loop)
  2688. {
  2689. if (!DoFieldPRE(loop))
  2690. {
  2691. return;
  2692. }
  2693. PRECandidatesList *candidates;
  2694. JitArenaAllocator *alloc = this->tempAlloc;
  2695. candidates = this->FindPossiblePRECandidates(loop, alloc);
  2696. this->PreloadPRECandidates(loop, candidates);
  2697. }
// When merging a back-edge into a loop header (outside the prepass), insert a
// CloneStr instruction for each clone-candidate sym that is a temp source on
// the back-edge, is live as a var both in the landing pad and on the back-edge,
// and is likely a string on both paths. The clone is placed in the landing pad
// (or before the loop's bailout instruction when one exists), and the sym is
// then marked as a temp source in the merged block data.
void GlobOpt::InsertCloneStrs(BasicBlock *toBlock, GlobOptBlockData *toData, GlobOptBlockData *fromData)
{
    if (toBlock->isLoopHeader   // isLoopBackEdge
        && toBlock->cloneStrCandidates
        && !IsLoopPrePass())
    {
        Loop *loop = toBlock->loop;
        BasicBlock *landingPad = loop->landingPad;
        const SymTable *const symTable = func->m_symTable;
        Assert(tempBv->IsEmpty());
        // Restrict to candidates that are also temp sources on the back-edge.
        // (tempBv is a shared member; it must be empty on entry and is cleared below.)
        tempBv->And(toBlock->cloneStrCandidates, fromData->isTempSrc);
        FOREACH_BITSET_IN_SPARSEBV(id, tempBv)
        {
            StackSym *const sym = (StackSym *)symTable->Find(id);
            Assert(sym);

            // Must be live as a var both in the landing pad and on the back-edge.
            if (!landingPad->globOptData.liveVarSyms->Test(id)
                || !fromData->liveVarSyms->Test(id))
            {
                continue;
            }

            Value * landingPadValue = FindValue(landingPad->globOptData.symToValueMap, sym);
            if (landingPadValue == nullptr)
            {
                continue;
            }

            Value * loopValue = FindValue(fromData->symToValueMap, sym);
            if (loopValue == nullptr)
            {
                continue;
            }

            ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
            ValueInfo *loopValueInfo = loopValue->GetValueInfo();

            // Only clone syms that look like strings on both paths.
            if (landingPadValueInfo->IsLikelyString()
                && loopValueInfo->IsLikelyString())
            {
                // CloneStr uses the same reg opnd as both dst and src1 (clone in place).
                IR::Instr *cloneStr = IR::Instr::New(Js::OpCode::CloneStr, this->func);
                IR::RegOpnd *opnd = IR::RegOpnd::New(sym, IRType::TyVar, this->func);
                cloneStr->SetDst(opnd);
                cloneStr->SetSrc1(opnd);
                if (loop->bailOutInfo->bailOutInstr)
                {
                    loop->bailOutInfo->bailOutInstr->InsertBefore(cloneStr);
                }
                else
                {
                    landingPad->InsertAfter(cloneStr);
                }
                toData->isTempSrc->Set(id);
            }
        }
        NEXT_BITSET_IN_SPARSEBV;
        tempBv->ClearAll();
    }
}
// Merge fromBlock's sym->value map into toData's map (an intersection walked
// in lock-step per hash bucket, since bucket lists are ordered by sym id),
// dropping syms present on only one path and replacing entries whose values
// differ with freshly merged values. Afterwards intersects the CSE expression
// map and reprocesses value kills for the merged block.
void
GlobOpt::MergeValueMaps(
    GlobOptBlockData *toData,
    BasicBlock *toBlock,
    BasicBlock *fromBlock,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
{
    GlobOptBlockData *fromData = &(fromBlock->globOptData);
    bool isLoopBackEdge = toBlock->isLoopHeader;
    Loop *loop = toBlock->loop;
    bool isLoopPrepass = (loop && this->prePassLoop == loop);

    Assert(valuesCreatedForMerge->Count() == 0);
    DebugOnly(ValueSetByValueNumber mergedValues(tempAlloc, 64));

    // tempBv (shared member) is borrowed for kill tracking; must be empty on
    // entry and is cleared before returning.
    BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills = tempBv;
    Assert(mergedValueTypesTrackedForKills->IsEmpty());
    toData->valuesToKillOnCalls->Clear(); // the tracking will be reevaluated based on merged value types

    GlobHashTable *thisTable = toData->symToValueMap;
    GlobHashTable *otherTable = fromData->symToValueMap;
    for (uint i = 0; i < thisTable->tableSize; i++)
    {
        SListBase<GlobHashBucket>::Iterator iter2(&otherTable->table[i]);
        iter2.Next();
        FOREACH_SLISTBASE_ENTRY_EDITING(GlobHashBucket, bucket, &thisTable->table[i], iter)
        {
            // Advance the 'from' iterator until it catches up with this sym id.
            while (iter2.IsValid() && bucket.value->m_id < iter2.Data().value->m_id)
            {
                iter2.Next();
            }
            Value *newValue = nullptr;
            if (iter2.IsValid() && bucket.value->m_id == iter2.Data().value->m_id)
            {
                // Sym exists on both paths: merge the two values.
                newValue =
                    MergeValues(
                        bucket.element,
                        iter2.Data().element,
                        iter2.Data().value,
                        toData,
                        fromData,
                        isLoopBackEdge,
                        symsRequiringCompensation,
                        symsCreatedForMerge);
            }
            if (newValue == nullptr)
            {
                // Sym missing on the other path, or the merge failed: drop the entry.
                iter.RemoveCurrent(thisTable->alloc);
                continue;
            }
            else
            {
#if DBG
                // Ensure that only one value per value number is produced by merge. Byte-code constant values are reused in
                // multiple blocks without cloning, so exclude those value numbers.
                {
                    Value *const previouslyMergedValue = mergedValues.Lookup(newValue->GetValueNumber());
                    if (previouslyMergedValue)
                    {
                        if (!byteCodeConstantValueNumbersBv->Test(newValue->GetValueNumber()))
                        {
                            Assert(newValue == previouslyMergedValue);
                        }
                    }
                    else
                    {
                        mergedValues.Add(newValue);
                    }
                }
#endif

                TrackMergedValueForKills(newValue, toData, mergedValueTypesTrackedForKills);
                bucket.element = newValue;
            }
            iter2.Next();
        } NEXT_SLISTBASE_ENTRY_EDITING;

        // NOTE(review): this tail loop only advances iter2 and has no other
        // visible effect — it looks like a no-op (possibly vestigial or
        // trimmed logic); confirm against upstream before removing.
        if (isLoopPrepass && !this->rootLoopPrePass->allFieldsKilled)
        {
            while (iter2.IsValid())
            {
                iter2.Next();
            }
        }
    }

    valuesCreatedForMerge->Clear();
    DebugOnly(mergedValues.Reset());

    // Return the borrowed tempBv and intersect the CSE expression maps.
    mergedValueTypesTrackedForKills->ClearAll();
    toData->exprToValueMap->And(fromData->exprToValueMap);
    ProcessValueKills(toBlock, toData);

    bool isLastLoopBackEdge = false;
    if (isLoopBackEdge)
    {
        ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(toBlock, toData);

        // Predecessors are iterated in increasing block-number order (asserted),
        // so the last one seen is the highest-numbered back-edge.
        BasicBlock *lastBlock = nullptr;
        FOREACH_PREDECESSOR_BLOCK(pred, toBlock)
        {
            Assert(!lastBlock || pred->GetBlockNum() > lastBlock->GetBlockNum());
            lastBlock = pred;
        }NEXT_PREDECESSOR_BLOCK;
        isLastLoopBackEdge = (lastBlock == fromBlock);
        // NOTE(review): isLastLoopBackEdge is computed but not used within this
        // function as visible here — confirm whether trailing code was removed.
    }
}
Value *
GlobOpt::MergeValues(
    Value *toDataValue,
    Value *fromDataValue,
    Sym *fromDataSym,
    GlobOptBlockData *toData,
    GlobOptBlockData *fromData,
    bool isLoopBackEdge,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
{
    // Merges the value tracked for a sym on the 'to' path with the value tracked for the same sym on the 'from' path at
    // a control-flow join. Returns 'toDataValue' when nothing changed, a (possibly cached) merged value otherwise, or
    // nullptr when no useful merged value info could be produced (possible via MergeJsTypeValueInfo).

    // Same map
    if (toDataValue == fromDataValue)
    {
        return toDataValue;
    }

    const ValueNumberPair sourceValueNumberPair(toDataValue->GetValueNumber(), fromDataValue->GetValueNumber());
    const bool sameValueNumber = sourceValueNumberPair.First() == sourceValueNumberPair.Second();

    ValueInfo *newValueInfo =
        this->MergeValueInfo(
            toDataValue,
            fromDataValue,
            fromDataSym,
            fromData,
            isLoopBackEdge,
            sameValueNumber,
            symsRequiringCompensation,
            symsCreatedForMerge);

    if (newValueInfo == nullptr)
    {
        return nullptr;
    }

    if (sameValueNumber && newValueInfo == toDataValue->GetValueInfo())
    {
        // The merge changed neither the value number nor the value info; keep the existing value.
        return toDataValue;
    }

    // There may be other syms in toData that haven't been merged yet, referring to the current toData value for this sym. If
    // the merge produced a new value info, don't corrupt the value info for the other sym by changing the same value. Instead,
    // create one value per source value number pair per merge and reuse that for new value infos.
    Value *newValue = valuesCreatedForMerge->Lookup(sourceValueNumberPair, nullptr);
    if(newValue)
    {
        Assert(sameValueNumber == (newValue->GetValueNumber() == toDataValue->GetValueNumber()));

        // This is an exception where Value::SetValueInfo is called directly instead of GlobOpt::ChangeValueInfo, because we're
        // actually generating new value info through merges.
        newValue->SetValueInfo(newValueInfo);
    }
    else
    {
        // Preserve the value number when both sides agreed on it; otherwise this is a genuinely new value.
        newValue = NewValue(sameValueNumber ? sourceValueNumberPair.First() : NewValueNumber(), newValueInfo);
        valuesCreatedForMerge->Add(sourceValueNumberPair, newValue);
    }

    // Set symStore if same on both paths.
    if (toDataValue->GetValueInfo()->GetSymStore() == fromDataValue->GetValueInfo()->GetSymStore())
    {
        this->SetSymStoreDirect(newValueInfo, toDataValue->GetValueInfo()->GetSymStore());
    }

    return newValue;
}
ValueInfo *
GlobOpt::MergeValueInfo(
    Value *toDataVal,
    Value *fromDataVal,
    Sym *fromDataSym,
    GlobOptBlockData *fromData,
    bool isLoopBackEdge,
    bool sameValueNumber,
    BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
    BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
{
    // Computes the merged ValueInfo of two values joined at a merge point, dispatching to the specialized merge
    // routines for JS-type values, likely-int values, and optimized-array values. May return nullptr (only via
    // MergeJsTypeValueInfo) when no useful merged info survives.
    ValueInfo *const toDataValueInfo = toDataVal->GetValueInfo();
    ValueInfo *const fromDataValueInfo = fromDataVal->GetValueInfo();

    // Same value
    if (toDataValueInfo == fromDataValueInfo)
    {
        return toDataValueInfo;
    }

    // JS-type values only ever merge with other JS-type values.
    if (toDataValueInfo->IsJsType() || fromDataValueInfo->IsJsType())
    {
        Assert(toDataValueInfo->IsJsType() && fromDataValueInfo->IsJsType());
        return MergeJsTypeValueInfo(toDataValueInfo->AsJsType(), fromDataValueInfo->AsJsType(), isLoopBackEdge, sameValueNumber);
    }

    ValueType newValueType(toDataValueInfo->Type().Merge(fromDataValueInfo->Type()));
    if (newValueType.IsLikelyInt())
    {
        return MergeLikelyIntValueInfo(toDataVal, fromDataVal, newValueType);
    }
    if(newValueType.IsLikelyAnyOptimizedArray())
    {
        if(newValueType.IsLikelyArrayOrObjectWithArray() &&
            toDataValueInfo->IsLikelyArrayOrObjectWithArray() &&
            fromDataValueInfo->IsLikelyArrayOrObjectWithArray())
        {
            // Value type merge for missing values is aggressive by default (for profile data) - if either side likely has no
            // missing values, then the merged value type also likely has no missing values. This is because arrays often start
            // off having missing values but are eventually filled up. In GlobOpt however, we need to be conservative because
            // the existence of a value type that likely has missing values indicates that it is more likely for it to have
            // missing values than not. Also, StElems that are likely to create missing values are tracked in profile data and
            // will update value types to say they are now likely to have missing values, and that needs to be propagated
            // conservatively.
            newValueType =
                newValueType.SetHasNoMissingValues(
                    toDataValueInfo->HasNoMissingValues() && fromDataValueInfo->HasNoMissingValues());

            if(toDataValueInfo->HasIntElements() != fromDataValueInfo->HasIntElements() ||
                toDataValueInfo->HasFloatElements() != fromDataValueInfo->HasFloatElements())
            {
                // When merging arrays with different native storage types, make the merged value type a likely version to force
                // array checks to be done again and cause a conversion and/or bailout as necessary
                newValueType = newValueType.ToLikely();
            }
        }

        // Tracked array syms (head segment/lengths) are merged only when the merged type is definitely object and both
        // sides carry ArrayValueInfo; otherwise fall back to a plain ValueInfo of the merged type.
        if(!(newValueType.IsObject() && toDataValueInfo->IsArrayValueInfo() && fromDataValueInfo->IsArrayValueInfo()))
        {
            return ValueInfo::New(alloc, newValueType);
        }
        return
            MergeArrayValueInfo(
                newValueType,
                toDataValueInfo->AsArrayValueInfo(),
                fromDataValueInfo->AsArrayValueInfo(),
                fromDataSym,
                symsRequiringCompensation,
                symsCreatedForMerge);
    }

    // Consider: If both values are VarConstantValueInfo with the same value, we could
    // merge them preserving the value.
    return ValueInfo::New(this->alloc, newValueType);
}
ValueInfo *
GlobOpt::MergeLikelyIntValueInfo(Value *toDataVal, Value *fromDataVal, ValueType const newValueType)
{
    // Merges two values whose merged value type is likely-int. Preserves the int constant when both sides hold the
    // same one, merges int bounds when at least one side is bounded, and otherwise falls back to the union of the two
    // int ranges (or a plain ValueInfo when the merged type is not definitely int).
    Assert(newValueType.IsLikelyInt());

    ValueInfo *const toDataValueInfo = toDataVal->GetValueInfo();
    ValueInfo *const fromDataValueInfo = fromDataVal->GetValueInfo();
    Assert(toDataValueInfo != fromDataValueInfo);

    bool wasNegativeZeroPreventedByBailout;
    if(newValueType.IsInt())
    {
        int32 toDataIntConstantValue, fromDataIntConstantValue;
        if (toDataValueInfo->TryGetIntConstantValue(&toDataIntConstantValue) &&
            fromDataValueInfo->TryGetIntConstantValue(&fromDataIntConstantValue) &&
            toDataIntConstantValue == fromDataIntConstantValue)
        {
            // A new value number must be created to register the fact that the value has changed. Otherwise, if the value
            // changed inside a loop, the sym may look invariant on the loop back-edge (and hence not turned into a number
            // value), and its constant value from the first iteration may be incorrectly propagated after the loop.
            return IntConstantValueInfo::New(this->alloc, toDataIntConstantValue);
        }

        // Negative zero must still be guarded against if either path required a bailout to prevent it.
        wasNegativeZeroPreventedByBailout =
            toDataValueInfo->WasNegativeZeroPreventedByBailout() ||
            fromDataValueInfo->WasNegativeZeroPreventedByBailout();
    }
    else
    {
        wasNegativeZeroPreventedByBailout = false;
    }

    const IntBounds *const toDataValBounds =
        toDataValueInfo->IsIntBounded() ? toDataValueInfo->AsIntBounded()->Bounds() : nullptr;
    const IntBounds *const fromDataValBounds =
        fromDataValueInfo->IsIntBounded() ? fromDataValueInfo->AsIntBounded()->Bounds() : nullptr;
    if(toDataValBounds || fromDataValBounds)
    {
        const IntBounds *mergedBounds;
        if(toDataValBounds && fromDataValBounds)
        {
            // Bounds on both sides; merge them directly.
            mergedBounds = IntBounds::Merge(toDataVal, toDataValBounds, fromDataVal, fromDataValBounds);
        }
        else
        {
            // Bounds on one side only; merge against the other side's constant bounds when they are available,
            // otherwise give up on bounds for this merge.
            IntConstantBounds constantBounds;
            if(toDataValBounds)
            {
                mergedBounds =
                    fromDataValueInfo->TryGetIntConstantBounds(&constantBounds, true)
                        ? IntBounds::Merge(toDataVal, toDataValBounds, fromDataVal, constantBounds)
                        : nullptr;
            }
            else
            {
                Assert(fromDataValBounds);
                mergedBounds =
                    toDataValueInfo->TryGetIntConstantBounds(&constantBounds, true)
                        ? IntBounds::Merge(fromDataVal, fromDataValBounds, toDataVal, constantBounds)
                        : nullptr;
            }
        }
        if(mergedBounds)
        {
            if(mergedBounds->RequiresIntBoundedValueInfo(newValueType))
            {
                return IntBoundedValueInfo::New(newValueType, mergedBounds, wasNegativeZeroPreventedByBailout, alloc);
            }
            // The merged bounds carry no information beyond a simple range; release them and fall through.
            mergedBounds->Delete();
        }
    }

    if(newValueType.IsInt())
    {
        // Merge to the union of the two int ranges.
        int32 min1, max1, min2, max2;
        toDataValueInfo->GetIntValMinMax(&min1, &max1, false);
        fromDataValueInfo->GetIntValMinMax(&min2, &max2, false);
        return NewIntRangeValueInfo(min(min1, min2), max(max1, max2), wasNegativeZeroPreventedByBailout);
    }

    return ValueInfo::New(alloc, newValueType);
}
JsTypeValueInfo* GlobOpt::MergeJsTypeValueInfo(JsTypeValueInfo * toValueInfo, JsTypeValueInfo * fromValueInfo, bool isLoopBackEdge, bool sameValueNumber)
{
    // Merges two JS-type value infos at a join point. The merged type and equivalent-type set survive only when both
    // sides agree exactly. Returns nullptr when nothing survives; otherwise reuses 'toValueInfo' (in place when not
    // shared) or creates a new value info.
    Assert(toValueInfo != fromValueInfo);

    // On loop back edges we must be conservative and only consider type values which are invariant throughout the loop.
    // That's because in dead store pass we can't correctly track object pointer assignments (o = p), and we may not
    // be able to register correct type checks for the right properties upstream. If we ever figure out how to enhance
    // the dead store pass to track this info we could go more aggressively, as below.
    if (isLoopBackEdge && !sameValueNumber)
    {
        return nullptr;
    }

    const JITTypeHolder toType = toValueInfo->GetJsType();
    const JITTypeHolder fromType = fromValueInfo->GetJsType();
    const JITTypeHolder mergedType = toType == fromType ? toType : JITTypeHolder(nullptr);

    Js::EquivalentTypeSet* toTypeSet = toValueInfo->GetJsTypeSet();
    Js::EquivalentTypeSet* fromTypeSet = fromValueInfo->GetJsTypeSet();
    // Keep the type set only when both sides carry identical sets.
    Js::EquivalentTypeSet* mergedTypeSet = (toTypeSet != nullptr && fromTypeSet != nullptr && AreTypeSetsIdentical(toTypeSet, fromTypeSet)) ? toTypeSet : nullptr;

#if DBG_DUMP
    if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
    {
        Output::Print(_u("ObjTypeSpec: Merging type value info:\n"));
        Output::Print(_u(" from (shared %d): "), fromValueInfo->GetIsShared());
        fromValueInfo->Dump();
        Output::Print(_u("\n to (shared %d): "), toValueInfo->GetIsShared());
        toValueInfo->Dump();
    }
#endif

    if (mergedType == toType && mergedTypeSet == toTypeSet)
    {
        // The merge changed nothing on the 'to' side; reuse its value info as-is.
#if DBG_DUMP
        if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
        {
            Output::Print(_u("\n result (shared %d): "), toValueInfo->GetIsShared());
            toValueInfo->Dump();
            Output::Print(_u("\n"));
        }
#endif
        return toValueInfo;
    }

    if (mergedType == nullptr && mergedTypeSet == nullptr)
    {
        // No info, so don't bother making a value.
        return nullptr;
    }

    if (toValueInfo->GetIsShared())
    {
        // The 'to' info is shared with other values, so it must not be mutated in place; make a fresh value info.
        JsTypeValueInfo* mergedValueInfo = JsTypeValueInfo::New(this->alloc, mergedType, mergedTypeSet);
#if DBG_DUMP
        if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
        {
            Output::Print(_u("\n result (shared %d): "), mergedValueInfo->GetIsShared());
            mergedValueInfo->Dump();
            Output::Print(_u("\n"));
        }
#endif
        return mergedValueInfo;
    }
    else
    {
        // The 'to' info is not shared; it is safe to update it in place.
        toValueInfo->SetJsType(mergedType);
        toValueInfo->SetJsTypeSet(mergedTypeSet);
#if DBG_DUMP
        if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->func) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->func))
        {
            Output::Print(_u("\n result (shared %d): "), toValueInfo->GetIsShared());
            toValueInfo->Dump();
            Output::Print(_u("\n"));
        }
#endif
        return toValueInfo;
    }
}
  3127. ValueInfo *GlobOpt::MergeArrayValueInfo(
  3128. const ValueType mergedValueType,
  3129. const ArrayValueInfo *const toDataValueInfo,
  3130. const ArrayValueInfo *const fromDataValueInfo,
  3131. Sym *const arraySym,
  3132. BVSparse<JitArenaAllocator> *const symsRequiringCompensation,
  3133. BVSparse<JitArenaAllocator> *const symsCreatedForMerge)
  3134. {
  3135. Assert(mergedValueType.IsAnyOptimizedArray());
  3136. Assert(toDataValueInfo);
  3137. Assert(fromDataValueInfo);
  3138. Assert(toDataValueInfo != fromDataValueInfo);
  3139. Assert(arraySym);
  3140. Assert(!symsRequiringCompensation == IsLoopPrePass());
  3141. Assert(!symsCreatedForMerge == IsLoopPrePass());
  3142. // Merge the segment and segment length syms. If we have the segment and/or the segment length syms available on both sides
  3143. // but in different syms, create a new sym and record that the array sym requires compensation. Compensation will be
  3144. // inserted later to initialize this new sym from all predecessors of the merged block.
  3145. StackSym *newHeadSegmentSym;
  3146. if(toDataValueInfo->HeadSegmentSym() && fromDataValueInfo->HeadSegmentSym())
  3147. {
  3148. if(toDataValueInfo->HeadSegmentSym() == fromDataValueInfo->HeadSegmentSym())
  3149. {
  3150. newHeadSegmentSym = toDataValueInfo->HeadSegmentSym();
  3151. }
  3152. else
  3153. {
  3154. Assert(!IsLoopPrePass());
  3155. Assert(symsRequiringCompensation);
  3156. symsRequiringCompensation->Set(arraySym->m_id);
  3157. Assert(symsCreatedForMerge);
  3158. if(symsCreatedForMerge->Test(toDataValueInfo->HeadSegmentSym()->m_id))
  3159. {
  3160. newHeadSegmentSym = toDataValueInfo->HeadSegmentSym();
  3161. }
  3162. else
  3163. {
  3164. newHeadSegmentSym = StackSym::New(TyMachPtr, func);
  3165. symsCreatedForMerge->Set(newHeadSegmentSym->m_id);
  3166. }
  3167. }
  3168. }
  3169. else
  3170. {
  3171. newHeadSegmentSym = nullptr;
  3172. }
  3173. StackSym *newHeadSegmentLengthSym;
  3174. if(toDataValueInfo->HeadSegmentLengthSym() && fromDataValueInfo->HeadSegmentLengthSym())
  3175. {
  3176. if(toDataValueInfo->HeadSegmentLengthSym() == fromDataValueInfo->HeadSegmentLengthSym())
  3177. {
  3178. newHeadSegmentLengthSym = toDataValueInfo->HeadSegmentLengthSym();
  3179. }
  3180. else
  3181. {
  3182. Assert(!IsLoopPrePass());
  3183. Assert(symsRequiringCompensation);
  3184. symsRequiringCompensation->Set(arraySym->m_id);
  3185. Assert(symsCreatedForMerge);
  3186. if(symsCreatedForMerge->Test(toDataValueInfo->HeadSegmentLengthSym()->m_id))
  3187. {
  3188. newHeadSegmentLengthSym = toDataValueInfo->HeadSegmentLengthSym();
  3189. }
  3190. else
  3191. {
  3192. newHeadSegmentLengthSym = StackSym::New(TyUint32, func);
  3193. symsCreatedForMerge->Set(newHeadSegmentLengthSym->m_id);
  3194. }
  3195. }
  3196. }
  3197. else
  3198. {
  3199. newHeadSegmentLengthSym = nullptr;
  3200. }
  3201. StackSym *newLengthSym;
  3202. if(toDataValueInfo->LengthSym() && fromDataValueInfo->LengthSym())
  3203. {
  3204. if(toDataValueInfo->LengthSym() == fromDataValueInfo->LengthSym())
  3205. {
  3206. newLengthSym = toDataValueInfo->LengthSym();
  3207. }
  3208. else
  3209. {
  3210. Assert(!IsLoopPrePass());
  3211. Assert(symsRequiringCompensation);
  3212. symsRequiringCompensation->Set(arraySym->m_id);
  3213. Assert(symsCreatedForMerge);
  3214. if(symsCreatedForMerge->Test(toDataValueInfo->LengthSym()->m_id))
  3215. {
  3216. newLengthSym = toDataValueInfo->LengthSym();
  3217. }
  3218. else
  3219. {
  3220. newLengthSym = StackSym::New(TyUint32, func);
  3221. symsCreatedForMerge->Set(newLengthSym->m_id);
  3222. }
  3223. }
  3224. }
  3225. else
  3226. {
  3227. newLengthSym = nullptr;
  3228. }
  3229. if(newHeadSegmentSym || newHeadSegmentLengthSym || newLengthSym)
  3230. {
  3231. return ArrayValueInfo::New(alloc, mergedValueType, newHeadSegmentSym, newHeadSegmentLengthSym, newLengthSym);
  3232. }
  3233. if(symsRequiringCompensation)
  3234. {
  3235. symsRequiringCompensation->Clear(arraySym->m_id);
  3236. }
  3237. return ValueInfo::New(alloc, mergedValueType);
  3238. }
void GlobOpt::InsertValueCompensation(
    BasicBlock *const predecessor,
    const SymToValueInfoMap &symsRequiringCompensationToMergedValueInfoMap)
{
    // For each array sym whose merged ArrayValueInfo introduced new head-segment / head-segment-length / length syms,
    // inserts assignments at the end of 'predecessor' initializing the merged syms from the predecessor's syms, and
    // updates the predecessor's and successor's block data (liveness and tracked values) to match.
    Assert(predecessor);
    Assert(symsRequiringCompensationToMergedValueInfoMap.Count() != 0);

    IR::Instr *insertBeforeInstr = predecessor->GetLastInstr();
    Func *const func = insertBeforeInstr->m_func;
    bool setLastInstrInPredecessor;
    if(insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
    {
        // Don't insert code between the branch and the corresponding ByteCodeUses instructions
        while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
        {
            insertBeforeInstr = insertBeforeInstr->m_prev;
        }
        setLastInstrInPredecessor = false;
    }
    else
    {
        // Insert at the end of the block and set the last instruction
        Assert(insertBeforeInstr->m_next);
        insertBeforeInstr = insertBeforeInstr->m_next; // Instruction after the last instruction in the predecessor
        setLastInstrInPredecessor = true;
    }

    GlobOptBlockData &predecessorBlockData = predecessor->globOptData;
    GlobHashTable *const predecessorSymToValueMap = predecessor->globOptData.symToValueMap;
    GlobOptBlockData &successorBlockData = blockData;
    GlobHashTable *const successorSymToValueMap = blockData.symToValueMap;
    for(auto it = symsRequiringCompensationToMergedValueInfoMap.GetIterator(); it.IsValid(); it.MoveNext())
    {
        const auto &entry = it.Current();
        Sym *const sym = entry.Key();
        Value *const predecessorValue = FindValue(predecessorSymToValueMap, sym);
        Assert(predecessorValue);
        ValueInfo *const predecessorValueInfo = predecessorValue->GetValueInfo();

        // Currently, array value infos are the only ones that require compensation based on values
        Assert(predecessorValueInfo->IsAnyOptimizedArray());
        const ArrayValueInfo *const predecessorArrayValueInfo = predecessorValueInfo->AsArrayValueInfo();
        StackSym *const predecessorHeadSegmentSym = predecessorArrayValueInfo->HeadSegmentSym();
        StackSym *const predecessorHeadSegmentLengthSym = predecessorArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const predecessorLengthSym = predecessorArrayValueInfo->LengthSym();
        ValueInfo *const mergedValueInfo = entry.Value();
        const ArrayValueInfo *const mergedArrayValueInfo = mergedValueInfo->AsArrayValueInfo();
        StackSym *const mergedHeadSegmentSym = mergedArrayValueInfo->HeadSegmentSym();
        StackSym *const mergedHeadSegmentLengthSym = mergedArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const mergedLengthSym = mergedArrayValueInfo->LengthSym();
        // A merged sym exists only if the predecessor tracked the corresponding sym (syms missing on either side are
        // dropped by the merge).
        Assert(!mergedHeadSegmentSym || predecessorHeadSegmentSym);
        Assert(!mergedHeadSegmentLengthSym || predecessorHeadSegmentLengthSym);
        Assert(!mergedLengthSym || predecessorLengthSym);

        bool compensated = false;
        if(mergedHeadSegmentSym && predecessorHeadSegmentSym != mergedHeadSegmentSym)
        {
            // mergedHeadSegmentSym = Ld_A predecessorHeadSegmentSym
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_A,
                    IR::RegOpnd::New(mergedHeadSegmentSym, mergedHeadSegmentSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentSym, predecessorHeadSegmentSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;
        }

        if(mergedHeadSegmentLengthSym && predecessorHeadSegmentLengthSym != mergedHeadSegmentLengthSym)
        {
            // mergedHeadSegmentLengthSym = Ld_I4 predecessorHeadSegmentLengthSym
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedHeadSegmentLengthSym, mergedHeadSegmentLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentLengthSym, predecessorHeadSegmentLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the head segment length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorHeadSegmentLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            Value *const predecessorHeadSegmentLengthValue =
                FindValue(predecessorSymToValueMap, predecessorHeadSegmentLengthSym);
            Assert(predecessorHeadSegmentLengthValue);
            SetValue(&predecessorBlockData, predecessorHeadSegmentLengthValue, mergedHeadSegmentLengthSym);
            Value *const mergedHeadSegmentLengthValue = FindValue(successorSymToValueMap, mergedHeadSegmentLengthSym);
            if(mergedHeadSegmentLengthValue)
            {
                Assert(mergedHeadSegmentLengthValue->GetValueNumber() != predecessorHeadSegmentLengthValue->GetValueNumber());
                if(predecessorHeadSegmentLengthValue->GetValueInfo() != mergedHeadSegmentLengthValue->GetValueInfo())
                {
                    // Fold the predecessor's length info into the successor's value (SetValueInfo is safe here since
                    // this info is being generated by the merge itself).
                    mergedHeadSegmentLengthValue->SetValueInfo(
                        MergeLikelyIntValueInfo(
                            mergedHeadSegmentLengthValue,
                            predecessorHeadSegmentLengthValue,
                            mergedHeadSegmentLengthValue->GetValueInfo()->Type()
                                .Merge(predecessorHeadSegmentLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                // Successor doesn't track the merged sym yet; seed it from the predecessor's value.
                SetValue(&successorBlockData, CopyValue(predecessorHeadSegmentLengthValue), mergedHeadSegmentLengthSym);
            }
        }

        if(mergedLengthSym && predecessorLengthSym != mergedLengthSym)
        {
            // mergedLengthSym = Ld_I4 predecessorLengthSym
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedLengthSym, mergedLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorLengthSym, predecessorLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            Value *const predecessorLengthValue = FindValue(predecessorSymToValueMap, predecessorLengthSym);
            Assert(predecessorLengthValue);
            SetValue(&predecessorBlockData, predecessorLengthValue, mergedLengthSym);
            Value *const mergedLengthValue = FindValue(successorSymToValueMap, mergedLengthSym);
            if(mergedLengthValue)
            {
                Assert(mergedLengthValue->GetValueNumber() != predecessorLengthValue->GetValueNumber());
                if(predecessorLengthValue->GetValueInfo() != mergedLengthValue->GetValueInfo())
                {
                    mergedLengthValue->SetValueInfo(
                        MergeLikelyIntValueInfo(
                            mergedLengthValue,
                            predecessorLengthValue,
                            mergedLengthValue->GetValueInfo()->Type().Merge(predecessorLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                SetValue(&successorBlockData, CopyValue(predecessorLengthValue), mergedLengthSym);
            }
        }

        if(compensated)
        {
            // Repoint the predecessor's array value at the merged syms, keeping the predecessor's own sym wherever no
            // merged sym was created.
            ChangeValueInfo(
                predecessor,
                predecessorValue,
                ArrayValueInfo::New(
                    alloc,
                    predecessorValueInfo->Type(),
                    mergedHeadSegmentSym ? mergedHeadSegmentSym : predecessorHeadSegmentSym,
                    mergedHeadSegmentLengthSym ? mergedHeadSegmentLengthSym : predecessorHeadSegmentLengthSym,
                    mergedLengthSym ? mergedLengthSym : predecessorLengthSym,
                    predecessorValueInfo->GetSymStore()),
                false /*allowIncompatibleType*/,
                compensated);
        }
    }

    if(setLastInstrInPredecessor)
    {
        predecessor->SetLastInstr(insertBeforeInstr->m_prev);
    }
}
  3403. BOOLEAN
  3404. GlobOpt::IsArgumentsSymID(SymID id, const GlobOptBlockData& blockData)
  3405. {
  3406. return blockData.argObjSyms->Test(id);
  3407. }
  3408. BOOLEAN
  3409. GlobOpt::IsArgumentsOpnd(IR::Opnd* opnd)
  3410. {
  3411. SymID id = 0;
  3412. if (opnd->IsRegOpnd())
  3413. {
  3414. id = opnd->AsRegOpnd()->m_sym->m_id;
  3415. return IsArgumentsSymID(id, this->blockData);
  3416. }
  3417. else if (opnd->IsSymOpnd())
  3418. {
  3419. Sym *sym = opnd->AsSymOpnd()->m_sym;
  3420. if (sym && sym->IsPropertySym())
  3421. {
  3422. PropertySym *propertySym = sym->AsPropertySym();
  3423. id = propertySym->m_stackSym->m_id;
  3424. return IsArgumentsSymID(id, this->blockData);
  3425. }
  3426. return false;
  3427. }
  3428. else if (opnd->IsIndirOpnd())
  3429. {
  3430. IR::RegOpnd *indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
  3431. IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
  3432. return IsArgumentsSymID(baseOpnd->m_sym->m_id, this->blockData) || (indexOpnd && IsArgumentsSymID(indexOpnd->m_sym->m_id, this->blockData));
  3433. }
  3434. AssertMsg(false, "Unknown type");
  3435. return false;
  3436. }
  3437. void
  3438. GlobOpt::TrackArgumentsSym(IR::RegOpnd* opnd)
  3439. {
  3440. if(!blockData.curFunc->argObjSyms)
  3441. {
  3442. blockData.curFunc->argObjSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  3443. }
  3444. blockData.curFunc->argObjSyms->Set(opnd->m_sym->m_id);
  3445. blockData.argObjSyms->Set(opnd->m_sym->m_id);
  3446. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  3447. if (PHASE_TESTTRACE(Js::StackArgOptPhase, this->func))
  3448. {
  3449. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3450. char16 debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  3451. Output::Print(_u("Created a new alias s%d for arguments object in function %s(%s) topFunc %s(%s)\n"),
  3452. opnd->m_sym->m_id,
  3453. blockData.curFunc->GetJITFunctionBody()->GetDisplayName(),
  3454. blockData.curFunc->GetDebugNumberSet(debugStringBuffer),
  3455. this->func->GetJITFunctionBody()->GetDisplayName(),
  3456. this->func->GetDebugNumberSet(debugStringBuffer2)
  3457. );
  3458. Output::Flush();
  3459. }
  3460. #endif
  3461. }
  3462. void
  3463. GlobOpt::ClearArgumentsSym(IR::RegOpnd* opnd)
  3464. {
  3465. // We blindly clear so need to check func has argObjSyms
  3466. if (blockData.curFunc->argObjSyms)
  3467. {
  3468. blockData.curFunc->argObjSyms->Clear(opnd->m_sym->m_id);
  3469. }
  3470. blockData.argObjSyms->Clear(opnd->m_sym->m_id);
  3471. }
  3472. bool
  3473. GlobOpt::AreFromSameBytecodeFunc(IR::RegOpnd* src1, IR::RegOpnd* dst)
  3474. {
  3475. Assert(this->func->m_symTable->FindStackSym(src1->m_sym->m_id) == src1->m_sym);
  3476. Assert(this->func->m_symTable->FindStackSym(dst->m_sym->m_id) == dst->m_sym);
  3477. if (dst->m_sym->HasByteCodeRegSlot() && src1->m_sym->HasByteCodeRegSlot())
  3478. {
  3479. return src1->m_sym->GetByteCodeFunc() == dst->m_sym->GetByteCodeFunc();
  3480. }
  3481. return false;
  3482. }
  3483. BOOLEAN
  3484. GlobOpt::TestAnyArgumentsSym()
  3485. {
  3486. return blockData.argObjSyms->TestEmpty();
  3487. }
  3488. /*
  3489. * This is for scope object removal along with Heap Arguments optimization.
  3490. * We track several instructions to facilitate the removal of scope object.
  3491. * - LdSlotArr - This instr is tracked to keep track of the formals array (the dest)
  3492. * - InlineeStart - To keep track of the stack syms for the formals of the inlinee.
  3493. */
void
GlobOpt::TrackInstrsForScopeObjectRemoval(IR::Instr * instr)
{
    // Tracks the instructions relevant to scope-object removal (see the comment block above this function):
    // LdSlotArr defs of the formals array, and InlineeStart to map the inlinee's formals to stack syms.
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();

    if (instr->m_opcode == Js::OpCode::Ld_A && src1->IsRegOpnd())
    {
        AssertMsg(!instr->m_func->IsStackArgsEnabled() || !src1->IsScopeObjOpnd(instr->m_func), "There can be no aliasing for scope object.");
    }

    // The following is to track formals array for Stack Arguments optimization with Formals
    if (instr->m_func->IsStackArgsEnabled() && !this->IsLoopPrePass())
    {
        if (instr->m_opcode == Js::OpCode::LdSlotArr)
        {
            if (instr->GetSrc1()->IsScopeObjOpnd(instr->m_func))
            {
                AssertMsg(!instr->m_func->GetJITFunctionBody()->HasImplicitArgIns(), "No mapping is required in this case. So it should already be generating ArgIns.");
                instr->m_func->TrackFormalsArraySym(dst->GetStackSym()->m_id);
            }
        }
        else if (instr->m_opcode == Js::OpCode::InlineeStart)
        {
            Assert(instr->m_func->IsInlined());
            // Counts exclude the "this" argument/parameter.
            Js::ArgSlot actualsCount = instr->m_func->actualCount - 1;
            Js::ArgSlot formalsCount = instr->m_func->GetJITFunctionBody()->GetInParamsCount() - 1;

            Func * func = instr->m_func;
            Func * inlinerFunc = func->GetParentFunc(); //Inliner's func

            IR::Instr * argOutInstr = instr->GetSrc2()->GetStackSym()->GetInstrDef();

            //The argout immediately before the InlineeStart will be the ArgOut for NewScObject
            //So we don't want to track the stack sym for this argout.- Skipping it here.
            if (instr->m_func->IsInlinedConstructor())
            {
                //PRE might introduce a second definition for the Src1. So assert for the opcode only when it has single definition.
                Assert(argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef() == nullptr ||
                    argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::NewScObjectNoCtor);
                argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
            }
            if (formalsCount < actualsCount)
            {
                Js::ArgSlot extraActuals = actualsCount - formalsCount;

                //Skipping extra actuals passed
                for (Js::ArgSlot i = 0; i < extraActuals; i++)
                {
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
            }

            StackSym * undefinedSym = nullptr;
            // Walk the ArgOut chain from the last formal to the first, recording a stack sym for each formal.
            for (Js::ArgSlot param = formalsCount; param > 0; param--)
            {
                StackSym * argOutSym = nullptr;
                if (argOutInstr->GetSrc1())
                {
                    if (argOutInstr->GetSrc1()->IsRegOpnd())
                    {
                        argOutSym = argOutInstr->GetSrc1()->GetStackSym();
                    }
                    else
                    {
                        // Non-reg source: materialize it into a new stack sym so the formal can be tracked.
                        // We will always have ArgOut instr - so the source operand will not be removed.
                        argOutSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = argOutInstr->GetSrc1();
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(argOutSym, TyVar, inlinerFunc);
                        IR::Instr * assignInstr = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignInstr);
                    }
                }
                Assert(!func->HasStackSymForFormal(param - 1));
                if (param <= actualsCount)
                {
                    // This formal has a matching actual; track its sym and advance along the ArgOut chain.
                    Assert(argOutSym);
                    func->TrackStackSymForFormalIndex(param - 1, argOutSym);
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
                else
                {
                    /*When param is out of range of actuals count, load undefined*/
                    // TODO: saravind: This will insert undefined for each of the param not having an actual. - Clean up this by having a sym for undefined on func ?
                    Assert(formalsCount > actualsCount);
                    if (undefinedSym == nullptr)
                    {
                        // Lazily materialize a single shared sym holding 'undefined'.
                        undefinedSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = IR::AddrOpnd::New(inlinerFunc->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicMisc, inlinerFunc);
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(undefinedSym, TyVar, inlinerFunc);
                        IR::Instr * assignUndefined = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignUndefined);
                    }
                    func->TrackStackSymForFormalIndex(param - 1, undefinedSym);
                }
            }
        }
    }
}
// Tracks the arguments object and all of its aliases through this instruction,
// deciding whether the arguments object can stay on the stack. Any use that the
// optimizer cannot account for (escaping alias, unknown opcode touching an
// arguments sym, etc.) calls CannotAllocateArgumentsObjectOnStack() and gives up.
void
GlobOpt::OptArguments(IR::Instr *instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();

    TrackInstrsForScopeObjectRemoval(instr);

    if (!TrackArgumentsObject())
    {
        // Arguments-object tracking is globally disabled; nothing to do.
        return;
    }

    if (instr->HasAnyLoadHeapArgsOpCode())
    {
        // This instruction materializes the arguments object.
        if (instr->m_func->IsStackArgsEnabled())
        {
            if (instr->GetSrc1()->IsRegOpnd() && instr->m_func->GetJITFunctionBody()->GetInParamsCount() > 1)
            {
                // Src1 is the scope object holding the formals; it must have been
                // created by InitCachedScope/NewScopeObject and already be
                // registered on the func.
                StackSym * scopeObjSym = instr->GetSrc1()->GetStackSym();
                Assert(scopeObjSym);
                Assert(scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::InitCachedScope || scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::NewScopeObject);
                Assert(instr->m_func->GetScopeObjSym() == scopeObjSym);

                if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase))
                {
                    Output::Print(_u("StackArgFormals : %s (%d) :Setting scopeObjSym in forward pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetJITFunctionBody()->GetFunctionNumber());
                    Output::Flush();
                }
            }
        }

        if (instr->m_func->GetJITFunctionBody()->GetInParamsCount() != 1 && !instr->m_func->IsStackArgsEnabled())
        {
            // Function has formals but the stack-args optimization is off:
            // the heap arguments object must be allocated for real.
            CannotAllocateArgumentsObjectOnStack();
        }
        else
        {
            // Start tracking the sym that now holds the arguments object.
            TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    // Keep track of arguments objects and its aliases
    // LdHeapArguments loads the arguments object and Ld_A tracks the aliases.
    if ((instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::BytecodeArgOutCapture) && (src1->IsRegOpnd() && IsArgumentsOpnd(src1)))
    {
        // In the debug mode, we don't want to optimize away the aliases. Since we may have to show them on the inspection.
        if (((!AreFromSameBytecodeFunc(src1->AsRegOpnd(), dst->AsRegOpnd()) || this->currentBlock->loop) && instr->m_opcode != Js::OpCode::BytecodeArgOutCapture) || this->func->IsJitInDebugMode())
        {
            // Alias crosses a function/loop boundary (or debugger is attached):
            // treat the arguments object as escaping.
            CannotAllocateArgumentsObjectOnStack();
            return;
        }
        if (!dst->AsRegOpnd()->GetStackSym()->m_nonEscapingArgObjAlias)
        {
            TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    if (!TestAnyArgumentsSym())
    {
        // There are no syms to track yet, don't start tracking arguments sym.
        return;
    }

    // Avoid loop prepass
    if (this->currentBlock->loop && this->IsLoopPrePass())
    {
        return;
    }

    SymID id = 0;

    // Classify the use of tracked arguments syms by opcode. The listed cases
    // are uses the stack-args optimization knows how to lower; anything else
    // falls to the conservative default which kills the optimization.
    switch (instr->m_opcode)
    {
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::TypeofElem:
    {
        Assert(src1->IsIndirOpnd());
        IR::RegOpnd *indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();

        if (indexOpnd && IsArgumentsSymID(indexOpnd->m_sym->m_id, this->blockData))
        {
            // Pathological test cases such as a[arguments]
            CannotAllocateArgumentsObjectOnStack();
            return;
        }

        IR::RegOpnd *baseOpnd = src1->AsIndirOpnd()->GetBaseOpnd();
        id = baseOpnd->m_sym->m_id;
        if (IsArgumentsSymID(id, this->blockData))
        {
            // arguments[i] load: supported, flag the instruction.
            instr->usesStackArgumentsObject = true;
        }

        break;
    }
    case Js::OpCode::LdLen_A:
    {
        Assert(src1->IsRegOpnd());
        if (IsArgumentsOpnd(src1))
        {
            // arguments.length: supported.
            instr->usesStackArgumentsObject = true;
        }
        break;
    }
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    {
        if (IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }

        if (IsArgumentsOpnd(src1) &&
            src1->AsRegOpnd()->m_sym->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture)
        {
            // Apply inlining results in such usage - this is to ignore this sym that is def'd by ByteCodeArgOutCapture
            // It's needed because we do not have block level merging of arguments object and this def due to inlining can turn off stack args opt.
            IR::Instr* builtinStart = instr->GetNextRealInstr();
            if (builtinStart->m_opcode == Js::OpCode::InlineBuiltInStart)
            {
                IR::Opnd* builtinOpnd = builtinStart->GetSrc1();
                if (builtinStart->GetSrc1()->IsAddrOpnd())
                {
                    Assert(builtinOpnd->AsAddrOpnd()->m_isFunction);

                    Js::BuiltinFunction builtinFunction = Js::JavascriptLibrary::GetBuiltInForFuncInfo(((JITTimeFixedField*)builtinOpnd->AsAddrOpnd()->m_metadata)->GetFuncInfoAddr(), func->GetThreadContextInfo());
                    if (builtinFunction == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        // f.apply(_, arguments) from an inlined call site: drop
                        // the capture sym from tracking rather than killing the opt.
                        ClearArgumentsSym(src1->AsRegOpnd());
                    }
                }
                else if (builtinOpnd->IsRegOpnd())
                {
                    if (builtinOpnd->AsRegOpnd()->m_sym->m_builtInIndex == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        ClearArgumentsSym(src1->AsRegOpnd());
                    }
                }
            }
        }
        break;
    }
    case Js::OpCode::BailOnNotStackArgs:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::BytecodeArgOutUse:
    {
        // These opcodes exist specifically to consume the stack arguments object.
        if (src1 && IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }

        break;
    }

    default:
        {
            // Super conservative here, if we see the arguments or any of its alias being used in any
            // other opcode just don't do this optimization. Revisit this to optimize further if we see any common
            // case is missed.

            if (src1)
            {
                if (src1->IsRegOpnd() || src1->IsSymOpnd() || src1->IsIndirOpnd())
                {
                    if (IsArgumentsOpnd(src1))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
            }

            if (src2)
            {
                if (src2->IsRegOpnd() || src2->IsSymOpnd() || src2->IsIndirOpnd())
                {
                    if (IsArgumentsOpnd(src2))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
            }

            // We should look at dst last to correctly handle cases where it's the same as one of the src operands.
            if (dst)
            {
                if (dst->IsIndirOpnd() || dst->IsSymOpnd())
                {
                    if (IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
                else if (dst->IsRegOpnd())
                {
                    // A redefinition inside a loop may reach its own use; escape.
                    if (this->currentBlock->loop && IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                    // dst is overwritten with a non-arguments value; stop tracking it.
                    ClearArgumentsSym(dst->AsRegOpnd());
                }
            }
        }
        break;
    }
    return;
}
  3803. void
  3804. GlobOpt::MarkArgumentsUsedForBranch(IR::Instr * instr)
  3805. {
  3806. // If it's a conditional branch instruction and the operand used for branching is one of the arguments
  3807. // to the function, tag the m_argUsedForBranch of the functionBody so that it can be used later for inlining decisions.
  3808. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsUnconditional())
  3809. {
  3810. IR::BranchInstr * bInstr = instr->AsBranchInstr();
  3811. IR::Opnd *src1 = bInstr->GetSrc1();
  3812. IR::Opnd *src2 = bInstr->GetSrc2();
  3813. // These are used because we don't want to rely on src1 or src2 to always be the register/constant
  3814. IR::RegOpnd *regOpnd = nullptr;
  3815. if (!src2 && (instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A) && src1->IsRegOpnd())
  3816. {
  3817. regOpnd = src1->AsRegOpnd();
  3818. }
  3819. // We need to check for (0===arg) and (arg===0); this is especially important since some minifiers
  3820. // change all instances of one to the other.
  3821. else if (src2 && src2->IsConstOpnd() && src1->IsRegOpnd())
  3822. {
  3823. regOpnd = src1->AsRegOpnd();
  3824. }
  3825. else if (src2 && src2->IsRegOpnd() && src1->IsConstOpnd())
  3826. {
  3827. regOpnd = src2->AsRegOpnd();
  3828. }
  3829. if (regOpnd != nullptr)
  3830. {
  3831. if (regOpnd->m_sym->IsSingleDef())
  3832. {
  3833. IR::Instr * defInst = regOpnd->m_sym->GetInstrDef();
  3834. IR::Opnd *defSym = defInst->GetSrc1();
  3835. if (defSym && defSym->IsSymOpnd() && defSym->AsSymOpnd()->m_sym->IsStackSym()
  3836. && defSym->AsSymOpnd()->m_sym->AsStackSym()->IsParamSlotSym())
  3837. {
  3838. uint16 param = defSym->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
  3839. // We only support functions with 13 arguments to ensure optimal size of callSiteInfo
  3840. if (param < Js::Constants::MaximumArgumentCountForConstantArgumentInlining)
  3841. {
  3842. this->func->GetJITOutput()->SetArgUsedForBranch((uint8)param);
  3843. }
  3844. }
  3845. }
  3846. }
  3847. }
  3848. }
  3849. const InductionVariable*
  3850. GlobOpt::GetInductionVariable(SymID sym, Loop *loop)
  3851. {
  3852. if (loop->inductionVariables)
  3853. {
  3854. for (auto it = loop->inductionVariables->GetIterator(); it.IsValid(); it.MoveNext())
  3855. {
  3856. InductionVariable* iv = &it.CurrentValueReference();
  3857. if (!iv->IsChangeDeterminate() || !iv->IsChangeUnidirectional())
  3858. {
  3859. continue;
  3860. }
  3861. if (iv->Sym()->m_id == sym)
  3862. {
  3863. return iv;
  3864. }
  3865. }
  3866. }
  3867. return nullptr;
  3868. }
  3869. bool
  3870. GlobOpt::IsSymIDInductionVariable(SymID sym, Loop *loop)
  3871. {
  3872. return GetInductionVariable(sym, loop) != nullptr;
  3873. }
  3874. SymID
  3875. GlobOpt::GetVarSymID(StackSym *sym)
  3876. {
  3877. if (sym && sym->m_type != TyVar)
  3878. {
  3879. sym = sym->GetVarEquivSym(nullptr);
  3880. }
  3881. if (!sym)
  3882. {
  3883. return Js::Constants::InvalidSymID;
  3884. }
  3885. return sym->m_id;
  3886. }
// Decides whether an element access (load for memcopy, store for memset) is
// eligible to become a memop candidate: the base must be an (invariant) array
// of suitable value type with bound checks already eliminated, and the index
// must be an induction variable of the current loop that changes by exactly 1.
// 'isMemset' is currently unused in the body but distinguishes the caller's
// intent. Returns false (with a verbose trace) on the first failed condition.
bool
GlobOpt::IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd)
{
    Assert(instr);
    if (!baseOpnd || !indexOpnd)
    {
        return false;
    }
    Loop* loop = this->currentBlock->loop;

    const ValueType baseValueType(baseOpnd->GetValueType());
    const ValueType indexValueType(indexOpnd->GetValueType());

    // Validate the array and index types
    if (
        !indexValueType.IsInt() ||
        !(
            baseValueType.IsTypedIntOrFloatArray() ||
            baseValueType.IsArray()
        )
    )
    {
#if DBG_DUMP
        wchar indexValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        indexValueType.ToString(indexValueTypeStr);
        wchar baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index[%s] or Array[%s] value type is invalid"), indexValueTypeStr, baseValueTypeStr);
#endif
        return false;
    }

    // The following is conservative and works around a bug in induction variable analysis.
    if (baseOpnd->IsArrayRegOpnd())
    {
        IR::ArrayRegOpnd *baseArrayOp = baseOpnd->AsArrayRegOpnd();
        // Memop emission drops the per-iteration bound checks, so they must
        // already have been fully eliminated (not merely hoisted/extracted).
        bool hasBoundChecksRemoved = (
            baseArrayOp->EliminatedLowerBoundCheck() &&
            baseArrayOp->EliminatedUpperBoundCheck() &&
            !instr->extractedUpperBoundCheckWithoutHoisting &&
            !instr->loadedArrayHeadSegment &&
            !instr->loadedArrayHeadSegmentLength
        );
        if (!hasBoundChecksRemoved)
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("Missing bounds check optimization"));
            return false;
        }
    }

    if (!baseValueType.IsTypedArray())
    {
        // Check if the instr can kill the value type of the array
        JsArrayKills arrayKills = CheckJsArrayKills(instr);
        if (arrayKills.KillsValueType(baseValueType))
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The array (s%d) can lose its value type"), GetVarSymID(baseOpnd->GetStackSym()));
            return false;
        }
    }

    // Process the Index Operand
    // The base must not change within the loop for the memop rewrite to be valid.
    if (!this->OptIsInvariant(baseOpnd, this->currentBlock, loop, this->FindValue(baseOpnd->m_sym), false, true))
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Base (s%d) is not invariant"), GetVarSymID(baseOpnd->GetStackSym()));
        return false;
    }

    // Validate the index
    Assert(indexOpnd->GetStackSym());
    SymID indexSymID = GetVarSymID(indexOpnd->GetStackSym());
    const InductionVariable* iv = GetInductionVariable(indexSymID, loop);
    if (!iv)
    {
        // If the index is not an induction variable return
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index (s%d) is not an induction variable"), indexSymID);
        return false;
    }

    Assert(iv->IsChangeDeterminate() && iv->IsChangeUnidirectional());
    const IntConstantBounds & bounds = iv->ChangeBounds();

    if (loop->memOpInfo)
    {
        // Only accept induction variables that increments by 1
        Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
        inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(indexSymID, inductionVariableChangeInfo);

        if (
            (bounds.LowerBound() != 1 && bounds.LowerBound() != -1) ||
            (bounds.UpperBound() != bounds.LowerBound()) ||
            inductionVariableChangeInfo.unroll > 1 // Must be 0 (not seen yet) or 1 (already seen)
        )
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The index does not change by 1: %d><%d, unroll=%d"), bounds.LowerBound(), bounds.UpperBound(), inductionVariableChangeInfo.unroll);
            return false;
        }

        // Check if the index is the same in all MemOp optimization in this loop
        if (!loop->memOpInfo->candidates->Empty())
        {
            Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();

            // All MemOp operations within the same loop must use the same index
            if (previousCandidate->index != indexSymID)
            {
                TRACE_MEMOP_VERBOSE(loop, instr, _u("The index is not the same as other MemOp in the loop"));
                return false;
            }
        }
    }

    return true;
}
  3989. bool
  3990. GlobOpt::CollectMemcopyLdElementI(IR::Instr *instr, Loop *loop)
  3991. {
  3992. Assert(instr->GetSrc1()->IsIndirOpnd());
  3993. IR::IndirOpnd *src1 = instr->GetSrc1()->AsIndirOpnd();
  3994. IR::Opnd *indexOpnd = src1->GetIndexOpnd();
  3995. IR::RegOpnd *baseOpnd = src1->GetBaseOpnd()->AsRegOpnd();
  3996. SymID baseSymID = GetVarSymID(baseOpnd->GetStackSym());
  3997. if (!IsAllowedForMemOpt(instr, false, baseOpnd, indexOpnd))
  3998. {
  3999. return false;
  4000. }
  4001. SymID inductionSymID = GetVarSymID(indexOpnd->GetStackSym());
  4002. Assert(IsSymIDInductionVariable(inductionSymID, loop));
  4003. loop->EnsureMemOpVariablesInitialized();
  4004. bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
  4005. IR::Opnd * dst = instr->GetDst();
  4006. if (!dst->IsRegOpnd() || !dst->AsRegOpnd()->GetStackSym()->IsSingleDef())
  4007. {
  4008. return false;
  4009. }
  4010. Loop::MemCopyCandidate* memcopyInfo = memcopyInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemCopyCandidate);
  4011. memcopyInfo->ldBase = baseSymID;
  4012. memcopyInfo->ldCount = 1;
  4013. memcopyInfo->count = 0;
  4014. memcopyInfo->bIndexAlreadyChanged = isIndexPreIncr;
  4015. memcopyInfo->base = Js::Constants::InvalidSymID; //need to find the stElem first
  4016. memcopyInfo->index = inductionSymID;
  4017. memcopyInfo->transferSym = dst->AsRegOpnd()->GetStackSym();
  4018. loop->memOpInfo->candidates->Prepend(memcopyInfo);
  4019. return true;
  4020. }
// Tries to record a StElemI as a memset candidate. The stored value must be
// either a loop-invariant register or a constant (float/int/address); the
// index must be a valid induction variable (checked by IsAllowedForMemOpt).
// Returns true when a MemSetCandidate was prepended to the loop's candidates.
bool
GlobOpt::CollectMemsetStElementI(IR::Instr *instr, Loop *loop)
{
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();

    if (!IsAllowedForMemOpt(instr, true, baseOp, indexOp))
    {
        return false;
    }

    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    IR::Opnd *srcDef = instr->GetSrc1();
    StackSym *srcSym = nullptr;
    if (srcDef->IsRegOpnd())
    {
        IR::RegOpnd* opnd = srcDef->AsRegOpnd();
        // A loop-invariant register value can be memset even without a
        // compile-time constant.
        if (this->OptIsInvariant(opnd, this->currentBlock, loop, this->FindValue(opnd->m_sym), true, true))
        {
            srcSym = opnd->GetStackSym();
        }
    }

    // Capture the constant form of the source when there is one; 'constant'
    // stays {TyIllegal, 0} for the invariant-register case.
    BailoutConstantValue constant = {TyIllegal, 0};
    if (srcDef->IsFloatConstOpnd())
    {
        constant.InitFloatConstValue(srcDef->AsFloatConstOpnd()->m_value);
    }
    else if (srcDef->IsIntConstOpnd())
    {
        constant.InitIntConstValue(srcDef->AsIntConstOpnd()->GetValue(), srcDef->AsIntConstOpnd()->GetType());
    }
    else if (srcDef->IsAddrOpnd())
    {
        constant.InitVarConstValue(srcDef->AsAddrOpnd()->m_address);
    }
    else if(!srcSym)
    {
        // Neither a constant nor an invariant register: not a memset.
        TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Source is not an invariant"));
        return false;
    }

    // Process the Index Operand
    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));

    loop->EnsureMemOpVariablesInitialized();
    // Whether the induction variable was already bumped before this store
    // (affects the emitted range by one element).
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);

    Loop::MemSetCandidate* memsetInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemSetCandidate);
    memsetInfo->base = baseSymID;
    memsetInfo->index = inductionSymID;
    memsetInfo->constant = constant;
    memsetInfo->srcSym = srcSym;
    memsetInfo->count = 1;
    memsetInfo->bIndexAlreadyChanged = isIndexPreIncr;
    loop->memOpInfo->candidates->Prepend(memsetInfo);
    return true;
}
// Tries to complete a pending memcopy candidate (created by
// CollectMemcopyLdElementI) with a matching StElemI: the stored register must
// be the dead, last use of the candidate's transfer sym, and the index state
// must be identical to the load's. On success fills in the store base and
// bumps the candidate's count.
bool GlobOpt::CollectMemcopyStElementI(IR::Instr *instr, Loop *loop)
{
    if (!loop->memOpInfo || loop->memOpInfo->candidates->Empty())
    {
        // There is no ldElem matching this stElem
        return false;
    }

    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();
    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());

    if (!instr->GetSrc1()->IsRegOpnd())
    {
        return false;
    }
    IR::RegOpnd* src1 = instr->GetSrc1()->AsRegOpnd();

    if (!src1->GetIsDead())
    {
        // This must be the last use of the register.
        // It will invalidate `var m = a[i]; b[i] = m;` but this is not a very interesting case.
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Source (s%d) is still alive after StElemI"), baseSymID);
        return false;
    }

    if (!IsAllowedForMemOpt(instr, false, baseOp, indexOp))
    {
        return false;
    }

    SymID srcSymID = GetVarSymID(src1->GetStackSym());

    // Prepare the memcopyCandidate entry
    Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();
    if (!previousCandidate->IsMemCopy())
    {
        return false;
    }
    Loop::MemCopyCandidate* memcopyInfo = previousCandidate->AsMemCopy();

    // The previous candidate has to have been created by the matching ldElem
    if (
        memcopyInfo->base != Js::Constants::InvalidSymID ||
        GetVarSymID(memcopyInfo->transferSym) != srcSymID
    )
    {
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("No matching LdElem found (s%d)"), baseSymID);
        return false;
    }

    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
    if (isIndexPreIncr != memcopyInfo->bIndexAlreadyChanged)
    {
        // The index changed between the load and the store
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Index value changed between ldElem and stElem"));
        return false;
    }

    // Consider: Can we remove the count field?
    memcopyInfo->count++;
    memcopyInfo->base = baseSymID;

    return true;
}
  4137. bool
  4138. GlobOpt::CollectMemOpLdElementI(IR::Instr *instr, Loop *loop)
  4139. {
  4140. Assert(instr->m_opcode == Js::OpCode::LdElemI_A);
  4141. return (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyLdElementI(instr, loop));
  4142. }
  4143. bool
  4144. GlobOpt::CollectMemOpStElementI(IR::Instr *instr, Loop *loop)
  4145. {
  4146. Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict);
  4147. Assert(instr->GetSrc1());
  4148. return (!PHASE_OFF(Js::MemSetPhase, this->func) && CollectMemsetStElementI(instr, loop)) ||
  4149. (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyStElementI(instr, loop));
  4150. }
// Inspects one instruction of a candidate memop loop and updates the loop's
// memop state: element loads/stores become candidates, +1/-1 updates of an
// induction variable are recorded, and anything else is vetted by
// IsInstrInvalidForMemOp. Returns false (and clears loop->doMemOp) as soon as
// the loop is disqualified.
bool
GlobOpt::CollectMemOpInfo(IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    Assert(this->currentBlock->loop);

    Loop *loop = this->currentBlock->loop;

    if (!loop->blockList.HasTwo())
    {
        // We support memcopy and memset for loops which have only two blocks.
        return false;
    }

    if (loop->GetLoopFlags().isInterpreted && !loop->GetLoopFlags().memopMinCountReached)
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("minimum loop count not reached"))
        loop->doMemOp = false;
        return false;
    }
    Assert(loop->doMemOp);

    bool isIncr = true, isChangedByOne = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
        if (!CollectMemOpStElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::LdElemI_A:
        if (!CollectMemOpLdElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::Decr_A:
        // Intentional fall-through: Decr_A is Incr_A with isIncr flipped.
        isIncr = false;
    case Js::OpCode::Incr_A:
        isChangedByOne = true;
        goto MemOpCheckInductionVariable;
    case Js::OpCode::Sub_I4:
    case Js::OpCode::Sub_A:
        // Intentional fall-through: Sub is Add with isIncr flipped.
        isIncr = false;
    case Js::OpCode::Add_A:
    case Js::OpCode::Add_I4:
    {
MemOpCheckInductionVariable:
        // Shared handling for all +/- updates of a potential induction variable.
        StackSym *sym = instr->GetSrc1()->GetStackSym();
        if (!sym)
        {
            sym = instr->GetSrc2()->GetStackSym();
        }

        SymID inductionSymID = GetVarSymID(sym);

        if (IsSymIDInductionVariable(inductionSymID, this->currentBlock->loop))
        {
            if (!isChangedByOne)
            {
                // Add/Sub: determine whether the other operand is the constant 1.
                IR::Opnd *src1, *src2;
                src1 = instr->GetSrc1();
                src2 = instr->GetSrc2();

                if (src2->IsRegOpnd())
                {
                    Value *val = this->FindValue(src2->AsRegOpnd()->m_sym);
                    if (val)
                    {
                        ValueInfo *vi = val->GetValueInfo();
                        int constValue;
                        if (vi && vi->TryGetIntConstantValue(&constValue))
                        {
                            if (constValue == 1)
                            {
                                isChangedByOne = true;
                            }
                        }
                    }
                }
                else if (src2->IsIntConstOpnd())
                {
                    if (src2->AsIntConstOpnd()->GetValue() == 1)
                    {
                        isChangedByOne = true;
                    }
                }
            }

            loop->EnsureMemOpVariablesInitialized();
            if (!isChangedByOne)
            {
                // Update by something other than 1: mark the variable with an
                // invalid unroll factor so IsAllowedForMemOpt rejects it later.
                Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { Js::Constants::InvalidLoopUnrollFactor, 0 };

                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                }
                else
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                }
            }
            else
            {
                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    // First +/-1 update seen for this variable.
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 1, isIncr };
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                }
                else
                {
                    // Subsequent update: bump the unroll count and record direction.
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
                    inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(inductionSymID, inductionVariableChangeInfo);
                    inductionVariableChangeInfo.unroll++;
                    inductionVariableChangeInfo.isIncremental = isIncr;
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                }
            }
            break;
        }
        // Fallthrough if not an induction variable
    }
    default:
        if (IsInstrInvalidForMemOp(instr, loop, src1Val, src2Val))
        {
            loop->doMemOp = false;
            return false;
        }

        // Make sure this instruction doesn't use the memcopy transfer sym before it is checked by StElemI
        if (loop->memOpInfo && !loop->memOpInfo->candidates->Empty())
        {
            Loop::MemOpCandidate* prevCandidate = loop->memOpInfo->candidates->Head();
            if (prevCandidate->IsMemCopy())
            {
                Loop::MemCopyCandidate* memcopyCandidate = prevCandidate->AsMemCopy();
                if (memcopyCandidate->base == Js::Constants::InvalidSymID)
                {
                    if (instr->FindRegUse(memcopyCandidate->transferSym))
                    {
                        loop->doMemOp = false;
                        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Found illegal use of LdElemI value(s%d)"), GetVarSymID(memcopyCandidate->transferSym));
                        return false;
                    }
                }
            }
        }
    }

    return true;
}
  4295. bool
  4296. GlobOpt::IsInstrInvalidForMemOp(IR::Instr *instr, Loop *loop, Value *src1Val, Value *src2Val)
  4297. {
  4298. // List of instruction that are valid with memop (ie: instr that gets removed if memop is emitted)
  4299. if (
  4300. this->currentBlock != loop->GetHeadBlock() &&
  4301. !instr->IsLabelInstr() &&
  4302. instr->IsRealInstr() &&
  4303. instr->m_opcode != Js::OpCode::IncrLoopBodyCount &&
  4304. instr->m_opcode != Js::OpCode::StLoopBodyCount &&
  4305. instr->m_opcode != Js::OpCode::Ld_A &&
  4306. instr->m_opcode != Js::OpCode::Ld_I4 &&
  4307. !(instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
  4308. )
  4309. {
  4310. TRACE_MEMOP_VERBOSE(loop, instr, _u("Instruction not accepted for memop"));
  4311. return true;
  4312. }
  4313. // Check prev instr because it could have been added by an optimization and we won't see it here.
  4314. if (OpCodeAttr::FastFldInstr(instr->m_opcode) || (instr->m_prev && OpCodeAttr::FastFldInstr(instr->m_prev->m_opcode)))
  4315. {
  4316. // Refuse any operations interacting with Fields
  4317. TRACE_MEMOP_VERBOSE(loop, instr, _u("Field interaction detected"));
  4318. return true;
  4319. }
  4320. if (Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementSlot)
  4321. {
  4322. // Refuse any operations interacting with slots
  4323. TRACE_MEMOP_VERBOSE(loop, instr, _u("Slot interaction detected"));
  4324. return true;
  4325. }
  4326. if (this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val))
  4327. {
  4328. TRACE_MEMOP_VERBOSE(loop, instr, _u("Implicit call bailout detected"));
  4329. return true;
  4330. }
  4331. return false;
  4332. }
  4333. void
  4334. GlobOpt::TryReplaceLdLen(IR::Instr *& instr)
  4335. {
  4336. // Change LdFld on arrays, strings, and 'arguments' to LdLen when we're accessing the .length field
  4337. if ((instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->m_opcode == Js::OpCode::ProfiledLdFld) || instr->m_opcode == Js::OpCode::LdFld || instr->m_opcode == Js::OpCode::ScopedLdFld)
  4338. {
  4339. IR::SymOpnd * opnd = instr->GetSrc1()->AsSymOpnd();
  4340. Sym *sym = opnd->m_sym;
  4341. if (sym->IsPropertySym())
  4342. {
  4343. PropertySym *originalPropertySym = sym->AsPropertySym();
  4344. // only on .length
  4345. if (this->lengthEquivBv != nullptr && this->lengthEquivBv->Test(originalPropertySym->m_id))
  4346. {
  4347. IR::RegOpnd* newopnd = IR::RegOpnd::New(originalPropertySym->m_stackSym, IRType::TyVar, instr->m_func);
  4348. ValueInfo *const objectValueInfo = FindValue(originalPropertySym->m_stackSym)->GetValueInfo();
  4349. // Only for things we'd emit a fast path for
  4350. if (
  4351. objectValueInfo->IsLikelyAnyArray() ||
  4352. objectValueInfo->HasHadStringTag() ||
  4353. objectValueInfo->IsLikelyString() ||
  4354. newopnd->IsArgumentsObject() ||
  4355. (this->blockData.argObjSyms && IsArgumentsOpnd(newopnd))
  4356. )
  4357. {
  4358. // We need to properly transfer over the information from the old operand, which is
  4359. // a SymOpnd, to the new one, which is a RegOpnd. Unfortunately, the types mean the
  4360. // normal copy methods won't work here, so we're going to directly copy data.
  4361. newopnd->SetIsJITOptimizedReg(opnd->GetIsJITOptimizedReg());
  4362. newopnd->SetValueType(objectValueInfo->Type());
  4363. newopnd->SetIsDead(opnd->GetIsDead());
  4364. // Now that we have the operand we need, we can go ahead and make the new instr.
  4365. IR::Instr *newinstr = IR::Instr::New(Js::OpCode::LdLen_A, instr->m_func);
  4366. instr->TransferTo(newinstr);
  4367. newinstr->UnlinkSrc1();
  4368. newinstr->SetSrc1(newopnd);
  4369. instr->InsertAfter(newinstr);
  4370. instr->Remove();
  4371. instr = newinstr;
  4372. }
  4373. }
  4374. }
  4375. }
  4376. }
// Main driver of the global optimizer for a single instruction.
//
// Value-numbers the sources, applies CSE / peephole / array / type-specialization
// optimizations, inserts any needed bailouts, processes the dst, and finally
// reconciles byte-code uses and bailout captured values.
//
// Params:
//   instr          - in/out: may be replaced by a new instruction during optimization.
//   isInstrRemoved - out: set to true when the instruction was removed from the block.
// Returns the next instruction to process (captured before any replacement/removal).
IR::Instr *
GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
{
    Assert(instr->m_func->IsTopFunc() || instr->m_func->isGetterSetter || instr->m_func->callSiteIdInParentFunc != UINT16_MAX);
    IR::Opnd *src1, *src2;
    Value *src1Val = nullptr, *src2Val = nullptr, *dstVal = nullptr;
    Value *src1IndirIndexVal = nullptr, *dstIndirIndexVal = nullptr;
    // Snapshot the neighbors now: instr may be replaced or removed below, and the
    // return value must be the instruction that originally followed it.
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *instrNext = instr->m_next;
    // Labels carry the EH region when try/catch optimization is on; keep currentRegion in sync.
    if (instr->IsLabelInstr() && this->func->HasTry() && this->func->DoOptimizeTryCatch())
    {
        this->currentRegion = instr->AsLabelInstr()->GetRegion();
        Assert(this->currentRegion);
    }
    // Int-overflow-ignoring setup may decide this instruction can go away entirely
    // (only actually removed outside the loop prepass).
    if(PrepareForIgnoringIntOverflow(instr))
    {
        if(!IsLoopPrePass())
        {
            *isInstrRemoved = true;
            currentBlock->RemoveInstr(instr);
        }
        return instrNext;
    }
    // Pseudo-instructions, byte-code-use markers, and Conv_Bool are not optimized here.
    if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
    {
        return instrNext;
    }
    if (instr->m_opcode == Js::OpCode::Yield)
    {
        // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
        this->KillStateForGeneratorYield();
    }
    // Change LdFld on arrays, strings, and 'arguments' to LdLen when we're accessing the .length field
    this->TryReplaceLdLen(instr);
    // Consider: Do we ever get post-op bailout here, and if so is the FillBailOutInfo call in the right place?
    if (instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        this->FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
    }
    this->instrCountSinceLastCleanUp++;
    instr = this->PreOptPeep(instr);
    this->OptArguments(instr);
    //StackArguments Optimization - We bail out if the index is out of range of actuals.
    if ((instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
        instr->DoStackArgsOpt(this->func) && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, IR::BailOnStackArgsOutOfActualsRange);
    }
#if DBG
    // Record the byte-code syms used before optimization so we can verify below that
    // optimization did not change the set when byteCodeUses was never created.
    PropertySym *propertySymUseBefore = nullptr;
    Assert(this->byteCodeUses == nullptr);
    this->byteCodeUsesBeforeOpt->ClearAll();
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
    Assert(noImplicitCallUsesToInsert->Count() == 0);
#endif
    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;
    src1 = instr->GetSrc1();
    src2 = instr->GetSrc2();
    if (src1)
    {
        src1Val = this->OptSrc(src1, &instr, &src1IndirIndexVal);
        instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
        if (src2)
        {
            src2Val = this->OptSrc(src2, &instr);
        }
    }
    // An indirect dst also has base/index sources to optimize.
    if(instr->GetDst() && instr->GetDst()->IsIndirOpnd())
    {
        this->OptSrc(instr->GetDst(), &instr, &dstIndirIndexVal);
    }
    MarkArgumentsUsedForBranch(instr);
    CSEOptimize(this->currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal);
    OptimizeChecks(instr, src1Val, src2Val);
    OptArraySrc(&instr);
    OptNewScObject(&instr, src1Val);
    instr = this->OptPeep(instr, src1Val, src2Val);
    // Remove instructions that became Nop, and CheckThis when the sym is known safe.
    if (instr->m_opcode == Js::OpCode::Nop ||
        (instr->m_opcode == Js::OpCode::CheckThis &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc1()->AsRegOpnd()->m_sym->m_isSafeThis))
    {
        instrNext = instr->m_next;
        InsertNoImplicitCallUses(instr);
        if (this->byteCodeUses)
        {
            this->InsertByteCodeUses(instr);
        }
        *isInstrRemoved = true;
        this->currentBlock->RemoveInstr(instr);
        return instrNext;
    }
    else if (instr->m_opcode == Js::OpCode::GetNewScObject && !this->IsLoopPrePass() && src1Val->GetValueInfo()->IsPrimitive())
    {
        // Constructor returned (src1) a primitive value, so fold this into "dst = Ld_A src2", where src2 is the new object that
        // was passed into the constructor as its 'this' parameter
        instr->FreeSrc1();
        instr->SetSrc1(instr->UnlinkSrc2());
        instr->m_opcode = Js::OpCode::Ld_A;
        src1Val = src2Val;
        src2Val = nullptr;
    }
    else if (instr->m_opcode == Js::OpCode::TryCatch && this->func->DoOptimizeTryCatch())
    {
        ProcessTryCatch(instr);
    }
    else if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // BrOnException was added to model flow from try region to the catch region to assist
        // the backward pass in propagating bytecode upward exposed info from the catch block
        // to the try, and to handle break blocks. Removing it here as it has served its purpose
        // and keeping it around might also have unintended effects while merging block data for
        // the catch block's predecessors.
        // Note that the Deadstore pass will still be able to propagate bytecode upward exposed info
        // because it doesn't skip dead blocks for that.
        this->RemoveFlowEdgeToCatchBlock(instr);
        *isInstrRemoved = true;
        this->currentBlock->RemoveInstr(instr);
        return instrNext;
    }
    else if (instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        this->RemoveFlowEdgeToCatchBlock(instr);
    }
    bool isAlreadyTypeSpecialized = false;
    if (!IsLoopPrePass() && instr->HasBailOutInfo())
    {
        if (instr->GetBailOutKind() == IR::BailOutExpectingInteger)
        {
            isAlreadyTypeSpecialized = TypeSpecializeBailoutExpectedInteger(instr, src1Val, &dstVal);
        }
        else if (instr->GetBailOutKind() == IR::BailOutExpectingString)
        {
            if (instr->GetSrc1()->IsRegOpnd())
            {
                if (!src1Val || !src1Val->GetValueInfo()->IsLikelyString())
                {
                    // Disable SwitchOpt if the source is definitely not a string - This may be realized only in Globopt
                    Assert(IsSwitchOptEnabled());
                    throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
                }
            }
        }
    }
    bool forceInvariantHoisting = false;
    const bool ignoreIntOverflowInRangeForInstr = instr->ignoreIntOverflowInRange; // Save it since the instr can change
    if (!isAlreadyTypeSpecialized)
    {
        bool redoTypeSpec;
        instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);
        // Type specialization may ask for a second pass (at most one redo).
        if(redoTypeSpec && instr->m_opcode != Js::OpCode::Nop)
        {
            forceInvariantHoisting = false;
            instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);
            Assert(!redoTypeSpec);
        }
        if (instr->m_opcode == Js::OpCode::Nop)
        {
            InsertNoImplicitCallUses(instr);
            if (this->byteCodeUses)
            {
                this->InsertByteCodeUses(instr);
            }
            instrNext = instr->m_next;
            *isInstrRemoved = true;
            this->currentBlock->RemoveInstr(instr);
            return instrNext;
        }
    }
    if (ignoreIntOverflowInRangeForInstr)
    {
        VerifyIntSpecForIgnoringIntOverflow(instr);
    }
    // Track calls after any pre-op bailouts have been inserted before the call, because they will need to restore out params.
    // We don't inline in asmjs and hence we don't need to track calls in asmjs too, skipping this step for asmjs.
    if (!GetIsAsmJSFunc())
    {
        this->TrackCalls(instr);
    }
    if (instr->GetSrc1())
    {
        this->UpdateObjPtrValueType(instr->GetSrc1(), instr);
    }
    IR::Opnd *dst = instr->GetDst();
    if (dst)
    {
        // Copy prop dst uses and mark live/available type syms before tracking kills.
        CopyPropDstUses(dst, instr, src1Val);
    }
    // Track mark temp object before we process the dst so we can generate pre-op bailout
    instr = this->TrackMarkTempObject(instrPrev->m_next, instr);
    bool removed = OptTagChecks(instr);
    if (removed)
    {
        *isInstrRemoved = true;
        return instrNext;
    }
    dstVal = this->OptDst(&instr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);
    dst = instr->GetDst();
    // Recompute the next instruction: OptDst may have inserted instructions after instr.
    instrNext = instr->m_next;
    if (dst)
    {
        if (this->func->HasTry() && this->func->DoOptimizeTryCatch())
        {
            this->InsertToVarAtDefInTryRegion(instr, dst);
        }
        instr = this->SetTypeCheckBailOut(dst, instr, nullptr);
        this->UpdateObjPtrValueType(dst, instr);
    }
    BVSparse<JitArenaAllocator> instrByteCodeStackSymUsedAfter(this->alloc);
    PropertySym *propertySymUseAfter = nullptr;
    if (this->byteCodeUses != nullptr)
    {
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
    }
#if DBG
    else
    {
        // No uses were removed by optimization: the used set must be unchanged.
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
        instrByteCodeStackSymUsedAfter.Equal(this->byteCodeUsesBeforeOpt);
        Assert(propertySymUseAfter == propertySymUseBefore);
    }
#endif
    bool isHoisted = false;
    if (this->currentBlock->loop && !this->IsLoopPrePass())
    {
        isHoisted = this->TryHoistInvariant(instr, this->currentBlock, dstVal, src1Val, src2Val, true, false, forceInvariantHoisting);
    }
    src1 = instr->GetSrc1();
    if (!this->IsLoopPrePass() && src1)
    {
        // instr const, nonConst => canonicalize by swapping operands
        // This simplifies lowering. (somewhat machine dependent)
        // Note that because of Var overflows, src1 may not have been constant prop'd to an IntConst
        this->PreLowerCanonicalize(instr, &src1Val, &src2Val);
    }
    if (!PHASE_OFF(Js::MemOpPhase, this->func) &&
        !isHoisted &&
        !(instr->IsJitProfilingInstr()) &&
        this->currentBlock->loop && !IsLoopPrePass() &&
        !func->IsJitInDebugMode() &&
        (func->HasProfileInfo() && !func->GetReadOnlyProfileInfo()->IsMemOpDisabled()) &&
        this->currentBlock->loop->doMemOp)
    {
        CollectMemOpInfo(instr, src1Val, src2Val);
    }
    InsertNoImplicitCallUses(instr);
    if (this->byteCodeUses != nullptr)
    {
        // Optimization removed some uses from the instruction.
        // Need to insert fake uses so we can get the correct live register to restore in bailout.
        this->byteCodeUses->Minus(&instrByteCodeStackSymUsedAfter);
        if (this->propertySymUse == propertySymUseAfter)
        {
            this->propertySymUse = nullptr;
        }
        this->InsertByteCodeUses(instr);
    }
    if (!this->IsLoopPrePass() && !isHoisted && this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val))
    {
        IR::BailOutKind kind = IR::BailOutOnImplicitCalls;
        if(instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if((bailOutKind & ~IR::BailOutKindBits) != IR::BailOutOnImplicitCallsPreOp)
            {
                Assert(!(bailOutKind & ~IR::BailOutKindBits));
                instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
            }
        }
        else if (instr->forcePreOpBailOutIfNeeded || this->isRecursiveCallOnLandingPad)
        {
            // We can't have a byte code reg slot as dst to generate a
            // pre-op implicit call after we have processed the dst.
            // Consider: This might miss an opportunity to use a copy prop sym to restore
            // some other byte code reg if the dst is that copy prop that we already killed.
            Assert(!instr->GetDst()
                || !instr->GetDst()->IsRegOpnd()
                || instr->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg()
                || !instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot());
            this->GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
        }
        else
        {
            // Capture value of the bailout after the operation is done.
            this->GenerateBailAfterOperation(&instr, kind);
        }
    }
    if (instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        GlobOptBlockData * globOptData = &this->currentBlock->globOptData;
        globOptData->changedSyms->ClearAll();
        if (!this->changedSymsAfterIncBailoutCandidate->IsEmpty())
        {
            //
            // some symbols are changed after the values for current bailout have been
            // captured (GlobOpt::CapturedValues), need to restore such symbols as changed
            // for following incremental bailout construction, or we will miss capturing
            // values for later bailout
            //
            // swap changedSyms and changedSymsAfterIncBailoutCandidate
            // because both are from this->alloc
            BVSparse<JitArenaAllocator> * tempBvSwap = globOptData->changedSyms;
            globOptData->changedSyms = this->changedSymsAfterIncBailoutCandidate;
            this->changedSymsAfterIncBailoutCandidate = tempBvSwap;
        }
        globOptData->capturedValues = globOptData->capturedValuesCandidate;
        // null out capturedValuesCandicate to stop tracking symbols change for it
        globOptData->capturedValuesCandidate = nullptr;
    }
    return instrNext;
}
// Optimizes tagged-value (tag) checks using tracked value types.
//
// For field accesses and BailOnNotObject/BailOnNotArray, looks at the tracked value
// of the object stack sym:
//   - If a BailOnNotObject's source is already known to not be a tagged value, the
//     check is redundant: the instruction is removed and true is returned.
//   - Otherwise, when the value can be tagged but has never been a number, a separate
//     BailOnNotObject with BailOutOnTaggedValue is split out before the instruction,
//     and the value type is narrowed to "cannot be tagged".
//
// Returns true only when `instr` itself was removed from the block.
bool
GlobOpt::OptTagChecks(IR::Instr *instr)
{
    if (PHASE_OFF(Js::OptTagChecksPhase, this->func) || !this->DoTagChecks())
    {
        return false;
    }
    StackSym *stackSym = nullptr;
    IR::SymOpnd *symOpnd = nullptr;
    IR::RegOpnd *regOpnd = nullptr;
    // Identify the stack sym whose tag state we can reason about, and remember the
    // operand it came from (sym opnd for field ops, reg opnd for bail checks).
    switch(instr->m_opcode)
    {
    case Js::OpCode::LdFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::CheckFixedFld:
    case Js::OpCode::CheckPropertyGuardAndLoadType:
        symOpnd = instr->GetSrc1()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;
    case Js::OpCode::BailOnNotObject:
    case Js::OpCode::BailOnNotArray:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            regOpnd = instr->GetSrc1()->AsRegOpnd();
            stackSym = regOpnd->m_sym;
        }
        break;
    case Js::OpCode::StFld:
        symOpnd = instr->GetDst()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;
    }
    if (stackSym)
    {
        Value *value = FindValue(blockData.symToValueMap, stackSym);
        if (value)
        {
            ValueType valueType = value->GetValueInfo()->Type();
            if (instr->m_opcode == Js::OpCode::BailOnNotObject)
            {
                if (valueType.CanBeTaggedValue())
                {
                    // The check stays; going forward the value is known untagged.
                    ChangeValueType(nullptr, value, valueType.SetCanBeTaggedValue(false), false);
                    return false;
                }
                // Value is already known to not be tagged: the check is redundant.
                if (this->byteCodeUses)
                {
                    this->InsertByteCodeUses(instr);
                }
                this->currentBlock->RemoveInstr(instr);
                return true;
            }
            // Split out a tag check only when the value has never been a number and
            // we're either in the prepass or not inside a loop.
            if (valueType.CanBeTaggedValue() &&
                !valueType.HasBeenNumber() &&
                (this->IsLoopPrePass() || !this->currentBlock->loop))
            {
                ValueType newValueType = valueType.SetCanBeTaggedValue(false);
                // Split out the tag check as a separate instruction.
                IR::Instr *bailOutInstr;
                bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, IR::BailOutOnTaggedValue, instr, instr->m_func);
                if (!this->IsLoopPrePass())
                {
                    FillBailOutInfo(this->currentBlock, bailOutInstr->GetBailOutInfo());
                }
                IR::RegOpnd *srcOpnd = regOpnd;
                if (!srcOpnd)
                {
                    // Field-access case: build a reg opnd for the object sym.
                    srcOpnd = IR::RegOpnd::New(stackSym, stackSym->GetType(), instr->m_func);
                    AnalysisAssert(symOpnd);
                    if (symOpnd->GetIsJITOptimizedReg())
                    {
                        srcOpnd->SetIsJITOptimizedReg(true);
                    }
                }
                bailOutInstr->SetSrc1(srcOpnd);
                bailOutInstr->GetSrc1()->SetValueType(valueType);
                instr->InsertBefore(bailOutInstr);
                // Propagate the narrowed type to the original operand and the value table.
                if (symOpnd)
                {
                    symOpnd->SetPropertyOwnerValueType(newValueType);
                }
                else
                {
                    regOpnd->SetValueType(newValueType);
                }
                ChangeValueType(nullptr, value, newValueType, false);
            }
        }
    }
    return false;
}
  4782. bool
  4783. GlobOpt::TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal)
  4784. {
  4785. bool isAlreadyTypeSpecialized = false;
  4786. if(instr->GetSrc1()->IsRegOpnd())
  4787. {
  4788. if (!src1Val || !src1Val->GetValueInfo()->IsLikelyInt() || instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
  4789. {
  4790. Assert(IsSwitchOptEnabled());
  4791. throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
  4792. }
  4793. // Attach the BailOutExpectingInteger to FromVar and Remove the bail out info on the Ld_A (Begin Switch) instr.
  4794. this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, TyInt32, IR::BailOutExpectingInteger, false, instr);
  4795. //TypeSpecialize the dst of Ld_A
  4796. TypeSpecializeIntDst(instr, instr->m_opcode, src1Val, src1Val, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, dstVal);
  4797. isAlreadyTypeSpecialized = true;
  4798. }
  4799. instr->ClearBailOutInfo();
  4800. return isAlreadyTypeSpecialized;
  4801. }
// Processes the destination operand of an instruction after sources were optimized.
//
// Responsibilities visible here:
//   - Finish property-op optimization for property sym dsts; kill object-header-inlined
//     type syms on element stores.
//   - For indirect dsts on likely-native-array (or x86-without-SSE2 float typed array)
//     bases with a Var source, adjust bailouts since the lowerer won't emit a fast path.
//   - Process kills, value-number the dst, track loop-defined syms in the prepass, and
//     in a loop prefer a back-edge-live sym as the value's sym store.
//   - Finally feed the instruction to CSE tracking.
// Returns the dst value (possibly freshly computed by ValueNumberDst).
Value*
GlobOpt::OptDst(
    IR::Instr ** pInstr,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    Value *dstIndirIndexVal,
    Value *src1IndirIndexVal)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *opnd = instr->GetDst();
    if (opnd)
    {
        if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
        }
        else if (instr->m_opcode == Js::OpCode::StElemI_A ||
                 instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                 instr->m_opcode == Js::OpCode::InitComputedProperty)
        {
            this->KillObjectHeaderInlinedTypeSyms(this->currentBlock, false);
        }
        if (opnd->IsIndirOpnd() && !this->IsLoopPrePass())
        {
            IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
            const ValueType baseValueType(baseOpnd->GetValueType());
            if ((
                    baseValueType.IsLikelyNativeArray() ||
#ifdef _M_IX86
                    (
                        !AutoSystemInfo::Data.SSE2Available() &&
                        baseValueType.IsLikelyObject() &&
                        (
                            baseValueType.GetObjectType() == ObjectType::Float32Array ||
                            baseValueType.GetObjectType() == ObjectType::Float64Array
                        )
                    )
#else
                    false
#endif
                ) &&
                instr->GetSrc1()->IsVar())
            {
                if(instr->m_opcode == Js::OpCode::StElemC)
                {
                    // StElemC has different code that handles native array conversion or missing value stores. Add a bailout
                    // for those cases.
                    Assert(baseValueType.IsLikelyNativeArray());
                    Assert(!instr->HasBailOutInfo());
                    GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
                }
                else if(instr->HasBailOutInfo())
                {
                    // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
                    // path. Note that the removed bailouts should not be necessary for correctness. Bailout on native array
                    // conversion will be handled automatically as normal.
                    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                    if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
                    {
                        bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
                    }
                    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
                    {
                        bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
                    }
                    if(bailOutKind)
                    {
                        instr->SetBailOutKind(bailOutKind);
                    }
                    else
                    {
                        instr->ClearBailOutInfo();
                    }
                }
            }
        }
    }
    this->ProcessKills(instr);
    if (opnd)
    {
        if (dstVal == nullptr)
        {
            dstVal = ValueNumberDst(pInstr, src1Val, src2Val);
        }
        if (this->IsLoopPrePass())
        {
            // Keep track of symbols defined in the loop.
            if (opnd->IsRegOpnd())
            {
                StackSym *symDst = opnd->AsRegOpnd()->m_sym;
                rootLoopPrePass->symsDefInLoop->Set(symDst->m_id);
            }
        }
        else if (dstVal)
        {
            opnd->SetValueType(dstVal->GetValueInfo()->Type());
            if(currentBlock->loop &&
                !IsLoopPrePass() &&
                (instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::Ld_I4) &&
                instr->GetSrc1()->IsRegOpnd() &&
                !func->IsJitInDebugMode() &&
                func->DoGlobOptsForGeneratorFunc())
            {
                // Look for the following patterns:
                //
                // Pattern 1:
                //     s1[liveOnBackEdge] = s3[dead]
                //
                // Pattern 2:
                //     s3 = operation(s1[liveOnBackEdge], s2)
                //     s1[liveOnBackEdge] = s3
                //
                // In both patterns, s1 and s3 have the same value by the end. Prefer to use s1 as the sym store instead of s3
                // since s1 is live on back-edge, as otherwise, their lifetimes overlap, requiring two registers to hold the
                // value instead of one.
                do
                {
                    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
                    StackSym *srcVarSym = src->m_sym;
                    if(srcVarSym->IsTypeSpec())
                    {
                        // Work with the var equivalent of a type-specialized sym.
                        srcVarSym = srcVarSym->GetVarEquivSym(nullptr);
                        Assert(srcVarSym);
                    }
                    if(dstVal->GetValueInfo()->GetSymStore() != srcVarSym)
                    {
                        break;
                    }
                    IR::RegOpnd *const dst = opnd->AsRegOpnd();
                    StackSym *dstVarSym = dst->m_sym;
                    if(dstVarSym->IsTypeSpec())
                    {
                        dstVarSym = dstVarSym->GetVarEquivSym(nullptr);
                        Assert(dstVarSym);
                    }
                    if(!currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(dstVarSym->m_id))
                    {
                        break;
                    }
                    Value *const srcValue = FindValue(srcVarSym);
                    if(srcValue->GetValueNumber() != dstVal->GetValueNumber())
                    {
                        break;
                    }
                    if(!src->GetIsDead())
                    {
                        // Pattern 2: the src must have been defined by the immediately
                        // preceding instruction from the dst sym.
                        IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                        IR::Opnd *const prevDst = prevInstr->GetDst();
                        if(!prevDst ||
                            !src->IsEqualInternal(prevDst) ||
                            !(
                                (prevInstr->GetSrc1() && dst->IsEqual(prevInstr->GetSrc1())) ||
                                (prevInstr->GetSrc2() && dst->IsEqual(prevInstr->GetSrc2()))
                            ))
                        {
                            break;
                        }
                    }
                    this->SetSymStoreDirect(dstVal->GetValueInfo(), dstVarSym);
                } while(false);
            }
        }
        this->ValueNumberObjectType(opnd, instr);
    }
    this->CSEAddInstr(this->currentBlock, *pInstr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);
    return dstVal;
}
  4970. void
  4971. GlobOpt::CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val)
  4972. {
  4973. if (opnd->IsSymOpnd())
  4974. {
  4975. IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
  4976. if (symOpnd->m_sym->IsPropertySym())
  4977. {
  4978. PropertySym * originalPropertySym = symOpnd->m_sym->AsPropertySym();
  4979. Value *const objectValue = FindValue(originalPropertySym->m_stackSym);
  4980. symOpnd->SetPropertyOwnerValueType(objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
  4981. this->FieldHoistOptDst(instr, originalPropertySym, src1Val);
  4982. PropertySym * sym = this->CopyPropPropertySymObj(symOpnd, instr);
  4983. if (sym != originalPropertySym && !this->IsLoopPrePass())
  4984. {
  4985. // Consider: This doesn't detect hoistability of a property sym after object pointer copy prop
  4986. // on loop prepass. But if it so happened that the property sym is hoisted, we might as well do so.
  4987. this->FieldHoistOptDst(instr, sym, src1Val);
  4988. }
  4989. }
  4990. }
  4991. }
// Seeds an initial (Field PRE) value for `propertySym` at the head of `loop`.
//
// During the loop prepass, when a field is loaded in the loop and is not killed by
// the loop, this creates a fresh value (typed from the instruction's profile data),
// records it in loop->initialValueFieldMap, installs it in the landing pad (without
// a sym store) and in all previously-processed blocks of the loop, and marks the
// field and its new sym-store stack sym live. Bails out early when the field is
// killed in the loop, already has a value, or is already live in the landing pad,
// or when the object pointer's value differs between here and the landing pad.
void
GlobOpt::SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym)
{
    Value *initialValue;
    StackSym *symStore;
    // A killed field cannot carry a value across the loop.
    if (loop->allFieldsKilled || loop->fieldKilled->Test(originalPropertySym->m_id))
    {
        return;
    }
    Assert(!loop->fieldKilled->Test(propertySym->m_id));
    // Value already exists
    if (this->FindValue(propertySym))
    {
        return;
    }
    // If this initial value was already added, we would find in the current value table.
    Assert(!loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue));
    // If propertySym is live in landingPad, we don't need an initial value.
    if (loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
    {
        return;
    }
    // The object pointer must have the same value in the landing pad as it does here;
    // otherwise the field in the landing pad is not the same field.
    Value *landingPadObjPtrVal, *currentObjPtrVal;
    landingPadObjPtrVal = this->FindValue(loop->landingPad->globOptData.symToValueMap, propertySym->m_stackSym);
    currentObjPtrVal = this->FindValue(propertySym->m_stackSym);
    if (!currentObjPtrVal || !landingPadObjPtrVal || currentObjPtrVal->GetValueNumber() != landingPadObjPtrVal->GetValueNumber())
    {
        // objPtr has a different value in the landing pad.
        return;
    }
    // The opnd's value type has not yet been initialized. Since the property sym doesn't have a value, it effectively has an
    // Uninitialized value type. Use the profiled value type from the instruction.
    const ValueType profiledValueType =
        instr->IsProfiledInstr() ? instr->AsProfiledInstr()->u.FldInfo().valueType : ValueType::Uninitialized;
    Assert(!profiledValueType.IsDefinite()); // Hence the values created here don't need to be tracked for kills
    initialValue = this->NewGenericValue(profiledValueType, propertySym);
    // A fresh stack sym serves as the sym store for the initial value.
    symStore = StackSym::New(this->func);
    initialValue->GetValueInfo()->SetSymStore(symStore);
    loop->initialValueFieldMap.Add(propertySym, initialValue->Copy(this->alloc, initialValue->GetValueNumber()));
    // Copy the initial value into the landing pad, but without a symStore
    Value *landingPadInitialValue = Value::New(this->alloc, initialValue->GetValueNumber(),
        ValueInfo::New(this->alloc, initialValue->GetValueInfo()->Type()));
    this->SetValue(&(loop->landingPad->globOptData), landingPadInitialValue, propertySym);
    loop->landingPad->globOptData.liveFields->Set(propertySym->m_id);
#if DBG_DUMP
    if (PHASE_TRACE(Js::FieldPREPhase, this->func))
    {
        Output::Print(_u("** TRACE: Field PRE initial value for loop head #%d. Val:%d symStore:"),
            loop->GetHeadBlock()->GetBlockNum(), initialValue->GetValueNumber());
        symStore->Dump();
        Output::Print(_u("\n    Instr: "));
        instr->Dump();
    }
#endif
    // Add initial value to all the previous blocks in the loop.
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, this->currentBlock->GetPrev(), loop->GetHeadBlock())
    {
        if (block->GetDataUseCount() == 0)
        {
            // All successor blocks have been processed, no point in adding the value.
            continue;
        }
        Value *newValue = initialValue->Copy(this->alloc, initialValue->GetValueNumber());
        this->SetValue(&(block->globOptData), newValue, propertySym);
        block->globOptData.liveFields->Set(propertySym->m_id);
        this->SetValue(&(block->globOptData), newValue, symStore);
        block->globOptData.liveVarSyms->Set(symStore->m_id);
    } NEXT_BLOCK_BACKWARD_IN_RANGE;
    // Install the value in the current block and mark the sym store and field live.
    this->SetValue(&(this->currentBlock->globOptData), initialValue, symStore);
    this->currentBlock->globOptData.liveVarSyms->Set(symStore->m_id);
    this->blockData.liveFields->Set(propertySym->m_id);
}
  5064. // Examine src, apply copy prop and value number it
  5065. Value*
  5066. GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, IR::IndirOpnd *parentIndirOpnd)
  5067. {
  5068. IR::Instr * &instr = *pInstr;
  5069. Assert(!indirIndexValRef || !*indirIndexValRef);
  5070. Assert(
  5071. parentIndirOpnd
  5072. ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
  5073. : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
  5074. Sym *sym;
  5075. Value *val;
  5076. PropertySym *originalPropertySym = nullptr;
  5077. switch(opnd->GetKind())
  5078. {
  5079. case IR::OpndKindIntConst:
  5080. val = this->GetIntConstantValue(opnd->AsIntConstOpnd()->AsInt32(), instr);
  5081. opnd->SetValueType(val->GetValueInfo()->Type());
  5082. return val;
  5083. case IR::OpndKindInt64Const:
  5084. return nullptr;
  5085. case IR::OpndKindFloatConst:
  5086. {
  5087. const FloatConstType floatValue = opnd->AsFloatConstOpnd()->m_value;
  5088. int32 int32Value;
  5089. if(Js::JavascriptNumber::TryGetInt32Value(floatValue, &int32Value))
  5090. {
  5091. val = GetIntConstantValue(int32Value, instr);
  5092. }
  5093. else
  5094. {
  5095. val = NewFloatConstantValue(floatValue);
  5096. }
  5097. opnd->SetValueType(val->GetValueInfo()->Type());
  5098. return val;
  5099. }
  5100. case IR::OpndKindAddr:
  5101. {
  5102. IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
  5103. if (addrOpnd->m_isFunction)
  5104. {
  5105. AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func), "Fixed function address operand with fixed method calls phase disabled?");
  5106. val = NewFixedFunctionValue((Js::JavascriptFunction *)addrOpnd->m_address, addrOpnd);
  5107. opnd->SetValueType(val->GetValueInfo()->Type());
  5108. return val;
  5109. }
  5110. else if (addrOpnd->IsVar() && Js::TaggedInt::Is(addrOpnd->m_address))
  5111. {
  5112. val = this->GetIntConstantValue(Js::TaggedInt::ToInt32(addrOpnd->m_address), instr);
  5113. opnd->SetValueType(val->GetValueInfo()->Type());
  5114. return val;
  5115. }
  5116. val = this->GetVarConstantValue(addrOpnd);
  5117. return val;
  5118. }
  5119. case IR::OpndKindSym:
  5120. {
  5121. // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
  5122. // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
  5123. // value if available, before returning from this function.
  5124. opnd->SetValueType(ValueType::Uninitialized);
  5125. sym = opnd->AsSymOpnd()->m_sym;
  5126. // Don't create a new value for ArgSlots and don't copy prop them away.
  5127. if (sym->IsStackSym() && sym->AsStackSym()->IsArgSlotSym())
  5128. {
  5129. return nullptr;
  5130. }
  5131. // Unless we have profile info, don't create a new value for ArgSlots and don't copy prop them away.
  5132. if (sym->IsStackSym() && sym->AsStackSym()->IsParamSlotSym())
  5133. {
  5134. if (!instr->m_func->IsLoopBody() && instr->m_func->HasProfileInfo())
  5135. {
  5136. // Skip "this" pointer.
  5137. int paramSlotNum = sym->AsStackSym()->GetParamSlotNum() - 2;
  5138. if (paramSlotNum >= 0)
  5139. {
  5140. const auto parameterType = instr->m_func->GetReadOnlyProfileInfo()->GetParameterInfo(static_cast<Js::ArgSlot>(paramSlotNum));
  5141. val = NewGenericValue(parameterType);
  5142. opnd->SetValueType(val->GetValueInfo()->Type());
  5143. return val;
  5144. }
  5145. }
  5146. return nullptr;
  5147. }
  5148. if (!sym->IsPropertySym())
  5149. {
  5150. break;
  5151. }
  5152. originalPropertySym = sym->AsPropertySym();
  5153. Value *const objectValue = FindValue(originalPropertySym->m_stackSym);
  5154. opnd->AsSymOpnd()->SetPropertyOwnerValueType(
  5155. objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
  5156. if (!FieldHoistOptSrc(opnd->AsSymOpnd(), instr, originalPropertySym))
  5157. {
  5158. sym = this->CopyPropPropertySymObj(opnd->AsSymOpnd(), instr);
  5159. // Consider: This doesn't detect hoistability of a property sym after object pointer copy prop
  5160. // on loop prepass. But if it so happened that the property sym is hoisted, we might as well do so.
  5161. if (originalPropertySym == sym || this->IsLoopPrePass() ||
  5162. !FieldHoistOptSrc(opnd->AsSymOpnd(), instr, sym->AsPropertySym()))
  5163. {
  5164. if (!DoFieldCopyProp())
  5165. {
  5166. if (opnd->AsSymOpnd()->IsPropertySymOpnd())
  5167. {
  5168. this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
  5169. }
  5170. return nullptr;
  5171. }
  5172. switch (instr->m_opcode)
  5173. {
  5174. // These need the symbolic reference to the field, don't copy prop the value of the field
  5175. case Js::OpCode::DeleteFld:
  5176. case Js::OpCode::DeleteRootFld:
  5177. case Js::OpCode::DeleteFldStrict:
  5178. case Js::OpCode::DeleteRootFldStrict:
  5179. case Js::OpCode::ScopedDeleteFld:
  5180. case Js::OpCode::ScopedDeleteFldStrict:
  5181. case Js::OpCode::LdMethodFromFlags:
  5182. case Js::OpCode::BrOnNoProperty:
  5183. case Js::OpCode::BrOnHasProperty:
  5184. case Js::OpCode::LdMethodFldPolyInlineMiss:
  5185. case Js::OpCode::StSlotChkUndecl:
  5186. return nullptr;
  5187. };
  5188. if (instr->CallsGetter())
  5189. {
  5190. return nullptr;
  5191. }
  5192. if (this->IsLoopPrePass() && this->DoFieldPRE(this->rootLoopPrePass))
  5193. {
  5194. if (!this->prePassLoop->allFieldsKilled && !this->prePassLoop->fieldKilled->Test(sym->m_id))
  5195. {
  5196. this->SetLoopFieldInitialValue(this->rootLoopPrePass, instr, sym->AsPropertySym(), originalPropertySym);
  5197. }
  5198. if (this->IsPREInstrCandidateLoad(instr->m_opcode))
  5199. {
  5200. // Foreach property sym, remember the first instruction that loads it.
  5201. // Can this be done in one call?
  5202. if (!this->prePassInstrMap->ContainsKey(sym->m_id))
  5203. {
  5204. this->prePassInstrMap->AddNew(sym->m_id, instr);
  5205. }
  5206. }
  5207. }
  5208. break;
  5209. }
  5210. }
  5211. // We field hoisted, we can continue as a reg.
  5212. opnd = instr->GetSrc1();
  5213. }
  5214. case IR::OpndKindReg:
  5215. // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
  5216. // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
  5217. // value if available, before returning from this function.
  5218. opnd->SetValueType(ValueType::Uninitialized);
  5219. sym = opnd->AsRegOpnd()->m_sym;
  5220. this->MarkTempLastUse(instr, opnd->AsRegOpnd());
  5221. if (sym->AsStackSym()->IsTypeSpec())
  5222. {
  5223. sym = sym->AsStackSym()->GetVarEquivSym(this->func);
  5224. }
  5225. break;
  5226. case IR::OpndKindIndir:
  5227. this->OptimizeIndirUses(opnd->AsIndirOpnd(), &instr, indirIndexValRef);
  5228. return nullptr;
  5229. default:
  5230. return nullptr;
  5231. }
  5232. val = this->FindValue(sym);
  5233. if (val)
  5234. {
  5235. Assert(GlobOpt::IsLive(sym, this->currentBlock) || (sym->IsPropertySym()));
  5236. if (instr)
  5237. {
  5238. opnd = this->CopyProp(opnd, instr, val, parentIndirOpnd);
  5239. }
  5240. // Check if we freed the operand.
  5241. if (opnd == nullptr)
  5242. {
  5243. return nullptr;
  5244. }
  5245. // In a loop prepass, determine stack syms that are used before they are defined in the root loop for which the prepass
  5246. // is being done. This information is used to do type specialization conversions in the landing pad where appropriate.
  5247. if(IsLoopPrePass() &&
  5248. sym->IsStackSym() &&
  5249. !rootLoopPrePass->symsUsedBeforeDefined->Test(sym->m_id) &&
  5250. IsLive(sym, &rootLoopPrePass->landingPad->globOptData) && !isAsmJSFunc) // no typespec in asmjs and hence skipping this
  5251. {
  5252. Value *const landingPadValue = FindValue(rootLoopPrePass->landingPad->globOptData.symToValueMap, sym);
  5253. if(landingPadValue && val->GetValueNumber() == landingPadValue->GetValueNumber())
  5254. {
  5255. rootLoopPrePass->symsUsedBeforeDefined->Set(sym->m_id);
  5256. ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
  5257. if(landingPadValueInfo->IsLikelyNumber())
  5258. {
  5259. rootLoopPrePass->likelyNumberSymsUsedBeforeDefined->Set(sym->m_id);
  5260. if(DoAggressiveIntTypeSpec() ? landingPadValueInfo->IsLikelyInt() : landingPadValueInfo->IsInt())
  5261. {
  5262. // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
  5263. // specialization is enabled.
  5264. rootLoopPrePass->likelyIntSymsUsedBeforeDefined->Set(sym->m_id);
  5265. }
  5266. }
  5267. #ifdef ENABLE_SIMDJS
  5268. // SIMD_JS
  5269. // For uses before defs, we set likelySimd128*SymsUsedBeforeDefined bits for syms that have landing pad value info that allow type-spec to happen in the loop body.
  5270. // The BV will be added to loop header if the backedge has a live matching type-spec value. We then compensate in the loop header to unbox the value.
  5271. // This allows type-spec in the landing pad instead of boxing/unboxing on each iteration.
  5272. if (Js::IsSimd128Opcode(instr->m_opcode))
  5273. {
  5274. // Simd ops are strongly typed. We type-spec only if the type is likely/Definitely the expected type or if we have object which can come from merging different Simd types.
  5275. // Simd value must be initialized properly on all paths before the loop entry. Cannot be merged with Undefined/Null.
  5276. ThreadContext::SimdFuncSignature funcSignature;
  5277. instr->m_func->GetScriptContext()->GetThreadContext()->GetSimdFuncSignatureFromOpcode(instr->m_opcode, funcSignature);
  5278. Assert(funcSignature.valid);
  5279. ValueType expectedType = funcSignature.args[opnd == instr->GetSrc1() ? 0 : 1];
  5280. if (expectedType.IsSimd128Float32x4())
  5281. {
  5282. if (
  5283. (landingPadValueInfo->IsLikelySimd128Float32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5284. &&
  5285. !landingPadValueInfo->HasBeenUndefined() && !landingPadValueInfo->HasBeenNull()
  5286. )
  5287. {
  5288. rootLoopPrePass->likelySimd128F4SymsUsedBeforeDefined->Set(sym->m_id);
  5289. }
  5290. }
  5291. else if (expectedType.IsSimd128Int32x4())
  5292. {
  5293. if (
  5294. (landingPadValueInfo->IsLikelySimd128Int32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5295. &&
  5296. !landingPadValueInfo->HasBeenUndefined() && !landingPadValueInfo->HasBeenNull()
  5297. )
  5298. {
  5299. rootLoopPrePass->likelySimd128I4SymsUsedBeforeDefined->Set(sym->m_id);
  5300. }
  5301. }
  5302. }
  5303. else if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc1() && instr->GetDst()->GetValueType().IsSimd128())
  5304. {
  5305. // Extended_Args for Simd ops are annotated with the expected type by the inliner. Use this info to find out if type-spec is supposed to happen.
  5306. ValueType expectedType = instr->GetDst()->GetValueType();
  5307. if ((landingPadValueInfo->IsLikelySimd128Float32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5308. && expectedType.IsSimd128Float32x4())
  5309. {
  5310. rootLoopPrePass->likelySimd128F4SymsUsedBeforeDefined->Set(sym->m_id);
  5311. }
  5312. else if ((landingPadValueInfo->IsLikelySimd128Int32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
  5313. && expectedType.IsSimd128Int32x4())
  5314. {
  5315. rootLoopPrePass->likelySimd128I4SymsUsedBeforeDefined->Set(sym->m_id);
  5316. }
  5317. }
  5318. #endif
  5319. }
  5320. }
  5321. }
  5322. else if ((GlobOpt::TransferSrcValue(instr) || OpCodeAttr::CanCSE(instr->m_opcode)) && (opnd == instr->GetSrc1() || opnd == instr->GetSrc2()))
  5323. {
  5324. if (sym->IsPropertySym())
  5325. {
  5326. val = this->CreateFieldSrcValue(sym->AsPropertySym(), originalPropertySym, &opnd, instr);
  5327. }
  5328. else
  5329. {
  5330. val = this->NewGenericValue(ValueType::Uninitialized, opnd);
  5331. }
  5332. }
  5333. if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  5334. {
  5335. TryOptimizeInstrWithFixedDataProperty(&instr);
  5336. this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
  5337. }
  5338. if (val)
  5339. {
  5340. ValueType valueType(val->GetValueInfo()->Type());
  5341. if (valueType.IsLikelyNativeArray() && !valueType.IsObject() && instr->IsProfiledInstr())
  5342. {
  5343. // See if we have profile data for the array type
  5344. IR::ProfiledInstr *const profiledInstr = instr->AsProfiledInstr();
  5345. ValueType profiledArrayType;
  5346. switch(instr->m_opcode)
  5347. {
  5348. case Js::OpCode::LdElemI_A:
  5349. if(instr->GetSrc1()->IsIndirOpnd() && opnd == instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd())
  5350. {
  5351. profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
  5352. }
  5353. break;
  5354. case Js::OpCode::StElemI_A:
  5355. case Js::OpCode::StElemI_A_Strict:
  5356. case Js::OpCode::StElemC:
  5357. if(instr->GetDst()->IsIndirOpnd() && opnd == instr->GetDst()->AsIndirOpnd()->GetBaseOpnd())
  5358. {
  5359. profiledArrayType = profiledInstr->u.stElemInfo->GetArrayType();
  5360. }
  5361. break;
  5362. case Js::OpCode::LdLen_A:
  5363. if(instr->GetSrc1()->IsRegOpnd() && opnd == instr->GetSrc1())
  5364. {
  5365. profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
  5366. }
  5367. break;
  5368. }
  5369. if(profiledArrayType.IsLikelyObject() &&
  5370. profiledArrayType.GetObjectType() == valueType.GetObjectType() &&
  5371. (profiledArrayType.HasVarElements() || (valueType.HasIntElements() && profiledArrayType.HasFloatElements())))
  5372. {
  5373. // Merge array type we pulled from profile with type propagated by dataflow.
  5374. valueType = valueType.Merge(profiledArrayType).SetHasNoMissingValues(valueType.HasNoMissingValues());
  5375. ChangeValueType(currentBlock, FindValue(blockData.symToValueMap, opnd->AsRegOpnd()->m_sym), valueType, false);
  5376. }
  5377. }
  5378. opnd->SetValueType(valueType);
  5379. if(!IsLoopPrePass() && opnd->IsSymOpnd() && valueType.IsDefinite())
  5380. {
  5381. if (opnd->AsSymOpnd()->m_sym->IsPropertySym())
  5382. {
  5383. // A property sym can only be guaranteed to have a definite value type when implicit calls are disabled from the
  5384. // point where the sym was defined with the definite value type. Insert an instruction to indicate to the
  5385. // dead-store pass that implicit calls need to be kept disabled until after this instruction.
  5386. Assert(DoFieldCopyProp());
  5387. CaptureNoImplicitCallUses(opnd, false, instr);
  5388. }
  5389. }
  5390. }
  5391. else
  5392. {
  5393. opnd->SetValueType(ValueType::Uninitialized);
  5394. }
  5395. return val;
  5396. }
  5397. /*
  5398. * GlobOpt::TryOptimizeInstrWithFixedDataProperty
  5399. * Converts Ld[Root]Fld instr to
  5400. * * CheckFixedFld
  5401. * * Dst = Ld_A <int Constant value>
  5402. * This API assumes that the source operand is a Sym/PropertySym kind.
  5403. */
  5404. void
  5405. GlobOpt::TryOptimizeInstrWithFixedDataProperty(IR::Instr ** const pInstr)
  5406. {
  5407. Assert(pInstr);
  5408. IR::Instr * &instr = *pInstr;
  5409. IR::Opnd * src1 = instr->GetSrc1();
  5410. Assert(src1 && src1->IsSymOpnd() && src1->AsSymOpnd()->IsPropertySymOpnd());
  5411. if(PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func))
  5412. {
  5413. return;
  5414. }
  5415. if (!this->IsLoopPrePass() && !this->isRecursiveCallOnLandingPad &&
  5416. OpCodeAttr::CanLoadFixedFields(instr->m_opcode))
  5417. {
  5418. instr->TryOptimizeInstrWithFixedDataProperty(&instr, this);
  5419. }
  5420. }
  5421. bool
  5422. GlobOpt::TransferSrcValue(IR::Instr * instr)
  5423. {
  5424. // Return whether the instruction transfers a value to the destination.
  5425. // This is used to determine whether we should generate a value for the src so that it will
  5426. // match with the dst for copy prop.
  5427. // No point creating an unknown value for the src of a binary instr, as the dst will just be a different
  5428. // Don't create value for instruction without dst as well. The value doesn't go anywhere.
  5429. // if (src2 == nullptr) Disable copy prop for ScopedLdFld/ScopeStFld, etc., consider enabling that in the future
  5430. // Consider: Add opcode attribute to indicate whether the opcode would use the value or not
  5431. return instr->GetDst() != nullptr && instr->GetSrc2() == nullptr && !OpCodeAttr::DoNotTransfer(instr->m_opcode) && !instr->CallsAccessor();
  5432. }
  5433. Value*
  5434. GlobOpt::FindValue(Sym *sym)
  5435. {
  5436. return FindValue(this->blockData.symToValueMap, sym);
  5437. }
  5438. Value*
  5439. GlobOpt::FindValue(GlobHashTable *valueNumberMap, Sym *sym)
  5440. {
  5441. Assert(valueNumberMap);
  5442. if (sym->IsStackSym() && sym->AsStackSym()->IsTypeSpec())
  5443. {
  5444. sym = sym->AsStackSym()->GetVarEquivSym(this->func);
  5445. }
  5446. else if (sym->IsPropertySym())
  5447. {
  5448. return FindPropertyValue(valueNumberMap, sym->m_id);
  5449. }
  5450. if (sym->IsStackSym() && sym->AsStackSym()->IsFromByteCodeConstantTable())
  5451. {
  5452. return this->byteCodeConstantValueArray->Get(sym->m_id);
  5453. }
  5454. else
  5455. {
  5456. return FindValueFromHashTable(valueNumberMap, sym->m_id);
  5457. }
  5458. }
  5459. ValueNumber
  5460. GlobOpt::FindValueNumber(GlobHashTable *valueNumberMap, Sym *sym)
  5461. {
  5462. Value *val = FindValue(valueNumberMap, sym);
  5463. return val->GetValueNumber();
  5464. }
  5465. Value *
  5466. GlobOpt::FindPropertyValue(GlobHashTable *valueNumberMap, SymID symId)
  5467. {
  5468. Assert(this->func->m_symTable->Find(symId)->IsPropertySym());
  5469. if (!this->blockData.liveFields->Test(symId))
  5470. {
  5471. Assert(!IsHoistablePropertySym(symId));
  5472. return nullptr;
  5473. }
  5474. return FindValueFromHashTable(valueNumberMap, symId);
  5475. }
  5476. ValueNumber
  5477. GlobOpt::FindPropertyValueNumber(GlobHashTable *valueNumberMap, SymID symId)
  5478. {
  5479. Value *val = FindPropertyValue(valueNumberMap, symId);
  5480. return val->GetValueNumber();
  5481. }
  5482. Value *
  5483. GlobOpt::FindObjectTypeValue(StackSym* typeSym)
  5484. {
  5485. return FindObjectTypeValue(typeSym, this->blockData.symToValueMap);
  5486. }
  5487. Value *
  5488. GlobOpt::FindObjectTypeValue(StackSym* typeSym, BasicBlock* block)
  5489. {
  5490. return FindObjectTypeValue(typeSym->m_id, block);
  5491. }
  5492. Value *
  5493. GlobOpt::FindObjectTypeValue(SymID typeSymId, BasicBlock* block)
  5494. {
  5495. return FindObjectTypeValue(typeSymId, block->globOptData.symToValueMap, block->globOptData.liveFields);
  5496. }
  5497. Value *
  5498. GlobOpt::FindObjectTypeValue(StackSym* typeSym, GlobHashTable *valueNumberMap)
  5499. {
  5500. return FindObjectTypeValue(typeSym->m_id, valueNumberMap);
  5501. }
  5502. Value *
  5503. GlobOpt::FindObjectTypeValue(SymID typeSymId, GlobHashTable *valueNumberMap)
  5504. {
  5505. return FindObjectTypeValue(typeSymId, valueNumberMap, this->blockData.liveFields);
  5506. }
  5507. Value *
  5508. GlobOpt::FindObjectTypeValue(StackSym* typeSym, GlobHashTable *valueNumberMap, BVSparse<JitArenaAllocator>* liveFields)
  5509. {
  5510. return FindObjectTypeValue(typeSym->m_id, valueNumberMap, liveFields);
  5511. }
  5512. Value *
  5513. GlobOpt::FindObjectTypeValue(SymID typeSymId, GlobHashTable *valueNumberMap, BVSparse<JitArenaAllocator>* liveFields)
  5514. {
  5515. Assert(this->func->m_symTable->Find(typeSymId)->IsStackSym());
  5516. if (!liveFields->Test(typeSymId))
  5517. {
  5518. return nullptr;
  5519. }
  5520. Value* value = FindValueFromHashTable(valueNumberMap, typeSymId);
  5521. Assert(value == nullptr || value->GetValueInfo()->IsJsType());
  5522. return value;
  5523. }
// Find the value a property sym would have after copy-prop, even if the property
// sym itself has no value yet: backtrack through the object sym's (single) def to
// find the object's value, then map (object's copy-prop sym, property id) to an
// equivalent property sym and return its value. Returns nullptr when no value can
// be determined. May recurse (with a stack probe) when the object is itself loaded
// from a property.
Value *
GlobOpt::FindFuturePropertyValue(PropertySym *const propertySym)
{
    Assert(propertySym);

    // Try a direct lookup based on this sym
    Value *const value = FindValue(propertySym);
    if(value)
    {
        return value;
    }

    if(PHASE_OFF(Js::CopyPropPhase, func))
    {
        // Need to use copy-prop info to backtrack
        return nullptr;
    }

    // Try to get the property object's value
    StackSym *const objectSym = propertySym->m_stackSym;
    Value *objectValue = FindValue(objectSym);
    if(!objectValue)
    {
        // No direct value for the object; only backtrack through a single def.
        if(!objectSym->IsSingleDef())
        {
            return nullptr;
        }

        switch(objectSym->m_instrDef->m_opcode)
        {
            case Js::OpCode::Ld_A:
            case Js::OpCode::LdSlotArr:
            case Js::OpCode::LdSlot:
                // Allow only these op-codes for tracking the object sym's value transfer backwards. Other transfer op-codes
                // could be included here if this function is used in scenarios that need them.
                break;

            default:
                return nullptr;
        }

        // Try to get the property object's value from the src of the definition
        IR::Opnd *const objectTransferSrc = objectSym->m_instrDef->GetSrc1();
        if(!objectTransferSrc)
        {
            return nullptr;
        }
        if(objectTransferSrc->IsRegOpnd())
        {
            objectValue = FindValue(objectTransferSrc->AsRegOpnd()->m_sym);
        }
        else if(objectTransferSrc->IsSymOpnd())
        {
            Sym *const objectTransferSrcSym = objectTransferSrc->AsSymOpnd()->m_sym;
            if(objectTransferSrcSym->IsStackSym())
            {
                objectValue = FindValue(objectTransferSrcSym);
            }
            else
            {
                // The object is itself loaded from a property sym; recurse.
                // About to make a recursive call, so when jitting in the foreground, probe the stack
                if(!func->IsBackgroundJIT())
                {
                    PROBE_STACK(func->GetScriptContext(), Js::Constants::MinStackDefault);
                }
                objectValue = FindFuturePropertyValue(objectTransferSrcSym->AsPropertySym());
            }
        }
        else
        {
            // Neither a reg nor a sym operand: nothing to backtrack through.
            return nullptr;
        }
        if(!objectValue)
        {
            return nullptr;
        }
    }

    // Try to use the property object's copy-prop sym and the property ID to find a mapped property sym, and get its value
    StackSym *const objectCopyPropSym = GetCopyPropSym(nullptr, objectValue);
    if(!objectCopyPropSym)
    {
        return nullptr;
    }
    PropertySym *const propertyCopyPropSym = PropertySym::Find(objectCopyPropSym->m_id, propertySym->m_propertyId, func);
    if(!propertyCopyPropSym)
    {
        return nullptr;
    }
    return FindValue(propertyCopyPropSym);
}
  5608. Value *
  5609. GlobOpt::FindValueFromHashTable(GlobHashTable *valueNumberMap, SymID symId)
  5610. {
  5611. Value ** valuePtr = valueNumberMap->Get(symId);
  5612. if (valuePtr == nullptr)
  5613. {
  5614. return 0;
  5615. }
  5616. return (*valuePtr);
  5617. }
  5618. StackSym *
  5619. GlobOpt::GetCopyPropSym(Sym * sym, Value * value)
  5620. {
  5621. return GetCopyPropSym(this->currentBlock, sym, value);
  5622. }
  5623. StackSym *
  5624. GlobOpt::GetCopyPropSym(BasicBlock * block, Sym * sym, Value * value)
  5625. {
  5626. ValueInfo *valueInfo = value->GetValueInfo();
  5627. Sym * copySym = valueInfo->GetSymStore();
  5628. if (!copySym)
  5629. {
  5630. return nullptr;
  5631. }
  5632. // Only copy prop stackSym, as a propertySym wouldn't improve anything.
  5633. // SingleDef info isn't flow sensitive, so make sure the symbol is actually live.
  5634. if (copySym->IsStackSym() && copySym != sym)
  5635. {
  5636. Assert(!copySym->AsStackSym()->IsTypeSpec());
  5637. Value *copySymVal = this->FindValue(block->globOptData.symToValueMap, valueInfo->GetSymStore());
  5638. if (copySymVal && copySymVal->GetValueNumber() == value->GetValueNumber())
  5639. {
  5640. if (valueInfo->IsVarConstant() && !GlobOpt::IsLive(copySym, block))
  5641. {
  5642. // Because the addrConstantToValueMap isn't flow-based, the symStore of
  5643. // varConstants may not be live.
  5644. return nullptr;
  5645. }
  5646. return copySym->AsStackSym();
  5647. }
  5648. }
  5649. return nullptr;
  5650. }
// Constant prop if possible, otherwise if this value already resides in another
// symbol, reuse this previous symbol. This should help register allocation.
//
// opnd            - the source operand being examined (or a base/index of parentIndirOpnd)
// instr           - the instruction that owns the operand
// val             - the operand's current value from the value table
// parentIndirOpnd - when non-null, opnd is the base or index of this indir opnd
//
// Returns the (possibly replaced) operand, or nullptr when the operand was freed
// (StSlotChkUndecl's src2 case below).
IR::Opnd *
GlobOpt::CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd)
{
    // opnd must be either a component of parentIndirOpnd, or one of instr's operands.
    Assert(
        parentIndirOpnd
            ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
            : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());

    if (this->IsLoopPrePass())
    {
        // Transformations are not legal in prepass...
        return opnd;
    }

    if (!this->func->DoGlobOptsForGeneratorFunc())
    {
        // Don't copy prop in generator functions because non-bytecode temps that span a yield
        // cannot be saved and restored by the current bail-out mechanics utilized by generator
        // yield/resume.
        // TODO[generators][ianhall]: Enable copy-prop at least for in between yields.
        return opnd;
    }

    if (instr->m_opcode == Js::OpCode::CheckFixedFld || instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType)
    {
        // Don't copy prop into CheckFixedFld or CheckPropertyGuardAndLoadType
        return opnd;
    }

    // Don't copy-prop link operands of ExtendedArgs
    if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc2())
    {
        return opnd;
    }

    // Don't copy-prop operand of SIMD instr with ExtendedArg operands. Each instr should have its exclusive EA sequence.
    if (
        Js::IsSimd128Opcode(instr->m_opcode) &&
        instr->GetSrc1() != nullptr &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc2() == nullptr
       )
    {
        StackSym *sym = instr->GetSrc1()->GetStackSym();
        if (sym && sym->IsSingleDef() && sym->GetInstrDef()->m_opcode == Js::OpCode::ExtendArg_A)
        {
            return opnd;
        }
    }

    ValueInfo *valueInfo = val->GetValueInfo();

    // Constant prop?
    int32 intConstantValue;
    if (valueInfo->TryGetIntConstantValue(&intConstantValue))
    {
        if (PHASE_OFF(Js::ConstPropPhase, this->func))
        {
            return opnd;
        }

        if ((
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC
            ) && instr->GetSrc1() == opnd)
        {
            // Disabling prop to src of native array store, because we were losing the chance to type specialize.
            // Is it possible to type specialize this src if we allow constants, etc., to be prop'd here?
            if (instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray())
            {
                return opnd;
            }
        }

        if(opnd != instr->GetSrc1() && opnd != instr->GetSrc2())
        {
            // opnd is a base/index inside an indir opnd rather than a direct source.
            if(PHASE_OFF(Js::IndirCopyPropPhase, instr->m_func))
            {
                return opnd;
            }

            // Const-prop an indir opnd's constant index into its offset
            IR::Opnd *srcs[] = { instr->GetSrc1(), instr->GetSrc2(), instr->GetDst() };
            for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]); ++i)
            {
                const auto src = srcs[i];
                if(!src || !src->IsIndirOpnd())
                {
                    continue;
                }

                const auto indir = src->AsIndirOpnd();
                if(opnd == indir->GetIndexOpnd())
                {
                    // Fold the constant index into the offset and drop the index opnd.
                    Assert(indir->GetScale() == 0);
                    GOPT_TRACE_OPND(opnd, _u("Constant prop indir index into offset (value: %d)\n"), intConstantValue);
                    this->CaptureByteCodeSymUses(instr);
                    indir->SetOffset(intConstantValue);
                    indir->SetIndexOpnd(nullptr);
                }
            }

            return opnd;
        }

        // A constant that doesn't fit in a tagged int can't be materialized here.
        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            return opnd;
        }

        // Build the replacement constant operand with a type matching the original opnd.
        IR::Opnd *constOpnd;
        if (opnd->IsVar())
        {
            IR::AddrOpnd *addrOpnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked((int)intConstantValue), IR::AddrOpndKindConstantVar, instr->m_func);
            GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), addrOpnd->m_address, intConstantValue);
            constOpnd = addrOpnd;
        }
        else
        {
            // Note: Jit loop body generates some i32 operands...
            Assert(opnd->IsInt32() || opnd->IsInt64() || opnd->IsUInt32());
            IRType opndType;
            IntConstType constVal;
            if (opnd->IsUInt32())
            {
                // avoid sign extension
                constVal = (uint32)intConstantValue;
                opndType = TyUint32;
            }
            else
            {
                constVal = intConstantValue;
                opndType = TyInt32;
            }
            IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(constVal, opndType, instr->m_func);
            GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), intOpnd->GetImmediateValue(instr->m_func), intConstantValue);
            constOpnd = intOpnd;
        }

#if ENABLE_DEBUG_CONFIG_OPTIONS
        //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
        if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
        {
            instr->DumpFieldCopyPropTestTrace();
        }
#endif

        this->CaptureByteCodeSymUses(instr);
        opnd = instr->ReplaceSrc(opnd, constOpnd);

        // Post-replacement fixups that depend on the opcode.
        switch (instr->m_opcode)
        {
        case Js::OpCode::LdSlot:
        case Js::OpCode::LdSlotArr:
        case Js::OpCode::LdFld:
        case Js::OpCode::LdFldForTypeOf:
        case Js::OpCode::LdRootFldForTypeOf:
        case Js::OpCode::LdFldForCallApplyTarget:
        case Js::OpCode::LdRootFld:
        case Js::OpCode::LdMethodFld:
        case Js::OpCode::LdRootMethodFld:
        case Js::OpCode::LdMethodFromFlags:
        case Js::OpCode::ScopedLdMethodFld:
            // The field load's src is now a constant; the instr becomes a plain move.
            instr->m_opcode = Js::OpCode::Ld_A;
            // Deliberate fall-through to the Ld_A case to mark the dst as an int const.
        case Js::OpCode::Ld_A:
            {
                IR::Opnd * dst = instr->GetDst();
                if (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsSingleDef())
                {
                    dst->AsRegOpnd()->m_sym->SetIsIntConst((int)intConstantValue);
                }
                break;
            }
        case Js::OpCode::ArgOut_A:
        case Js::OpCode::ArgOut_A_Inline:
        case Js::OpCode::ArgOut_A_FixupForStackArgs:
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Mark the outgoing-argument sym as holding an int constant.
            if (instr->GetDst()->IsRegOpnd())
            {
                Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
                instr->GetDst()->AsRegOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
            }
            else
            {
                instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
            }
            break;

        case Js::OpCode::TypeofElem:
            // The element access is now a constant, so a plain Typeof suffices.
            instr->m_opcode = Js::OpCode::Typeof;
            break;

        case Js::OpCode::StSlotChkUndecl:
            if (instr->GetSrc2() == opnd)
            {
                // Src2 here should refer to the same location as the Dst operand, which we need to keep live
                // due to the implicit read for ChkUndecl.
                instr->m_opcode = Js::OpCode::StSlot;
                instr->FreeSrc2();
                opnd = nullptr;
            }
            break;
        }
        return opnd;
    }

    // Not an int constant: try sym-based copy prop. Extract the operand's sym.
    Sym *opndSym = nullptr;
    if (opnd->IsRegOpnd())
    {
        IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
        opndSym = regOpnd->m_sym;
    }
    else if (opnd->IsSymOpnd())
    {
        IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
        opndSym = symOpnd->m_sym;
    }
    if (!opndSym)
    {
        return opnd;
    }

    if (PHASE_OFF(Js::CopyPropPhase, this->func))
    {
        // Even with copy-prop off, keep the symStore up to date for later passes.
        this->SetSymStoreDirect(valueInfo, opndSym);
        return opnd;
    }

    // We should have dealt with field hoist already
    Assert(!GlobOpt::TransferSrcValue(instr) || !opndSym->IsPropertySym() ||
        !this->IsHoistedPropertySym(opndSym->AsPropertySym()));

    StackSym *copySym = this->GetCopyPropSym(opndSym, val);
    if (copySym != nullptr)
    {
        // Copy prop.
        return CopyPropReplaceOpnd(instr, opnd, copySym, parentIndirOpnd);
    }
    else
    {
        if (valueInfo->GetSymStore() && instr->m_opcode == Js::OpCode::Ld_A && instr->GetDst()->IsRegOpnd()
            && valueInfo->GetSymStore() == instr->GetDst()->AsRegOpnd()->m_sym)
        {
            // Avoid resetting symStore after fieldHoisting:
            //  t1 = LdFld field <- set symStore to fieldHoistSym
            //  fieldHoistSym = Ld_A t1 <- we're looking at t1 now, but want to copy-prop fieldHoistSym forward
            return opnd;
        }
        this->SetSymStoreDirect(valueInfo, opndSym);
    }
    return opnd;
}
  5883. IR::Opnd *
  5884. GlobOpt::CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd)
  5885. {
  5886. Assert(
  5887. parentIndirOpnd
  5888. ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
  5889. : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
  5890. Assert(GlobOpt::IsLive(copySym, this->currentBlock));
  5891. IR::RegOpnd *regOpnd;
  5892. StackSym *newSym = copySym;
  5893. GOPT_TRACE_OPND(opnd, _u("Copy prop s%d\n"), newSym->m_id);
  5894. #if ENABLE_DEBUG_CONFIG_OPTIONS
  5895. //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
  5896. if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
  5897. {
  5898. instr->DumpFieldCopyPropTestTrace();
  5899. }
  5900. #endif
  5901. this->CaptureByteCodeSymUses(instr);
  5902. if (opnd->IsRegOpnd())
  5903. {
  5904. regOpnd = opnd->AsRegOpnd();
  5905. regOpnd->m_sym = newSym;
  5906. regOpnd->SetIsJITOptimizedReg(true);
  5907. // The dead bit on the opnd is specific to the sym it is referencing. Since we replaced the sym, the bit is reset.
  5908. regOpnd->SetIsDead(false);
  5909. if(parentIndirOpnd)
  5910. {
  5911. return regOpnd;
  5912. }
  5913. }
  5914. else
  5915. {
  5916. // If this is an object type specialized field load inside a loop, and it produces a type value which wasn't live
  5917. // before, make sure the type check is left in the loop, because it may be the last type check in the loop protecting
  5918. // other fields which are not hoistable and are lexically upstream in the loop. If the check is not ultimately
  5919. // needed, the dead store pass will remove it.
  5920. if (this->currentBlock->loop != nullptr && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
  5921. {
  5922. IR::PropertySymOpnd* propertySymOpnd = opnd->AsPropertySymOpnd();
  5923. if (CheckIfPropOpEmitsTypeCheck(instr, propertySymOpnd))
  5924. {
  5925. // We only set guarded properties in the dead store pass, so they shouldn't be set here yet. If they were
  5926. // we would need to move them from this operand to the operand which is being copy propagated.
  5927. Assert(propertySymOpnd->GetGuardedPropOps() == nullptr);
  5928. // We're creating a copy of this operand to be reused in the same spot in the flow, so we can copy all
  5929. // flow sensitive fields. However, we will do only a type check here (no property access) and only for
  5930. // the sake of downstream instructions, so the flags pertaining to this property access are irrelevant.
  5931. IR::PropertySymOpnd* checkObjTypeOpnd = CreateOpndForTypeCheckOnly(propertySymOpnd, instr->m_func);
  5932. IR::Instr* checkObjTypeInstr = IR::Instr::New(Js::OpCode::CheckObjType, instr->m_func);
  5933. checkObjTypeInstr->SetSrc1(checkObjTypeOpnd);
  5934. checkObjTypeInstr->SetByteCodeOffset(instr);
  5935. instr->InsertBefore(checkObjTypeInstr);
  5936. // Since we inserted this instruction before the one that is being processed in natural flow, we must process
  5937. // it for object type spec explicitly here.
  5938. FinishOptPropOp(checkObjTypeInstr, checkObjTypeOpnd);
  5939. Assert(!propertySymOpnd->IsTypeChecked());
  5940. checkObjTypeInstr = this->SetTypeCheckBailOut(checkObjTypeOpnd, checkObjTypeInstr, nullptr);
  5941. Assert(checkObjTypeInstr->HasBailOutInfo());
  5942. if (this->currentBlock->loop && !this->IsLoopPrePass())
  5943. {
  5944. // Try hoisting this checkObjType.
  5945. // But since this isn't the current instr being optimized, we need to play tricks with
  5946. // the byteCodeUse fields...
  5947. BVSparse<JitArenaAllocator> *currentBytecodeUses = this->byteCodeUses;
  5948. PropertySym * currentPropertySymUse = this->propertySymUse;
  5949. PropertySym * tempPropertySymUse = NULL;
  5950. this->byteCodeUses = NULL;
  5951. BVSparse<JitArenaAllocator> *tempByteCodeUse = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
  5952. #if DBG
  5953. BVSparse<JitArenaAllocator> *currentBytecodeUsesBeforeOpt = this->byteCodeUsesBeforeOpt;
  5954. this->byteCodeUsesBeforeOpt = tempByteCodeUse;
  5955. #endif
  5956. this->propertySymUse = NULL;
  5957. GlobOpt::TrackByteCodeSymUsed(checkObjTypeInstr, tempByteCodeUse, &tempPropertySymUse);
  5958. TryHoistInvariant(checkObjTypeInstr, this->currentBlock, NULL, this->FindValue(copySym), NULL, true);
  5959. this->byteCodeUses = currentBytecodeUses;
  5960. this->propertySymUse = currentPropertySymUse;
  5961. #if DBG
  5962. this->byteCodeUsesBeforeOpt = currentBytecodeUsesBeforeOpt;
  5963. #endif
  5964. }
  5965. }
  5966. }
  5967. if (opnd->IsSymOpnd() && opnd->GetIsDead())
  5968. {
  5969. // Take the property sym out of the live fields set
  5970. this->EndFieldLifetime(opnd->AsSymOpnd());
  5971. }
  5972. regOpnd = IR::RegOpnd::New(newSym, opnd->GetType(), instr->m_func);
  5973. regOpnd->SetIsJITOptimizedReg(true);
  5974. instr->ReplaceSrc(opnd, regOpnd);
  5975. }
  5976. switch (instr->m_opcode)
  5977. {
  5978. case Js::OpCode::Ld_A:
  5979. if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
  5980. instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
  5981. {
  5982. this->InsertByteCodeUses(instr, true);
  5983. instr->m_opcode = Js::OpCode::Nop;
  5984. }
  5985. break;
  5986. case Js::OpCode::LdSlot:
  5987. case Js::OpCode::LdSlotArr:
  5988. if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
  5989. instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
  5990. {
  5991. this->InsertByteCodeUses(instr, true);
  5992. instr->m_opcode = Js::OpCode::Nop;
  5993. }
  5994. else
  5995. {
  5996. instr->m_opcode = Js::OpCode::Ld_A;
  5997. }
  5998. break;
  5999. case Js::OpCode::StSlotChkUndecl:
  6000. if (instr->GetSrc2()->IsRegOpnd())
  6001. {
  6002. // Src2 here should refer to the same location as the Dst operand, which we need to keep live
  6003. // due to the implicit read for ChkUndecl.
  6004. instr->m_opcode = Js::OpCode::StSlot;
  6005. instr->FreeSrc2();
  6006. return nullptr;
  6007. }
  6008. break;
  6009. case Js::OpCode::LdFld:
  6010. case Js::OpCode::LdFldForTypeOf:
  6011. case Js::OpCode::LdRootFldForTypeOf:
  6012. case Js::OpCode::LdFldForCallApplyTarget:
  6013. case Js::OpCode::LdRootFld:
  6014. case Js::OpCode::LdMethodFld:
  6015. case Js::OpCode::LdRootMethodFld:
  6016. case Js::OpCode::ScopedLdMethodFld:
  6017. instr->m_opcode = Js::OpCode::Ld_A;
  6018. break;
  6019. case Js::OpCode::LdMethodFromFlags:
  6020. // The bailout is checked on the loop top and we don't need to check bailout again in loop.
  6021. instr->m_opcode = Js::OpCode::Ld_A;
  6022. instr->ClearBailOutInfo();
  6023. break;
  6024. case Js::OpCode::TypeofElem:
  6025. instr->m_opcode = Js::OpCode::Typeof;
  6026. break;
  6027. }
  6028. this->MarkTempLastUse(instr, regOpnd);
  6029. return regOpnd;
  6030. }
  6031. void
  6032. GlobOpt::MarkTempLastUse(IR::Instr *instr, IR::RegOpnd *regOpnd)
  6033. {
  6034. if (OpCodeAttr::NonTempNumberSources(instr->m_opcode))
  6035. {
  6036. // Turn off bit if opcode could cause the src to be aliased.
  6037. this->blockData.isTempSrc->Clear(regOpnd->m_sym->m_id);
  6038. }
  6039. else if (this->blockData.isTempSrc->Test(regOpnd->m_sym->m_id))
  6040. {
  6041. // We just mark things that are temp in the globopt phase.
  6042. // The backwards phase will turn this off if it is not the last use.
  6043. // The isTempSrc is freed at the end of each block, which is why the backwards phase can't
  6044. // just use it.
  6045. if (!PHASE_OFF(Js::BackwardPhase, this->func) && !this->IsLoopPrePass())
  6046. {
  6047. regOpnd->m_isTempLastUse = true;
  6048. }
  6049. }
  6050. }
  6051. ValueNumber
  6052. GlobOpt::NewValueNumber()
  6053. {
  6054. ValueNumber valueNumber = this->currentValue++;
  6055. if (valueNumber == 0)
  6056. {
  6057. Js::Throw::OutOfMemory();
  6058. }
  6059. return valueNumber;
  6060. }
  6061. Value *GlobOpt::NewValue(ValueInfo *const valueInfo)
  6062. {
  6063. return NewValue(NewValueNumber(), valueInfo);
  6064. }
  6065. Value *GlobOpt::NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo)
  6066. {
  6067. Assert(valueInfo);
  6068. return Value::New(alloc, valueNumber, valueInfo);
  6069. }
  6070. Value *GlobOpt::CopyValue(Value *const value)
  6071. {
  6072. return CopyValue(value, NewValueNumber());
  6073. }
  6074. Value *GlobOpt::CopyValue(Value *const value, const ValueNumber valueNumber)
  6075. {
  6076. Assert(value);
  6077. return value->Copy(alloc, valueNumber);
  6078. }
  6079. Value *
  6080. GlobOpt::NewGenericValue(const ValueType valueType)
  6081. {
  6082. return NewGenericValue(valueType, static_cast<IR::Opnd *>(nullptr));
  6083. }
  6084. Value *
  6085. GlobOpt::NewGenericValue(const ValueType valueType, IR::Opnd *const opnd)
  6086. {
  6087. // Shouldn't assign a likely-int value to something that is definitely not an int
  6088. Assert(!(valueType.IsLikelyInt() && opnd && opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->m_isNotInt));
  6089. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  6090. Value *val = NewValue(valueInfo);
  6091. TrackNewValueForKills(val);
  6092. this->InsertNewValue(val, opnd);
  6093. return val;
  6094. }
  6095. Value *
  6096. GlobOpt::NewGenericValue(const ValueType valueType, Sym *const sym)
  6097. {
  6098. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  6099. Value *val = NewValue(valueInfo);
  6100. TrackNewValueForKills(val);
  6101. this->SetValue(&this->blockData, val, sym);
  6102. return val;
  6103. }
// Returns a Value for the given int constant, reusing a cached value for the
// same constant when it is still valid in the current block, and records the
// value on 'opnd' (if provided).
Value *
GlobOpt::GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd)
{
    Value *value = nullptr;
    Value *const cachedValue = this->intConstantToValueMap->Lookup(intConst, nullptr);

    if(cachedValue)
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
        if (symStore && IsLive(symStore, &blockData))
        {
            Value *const symStoreValue = FindValue(symStore);
            int32 symStoreIntConstantValue;
            // Reuse only when the sym-store's value in this block still has the
            // cached value number AND still holds the same int constant.
            if (symStoreValue &&
                symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
                symStoreValue->GetValueInfo()->TryGetIntConstantValue(&symStoreIntConstantValue) &&
                symStoreIntConstantValue == intConst)
            {
                value = symStoreValue;
            }
        }
    }

    if (!value)
    {
        // Cache miss, or the cached value is not usable in this block: make a new
        // int constant value. It is taggable only if it fits in a tagged int.
        value = NewIntConstantValue(intConst, instr, !Js::TaggedInt::IsOverflow(intConst));
    }

    return this->InsertNewValue(value, opnd);
}
// Creates a new int-constant Value, caches it globally, and (for taggable
// constants) tries to hoist the constant load so the value gets a sym-store.
Value *
GlobOpt::NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable)
{
    Value * value = NewValue(IntConstantValueInfo::New(this->alloc, intConst));
    // Remember the value in the global int-constant cache.
    this->intConstantToValueMap->Item(intConst, value);

    if (isTaggable &&
        !PHASE_OFF(Js::HoistConstIntPhase, this->func))
    {
        // When creating a new int constant value, make sure it gets a symstore. If the int const doesn't have a symstore,
        // any downstream instruction using the same int will have to create a new value (object) for the int.
        // This gets in the way of CSE.
        value = HoistConstantLoadAndPropagateValueBackward(Js::TaggedInt::ToVarUnchecked(intConst), instr, value);
        if (!value->GetValueInfo()->GetSymStore() &&
            (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
        {
            // Hoisting did not give the value a sym-store; fall back to using
            // this instruction's own dst sym as the store.
            StackSym * sym = instr->GetDst()->GetStackSym();
            Assert(sym);
            if (sym->IsTypeSpec())
            {
                Assert(sym->IsInt32());
                // Track the value under the var-equivalent sym, and mark the
                // int32-specialized form live in the current block.
                StackSym * varSym = sym->GetVarEquivSym(instr->m_func);
                SetValue(&this->currentBlock->globOptData, value, varSym);
                this->currentBlock->globOptData.liveInt32Syms->Set(varSym->m_id);
            }
            else
            {
                // Plain var sym: track the value under it and mark it live.
                SetValue(&this->currentBlock->globOptData, value, sym);
                this->currentBlock->globOptData.liveVarSyms->Set(sym->m_id);
            }
        }
    }
    return value;
}
  6170. ValueInfo *
  6171. GlobOpt::NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout)
  6172. {
  6173. if (min == max)
  6174. {
  6175. // Since int constant values are const-propped, negative zero tracking does not track them, and so it's okay to ignore
  6176. // 'wasNegativeZeroPreventedByBailout'
  6177. return IntConstantValueInfo::New(this->alloc, max);
  6178. }
  6179. return IntRangeValueInfo::New(this->alloc, min, max, wasNegativeZeroPreventedByBailout);
  6180. }
  6181. ValueInfo *GlobOpt::NewIntRangeValueInfo(
  6182. const ValueInfo *const originalValueInfo,
  6183. const int32 min,
  6184. const int32 max) const
  6185. {
  6186. Assert(originalValueInfo);
  6187. ValueInfo *valueInfo;
  6188. if(min == max)
  6189. {
  6190. // Since int constant values are const-propped, negative zero tracking does not track them, and so it's okay to ignore
  6191. // 'wasNegativeZeroPreventedByBailout'
  6192. valueInfo = IntConstantValueInfo::New(alloc, min);
  6193. }
  6194. else
  6195. {
  6196. valueInfo =
  6197. IntRangeValueInfo::New(
  6198. alloc,
  6199. min,
  6200. max,
  6201. min <= 0 && max >= 0 && originalValueInfo->WasNegativeZeroPreventedByBailout());
  6202. }
  6203. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  6204. return valueInfo;
  6205. }
  6206. Value *
  6207. GlobOpt::NewIntRangeValue(
  6208. const int32 min,
  6209. const int32 max,
  6210. const bool wasNegativeZeroPreventedByBailout,
  6211. IR::Opnd *const opnd)
  6212. {
  6213. ValueInfo *valueInfo = this->NewIntRangeValueInfo(min, max, wasNegativeZeroPreventedByBailout);
  6214. Value *val = NewValue(valueInfo);
  6215. if (opnd)
  6216. {
  6217. GOPT_TRACE_OPND(opnd, _u("Range %d (0x%X) to %d (0x%X)\n"), min, min, max, max);
  6218. }
  6219. this->InsertNewValue(val, opnd);
  6220. return val;
  6221. }
  6222. IntBoundedValueInfo *GlobOpt::NewIntBoundedValueInfo(
  6223. const ValueInfo *const originalValueInfo,
  6224. const IntBounds *const bounds) const
  6225. {
  6226. Assert(originalValueInfo);
  6227. bounds->Verify();
  6228. IntBoundedValueInfo *const valueInfo =
  6229. IntBoundedValueInfo::New(
  6230. originalValueInfo->Type(),
  6231. bounds,
  6232. (
  6233. bounds->ConstantLowerBound() <= 0 &&
  6234. bounds->ConstantUpperBound() >= 0 &&
  6235. originalValueInfo->WasNegativeZeroPreventedByBailout()
  6236. ),
  6237. alloc);
  6238. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  6239. return valueInfo;
  6240. }
  6241. Value *GlobOpt::NewIntBoundedValue(
  6242. const ValueType valueType,
  6243. const IntBounds *const bounds,
  6244. const bool wasNegativeZeroPreventedByBailout,
  6245. IR::Opnd *const opnd)
  6246. {
  6247. Value *const value = NewValue(IntBoundedValueInfo::New(valueType, bounds, wasNegativeZeroPreventedByBailout, alloc));
  6248. InsertNewValue(value, opnd);
  6249. return value;
  6250. }
  6251. Value *
  6252. GlobOpt::NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd)
  6253. {
  6254. FloatConstantValueInfo *valueInfo = FloatConstantValueInfo::New(this->alloc, floatValue);
  6255. Value *val = NewValue(valueInfo);
  6256. this->InsertNewValue(val, opnd);
  6257. return val;
  6258. }
// Returns a Value for the var constant referenced by 'addrOpnd', reusing a
// cached value when it is still valid in the current block. Strings get a
// second, contents-keyed lookup so equal strings can share a value. Also
// updates the operand's value type from the resulting value.
Value *
GlobOpt::GetVarConstantValue(IR::AddrOpnd *addrOpnd)
{
    bool isVar = addrOpnd->IsVar();
    bool isString = isVar && addrOpnd->m_localAddress && JITJavascriptString::Is(addrOpnd->m_localAddress);
    Value *val = nullptr;
    Value *cachedValue;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && IsLive(symStore, &blockData))
        {
            Value *const symStoreValue = FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                // Reuse only if the sym-store's value is still the same var constant.
                if(symStoreValueInfo->IsVarConstant() && symStoreValueInfo->AsVarConstant()->VarValue() == addrOpnd->m_address)
                {
                    val = symStoreValue;
                }
            }
        }
    }
    else if (isString)
    {
        // No hit by address; for strings, also try the contents-keyed string cache.
        JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
        Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
        if (this->stringConstantToValueMap->TryGetValue(internalString, &cachedValue))
        {
            // Same validation dance as above: the cached value is only usable if its
            // sym-store is live here and still carries the cached value number.
            Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
            if (symStore && IsLive(symStore, &blockData))
            {
                Value *const symStoreValue = FindValue(symStore);
                if (symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
                {
                    ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                    if (symStoreValueInfo->IsVarConstant())
                    {
                        // Compare actual string contents before reusing the cached value.
                        JITJavascriptString * cachedString = JITJavascriptString::FromVar(symStoreValue->GetValueInfo()->AsVarConstant()->VarValue(true));
                        Js::InternalString cachedInternalString(cachedString->GetString(), cachedString->GetLength());
                        if (Js::InternalStringComparer::Equals(internalString, cachedInternalString))
                        {
                            val = symStoreValue;
                        }
                    }
                }
            }
        }
    }
    if(!val)
    {
        // No reusable cached value: create a fresh var constant value.
        val = NewVarConstantValue(addrOpnd, isString);
    }
    addrOpnd->SetValueType(val->GetValueInfo()->Type());
    return val;
}
  6321. Value *
  6322. GlobOpt::NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString)
  6323. {
  6324. VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, addrOpnd->m_address, addrOpnd->GetValueType(), false, addrOpnd->m_localAddress);
  6325. Value * value = NewValue(valueInfo);
  6326. this->addrConstantToValueMap->Item(addrOpnd->m_address, value);
  6327. if (isString)
  6328. {
  6329. JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
  6330. Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
  6331. this->stringConstantToValueMap->Item(internalString, value);
  6332. }
  6333. return value;
  6334. }
// Hoists a load of 'varConst' to the top of the function so the constant has a
// sym-store available to all downstream code, then propagates the value
// backward (toward the entry block) so it is available on merges. Returns the
// (possibly re-bound) value.
Value *
GlobOpt::HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value)
{
    // Nothing to do during the loop prepass, or when we're already in the entry
    // block and the original instruction just transfers its src value.
    if (this->IsLoopPrePass() ||
        ((this->currentBlock == this->func->m_fg->blockList) &&
        TransferSrcValue(origInstr)))
    {
        return value;
    }

    // Only hoisting taggable int const loads for now. Could be extended to other constants (floats, strings, addr opnds) if we see some benefit.
    Assert(Js::TaggedInt::Is(varConst));

    // Insert a load of the constant at the top of the function
    StackSym * dstSym = StackSym::New(this->func);
    IR::RegOpnd * constRegOpnd = IR::RegOpnd::New(dstSym, TyVar, this->func);
    IR::Instr * loadInstr = IR::Instr::NewConstantLoad(constRegOpnd, (intptr_t)varConst, ValueType::GetInt(true), this->func);
    this->func->m_fg->blockList->GetFirstInstr()->InsertAfter(loadInstr);

    // Type-spec the load (Support for floats needs to be added when we start hoisting float constants).
    bool typeSpecedToInt = false;
    if (Js::TaggedInt::Is(varConst) && !IsTypeSpecPhaseOff(this->func))
    {
        typeSpecedToInt = true;
        loadInstr->m_opcode = Js::OpCode::Ld_I4;
        ToInt32Dst(loadInstr, loadInstr->GetDst()->AsRegOpnd(), this->currentBlock);
        // Mark the dst as a constant so later phases can treat it as such.
        loadInstr->GetDst()->GetStackSym()->SetIsConst();
    }
    else
    {
        // Not type-specialized: the new sym is live as a var in the current block.
        this->currentBlock->globOptData.liveVarSyms->Set(dstSym->m_id);
    }

    // Add the value (object) to the current block's symToValueMap and propagate the value backward to all relevant blocks so it is available on merges.
    value = this->InsertNewValue(value, constRegOpnd);

    // Pointer-to-member selecting which liveness bit-vector to update in each
    // visited block, depending on whether the load was int-specialized.
    BVSparse<JitArenaAllocator>* GlobOptBlockData::*bv;
    bv = typeSpecedToInt ? &GlobOptBlockData::liveInt32Syms : &GlobOptBlockData::liveVarSyms; // Will need to be expanded when we start hoisting float constants.
    if (this->currentBlock != this->func->m_fg->blockList)
    {
        // Walk backward from the current block toward the entry block, marking
        // the new sym live and installing a copy of the value (keeping the same
        // value number) in each visited block's value table.
        for (InvariantBlockBackwardIterator it(this, this->currentBlock, this->func->m_fg->blockList, nullptr);
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock * block = it.Block();
            (block->globOptData.*bv)->Set(dstSym->m_id);
            // The sym is brand new, so no block should already have a value for it.
            Assert(!FindValue(block->globOptData.symToValueMap, dstSym));
            Value *const valueCopy = CopyValue(value, value->GetValueNumber());
            SetValue(&block->globOptData, valueCopy, dstSym);
        }
    }
    return value;
}
// Returns a Value representing a fixed (known) function at a constant address,
// reusing a cached value when it is still valid in the current block, and
// records the value on 'addrOpnd'.
Value *
GlobOpt::NewFixedFunctionValue(Js::JavascriptFunction *function, IR::AddrOpnd *addrOpnd)
{
    Assert(function != nullptr);

    Value *val = nullptr;
    Value *cachedValue;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && IsLive(symStore, &blockData))
        {
            Value *const symStoreValue = FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                if(symStoreValueInfo->IsVarConstant())
                {
                    VarConstantValueInfo *const symStoreVarConstantValueInfo = symStoreValueInfo->AsVarConstant();
                    // Reuse only if it is the same address AND already marked as a function value.
                    if(symStoreVarConstantValueInfo->VarValue() == addrOpnd->m_address &&
                        symStoreVarConstantValueInfo->IsFunction())
                    {
                        val = symStoreValue;
                    }
                }
            }
        }
    }
    if(!val)
    {
        // Create a new var-constant value flagged as a function ('true' below)
        // and cache it by address.
        VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, function, addrOpnd->GetValueType(), true, addrOpnd->m_localAddress);
        val = NewValue(valueInfo);
        this->addrConstantToValueMap->AddNew(addrOpnd->m_address, val);
    }
    this->InsertNewValue(val, addrOpnd);
    return val;
}
  6425. Value *
  6426. GlobOpt::InsertNewValue(Value *val, IR::Opnd *opnd)
  6427. {
  6428. return this->InsertNewValue(&this->blockData, val, opnd);
  6429. }
  6430. Value *
  6431. GlobOpt::InsertNewValue(GlobOptBlockData *blockData, Value *val, IR::Opnd *opnd)
  6432. {
  6433. return this->SetValue(blockData, val, opnd);
  6434. }
  6435. void
  6436. GlobOpt::SetValueToHashTable(GlobHashTable *valueNumberMap, Value *val, Sym *sym)
  6437. {
  6438. Value **pValue = valueNumberMap->FindOrInsertNew(sym);
  6439. *pValue = val;
  6440. }
  6441. StackSym *GlobOpt::GetTaggedIntConstantStackSym(const int32 intConstantValue) const
  6442. {
  6443. Assert(!Js::TaggedInt::IsOverflow(intConstantValue));
  6444. return intConstantToStackSymMap->Lookup(intConstantValue, nullptr);
  6445. }
  6446. StackSym *GlobOpt::GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const
  6447. {
  6448. StackSym *stackSym = GetTaggedIntConstantStackSym(intConstantValue);
  6449. if(stackSym)
  6450. {
  6451. return stackSym;
  6452. }
  6453. stackSym = StackSym::New(TyVar,func);
  6454. intConstantToStackSymMap->Add(intConstantValue, stackSym);
  6455. return stackSym;
  6456. }
  6457. Sym *
  6458. GlobOpt::SetSymStore(ValueInfo *valueInfo, Sym *sym)
  6459. {
  6460. if (sym->IsStackSym())
  6461. {
  6462. StackSym *stackSym = sym->AsStackSym();
  6463. if (stackSym->IsTypeSpec())
  6464. {
  6465. stackSym = stackSym->GetVarEquivSym(this->func);
  6466. sym = stackSym;
  6467. }
  6468. }
  6469. if (valueInfo->GetSymStore() == nullptr || valueInfo->GetSymStore()->IsPropertySym())
  6470. {
  6471. SetSymStoreDirect(valueInfo, sym);
  6472. }
  6473. return sym;
  6474. }
  6475. void
  6476. GlobOpt::SetSymStoreDirect(ValueInfo * valueInfo, Sym * sym)
  6477. {
  6478. Sym * prevSymStore = valueInfo->GetSymStore();
  6479. if (prevSymStore && prevSymStore->IsStackSym() &&
  6480. prevSymStore->AsStackSym()->HasByteCodeRegSlot())
  6481. {
  6482. this->SetChangedSym(prevSymStore->m_id);
  6483. }
  6484. valueInfo->SetSymStore(sym);
  6485. }
  6486. void
  6487. GlobOpt::SetChangedSym(SymID symId)
  6488. {
  6489. // this->currentBlock might not be the one which contain the changing symId,
  6490. // like hoisting invariant, but more changed symId is overly conservative and safe.
  6491. // symId in the hoisted to block is marked as JITOptimizedReg so it does't affect bailout.
  6492. GlobOptBlockData * globOptData = &this->currentBlock->globOptData;
  6493. if (globOptData->changedSyms)
  6494. {
  6495. globOptData = &this->currentBlock->globOptData;
  6496. globOptData->changedSyms->Set(symId);
  6497. if (globOptData->capturedValuesCandidate != nullptr)
  6498. {
  6499. this->changedSymsAfterIncBailoutCandidate->Set(symId);
  6500. }
  6501. }
  6502. // else could be hit only in MergeValues and it is handled by MergeCapturedValues
  6503. }
  6504. void
  6505. GlobOpt::SetValue(GlobOptBlockData *blockData, Value *val, Sym * sym)
  6506. {
  6507. ValueInfo *valueInfo = val->GetValueInfo();
  6508. sym = this->SetSymStore(valueInfo, sym);
  6509. bool isStackSym = sym->IsStackSym();
  6510. if (isStackSym && sym->AsStackSym()->IsFromByteCodeConstantTable())
  6511. {
  6512. // Put the constants in a global array. This will minimize the per-block info.
  6513. this->byteCodeConstantValueArray->Set(sym->m_id, val);
  6514. this->byteCodeConstantValueNumbersBv->Set(val->GetValueNumber());
  6515. }
  6516. else
  6517. {
  6518. SetValueToHashTable(blockData->symToValueMap, val, sym);
  6519. if (isStackSym && sym->AsStackSym()->HasByteCodeRegSlot())
  6520. {
  6521. this->SetChangedSym(sym->m_id);
  6522. }
  6523. }
  6524. }
  6525. Value *
  6526. GlobOpt::SetValue(GlobOptBlockData *blockData, Value *val, IR::Opnd *opnd)
  6527. {
  6528. if (opnd)
  6529. {
  6530. Sym *sym;
  6531. switch (opnd->GetKind())
  6532. {
  6533. case IR::OpndKindSym:
  6534. sym = opnd->AsSymOpnd()->m_sym;
  6535. break;
  6536. case IR::OpndKindReg:
  6537. sym = opnd->AsRegOpnd()->m_sym;
  6538. break;
  6539. default:
  6540. sym = nullptr;
  6541. }
  6542. if (sym)
  6543. {
  6544. SetValue(blockData, val, sym);
  6545. }
  6546. }
  6547. return val;
  6548. }
  6549. // Figure out the Value of this dst.
  6550. Value *
  6551. GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
  6552. {
  6553. IR::Instr *&instr = *pInstr;
  6554. IR::Opnd *dst = instr->GetDst();
  6555. Value *dstVal = nullptr;
  6556. Sym *sym;
  6557. if (instr->CallsSetter())
  6558. {
  6559. return nullptr;
  6560. }
  6561. if (dst == nullptr)
  6562. {
  6563. return nullptr;
  6564. }
  6565. switch (dst->GetKind())
  6566. {
  6567. case IR::OpndKindSym:
  6568. sym = dst->AsSymOpnd()->m_sym;
  6569. break;
  6570. case IR::OpndKindReg:
  6571. sym = dst->AsRegOpnd()->m_sym;
  6572. if (OpCodeAttr::TempNumberProducing(instr->m_opcode))
  6573. {
  6574. this->blockData.isTempSrc->Set(sym->m_id);
  6575. }
  6576. else if (OpCodeAttr::TempNumberTransfer(instr->m_opcode))
  6577. {
  6578. IR::Opnd *src1 = instr->GetSrc1();
  6579. if (src1->IsRegOpnd() && this->blockData.isTempSrc->Test(src1->AsRegOpnd()->m_sym->m_id))
  6580. {
  6581. StackSym *src1Sym = src1->AsRegOpnd()->m_sym;
  6582. // isTempSrc is used for marking isTempLastUse, which is used to generate AddLeftDead()
  6583. // calls instead of the normal Add helpers. It tells the runtime that concats can use string
  6584. // builders.
  6585. // We need to be careful in the case where src1 points to a string builder and is getting aliased.
  6586. // Clear the bit on src and dst of the transfer instr in this case, unless we can prove src1
  6587. // isn't pointing at a string builder, like if it is single def and the def instr is not an Add,
  6588. // but TempProducing.
  6589. if (src1Sym->IsSingleDef() && src1Sym->m_instrDef->m_opcode != Js::OpCode::Add_A
  6590. && OpCodeAttr::TempNumberProducing(src1Sym->m_instrDef->m_opcode))
  6591. {
  6592. this->blockData.isTempSrc->Set(sym->m_id);
  6593. }
  6594. else
  6595. {
  6596. this->blockData.isTempSrc->Clear(src1->AsRegOpnd()->m_sym->m_id);
  6597. this->blockData.isTempSrc->Clear(sym->m_id);
  6598. }
  6599. }
  6600. else
  6601. {
  6602. this->blockData.isTempSrc->Clear(sym->m_id);
  6603. }
  6604. }
  6605. else
  6606. {
  6607. this->blockData.isTempSrc->Clear(sym->m_id);
  6608. }
  6609. break;
  6610. case IR::OpndKindIndir:
  6611. return nullptr;
  6612. default:
  6613. return nullptr;
  6614. }
  6615. int32 min1, max1, min2, max2, newMin, newMax;
  6616. ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
  6617. ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
  6618. switch (instr->m_opcode)
  6619. {
  6620. case Js::OpCode::Conv_PrimStr:
  6621. AssertMsg(instr->GetDst()->GetValueType().IsString(),
  6622. "Creator of this instruction should have set the type");
  6623. if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsPrimitive())
  6624. {
  6625. break;
  6626. }
  6627. instr->m_opcode = Js::OpCode::Conv_Str;
  6628. // fall-through
  6629. case Js::OpCode::Conv_Str:
  6630. // This opcode is commented out since we don't track regex information in GlobOpt now.
  6631. //case Js::OpCode::Coerce_Regex:
  6632. case Js::OpCode::Coerce_Str:
  6633. AssertMsg(instr->GetDst()->GetValueType().IsString(),
  6634. "Creator of this instruction should have set the type");
  6635. // fall-through
  6636. case Js::OpCode::Coerce_StrOrRegex:
  6637. // We don't set the ValueType of src1 for Coerce_StrOrRegex, hence skip the ASSERT
  6638. if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsString())
  6639. {
  6640. break;
  6641. }
  6642. instr->m_opcode = Js::OpCode::Ld_A;
  6643. // fall-through
  6644. case Js::OpCode::BytecodeArgOutCapture:
  6645. case Js::OpCode::InitConst:
  6646. case Js::OpCode::LdAsmJsFunc:
  6647. case Js::OpCode::Ld_A:
  6648. case Js::OpCode::Ld_I4:
  6649. // Propagate sym attributes across the reg copy.
  6650. if (!this->IsLoopPrePass() && instr->GetSrc1()->IsRegOpnd())
  6651. {
  6652. if (dst->AsRegOpnd()->m_sym->IsSingleDef())
  6653. {
  6654. dst->AsRegOpnd()->m_sym->CopySymAttrs(instr->GetSrc1()->AsRegOpnd()->m_sym);
  6655. }
  6656. }
  6657. if (instr->IsProfiledInstr())
  6658. {
  6659. const ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
  6660. if(!(
  6661. profiledValueType.IsLikelyInt() &&
  6662. (
  6663. (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt) ||
  6664. (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
  6665. )
  6666. ))
  6667. {
  6668. if(!src1ValueInfo)
  6669. {
  6670. dstVal = this->NewGenericValue(profiledValueType, dst);
  6671. }
  6672. else if(src1ValueInfo->IsUninitialized())
  6673. {
  6674. if(IsLoopPrePass())
  6675. {
  6676. dstVal = this->NewGenericValue(profiledValueType, dst);
  6677. }
  6678. else
  6679. {
  6680. // Assuming the profile data gives more precise value types based on the path it took at runtime, we
  6681. // can improve the original value type.
  6682. src1ValueInfo->Type() = profiledValueType;
  6683. instr->GetSrc1()->SetValueType(profiledValueType);
  6684. }
  6685. }
  6686. }
  6687. }
  6688. if (dstVal == nullptr)
  6689. {
  6690. // Ld_A is just transferring the value
  6691. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  6692. }
  6693. break;
  6694. case Js::OpCode::ExtendArg_A:
  6695. {
  6696. // SIMD_JS
  6697. // We avoid transforming EAs to Lds to keep the IR shape consistent and avoid CSEing of EAs.
  6698. // CSEOptimize only assigns a Value to the EA dst, and doesn't turn it to a Ld. If this happened, we shouldn't assign a new Value here.
  6699. if (DoCSE())
  6700. {
  6701. IR::Opnd * currDst = instr->GetDst();
  6702. Value * currDstVal = this->FindValue(currDst->GetStackSym());
  6703. if (currDstVal != nullptr)
  6704. {
  6705. return currDstVal;
  6706. }
  6707. }
  6708. break;
  6709. }
  6710. case Js::OpCode::CheckFixedFld:
  6711. AssertMsg(false, "CheckFixedFld doesn't have a dst, so we should never get here");
  6712. break;
  6713. case Js::OpCode::LdSlot:
  6714. case Js::OpCode::LdSlotArr:
  6715. case Js::OpCode::LdFld:
  6716. case Js::OpCode::LdFldForTypeOf:
  6717. case Js::OpCode::LdFldForCallApplyTarget:
  6718. // Do not transfer value type on ldFldForTypeOf to prevent copy-prop to LdRootFld in case the field doesn't exist since LdRootFldForTypeOf does not throw
  6719. //case Js::OpCode::LdRootFldForTypeOf:
  6720. case Js::OpCode::LdRootFld:
  6721. case Js::OpCode::LdMethodFld:
  6722. case Js::OpCode::LdRootMethodFld:
  6723. case Js::OpCode::ScopedLdMethodFld:
  6724. case Js::OpCode::LdMethodFromFlags:
  6725. if (instr->IsProfiledInstr())
  6726. {
  6727. ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
  6728. if(!(profiledValueType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt))
  6729. {
  6730. if(!src1ValueInfo)
  6731. {
  6732. dstVal = this->NewGenericValue(profiledValueType, dst);
  6733. }
  6734. else if(src1ValueInfo->IsUninitialized())
  6735. {
  6736. if(IsLoopPrePass() && (!dst->IsRegOpnd() || !dst->AsRegOpnd()->m_sym->IsSingleDef() || DoFieldHoisting()))
  6737. {
  6738. dstVal = this->NewGenericValue(profiledValueType, dst);
  6739. }
  6740. else
  6741. {
  6742. // Assuming the profile data gives more precise value types based on the path it took at runtime, we
  6743. // can improve the original value type.
  6744. src1ValueInfo->Type() = profiledValueType;
  6745. instr->GetSrc1()->SetValueType(profiledValueType);
  6746. }
  6747. }
  6748. }
  6749. }
  6750. if (dstVal == nullptr)
  6751. {
  6752. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  6753. }
  6754. if(!this->IsLoopPrePass())
  6755. {
  6756. // We cannot transfer value if the field hasn't been copy prop'd because we don't generate
  6757. // an implicit call bailout between those values if we don't have "live fields" unless, we are hoisting the field.
  6758. PropertySym *propertySym = instr->GetSrc1()->AsSymOpnd()->m_sym->AsPropertySym();
  6759. StackSym * fieldHoistSym;
  6760. Loop * loop = this->FindFieldHoistStackSym(this->currentBlock->loop, propertySym->m_id, &fieldHoistSym, instr);
  6761. ValueInfo *dstValueInfo = (dstVal ? dstVal->GetValueInfo() : nullptr);
  6762. // Update symStore for field hoisting
  6763. if (loop != nullptr && (dstValueInfo != nullptr))
  6764. {
  6765. this->SetSymStoreDirect(dstValueInfo, fieldHoistSym);
  6766. }
  6767. // Update symStore if it isn't a stackSym
  6768. if (dstVal && (!dstValueInfo->GetSymStore() || !dstValueInfo->GetSymStore()->IsStackSym()))
  6769. {
  6770. Assert(dst->IsRegOpnd());
  6771. this->SetSymStoreDirect(dstValueInfo, dst->AsRegOpnd()->m_sym);
  6772. }
  6773. if (src1Val != dstVal)
  6774. {
  6775. this->SetValue(&this->blockData, dstVal, instr->GetSrc1());
  6776. }
  6777. }
  6778. break;
  6779. case Js::OpCode::LdC_A_R8:
  6780. case Js::OpCode::LdC_A_I4:
  6781. case Js::OpCode::ArgIn_A:
  6782. dstVal = src1Val;
  6783. break;
  6784. case Js::OpCode::LdStr:
  6785. if (src1Val == nullptr)
  6786. {
  6787. src1Val = NewGenericValue(ValueType::String, dst);
  6788. }
  6789. dstVal = src1Val;
  6790. break;
  6791. // LdElemUndef only assign undef if the field doesn't exist.
  6792. // So we don't actually know what the value is, so we can't really copy prop it.
  6793. //case Js::OpCode::LdElemUndef:
  6794. case Js::OpCode::StSlot:
  6795. case Js::OpCode::StSlotChkUndecl:
  6796. case Js::OpCode::StFld:
  6797. case Js::OpCode::StRootFld:
  6798. case Js::OpCode::StFldStrict:
  6799. case Js::OpCode::StRootFldStrict:
  6800. if (DoFieldCopyProp())
  6801. {
  6802. if (src1Val == nullptr)
  6803. {
  6804. // src1 may have no value if it's not a valid var, e.g., NULL for let/const initialization.
  6805. // Consider creating generic values for such things.
  6806. return nullptr;
  6807. }
  6808. AssertMsg(!src2Val, "Bad src Values...");
  6809. Assert(sym->IsPropertySym());
  6810. SymID symId = sym->m_id;
  6811. Assert(instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl || !this->blockData.liveFields->Test(symId));
  6812. if (IsHoistablePropertySym(symId))
  6813. {
  6814. // We have changed the value of a hoistable field, load afterwards shouldn't get hoisted,
  6815. // but we will still copy prop the pre-assign sym to it if we have a live value.
  6816. Assert((instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl) && this->blockData.liveFields->Test(symId));
  6817. this->blockData.hoistableFields->Clear(symId);
  6818. }
  6819. this->blockData.liveFields->Set(symId);
  6820. if (!this->IsLoopPrePass() && dst->GetIsDead())
  6821. {
  6822. // Take the property sym out of the live fields set (with special handling for loops).
  6823. this->EndFieldLifetime(dst->AsSymOpnd());
  6824. }
  6825. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  6826. }
  6827. else
  6828. {
  6829. return nullptr;
  6830. }
  6831. break;
  6832. case Js::OpCode::Conv_Num:
  6833. if(src1ValueInfo->IsNumber())
  6834. {
  6835. dstVal = ValueNumberTransferDst(instr, src1Val);
  6836. }
  6837. else
  6838. {
  6839. return NewGenericValue(src1ValueInfo->Type().ToDefiniteAnyNumber(), dst);
  6840. }
  6841. break;
  6842. case Js::OpCode::Not_A:
  6843. {
  6844. if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
  6845. {
  6846. min1 = INT32_MIN;
  6847. max1 = INT32_MAX;
  6848. }
  6849. this->PropagateIntRangeForNot(min1, max1, &newMin, &newMax);
  6850. return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  6851. }
  6852. case Js::OpCode::Xor_A:
  6853. case Js::OpCode::Or_A:
  6854. case Js::OpCode::And_A:
  6855. case Js::OpCode::Shl_A:
  6856. case Js::OpCode::Shr_A:
  6857. case Js::OpCode::ShrU_A:
  6858. {
  6859. if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
  6860. {
  6861. min1 = INT32_MIN;
  6862. max1 = INT32_MAX;
  6863. }
  6864. if (!src2Val || !src2ValueInfo->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec()))
  6865. {
  6866. min2 = INT32_MIN;
  6867. max2 = INT32_MAX;
  6868. }
  6869. if (instr->m_opcode == Js::OpCode::ShrU_A &&
  6870. min1 < 0 &&
  6871. IntConstantBounds(min2, max2).And_0x1f().Contains(0))
  6872. {
  6873. // Src1 may be too large to represent as a signed int32, and src2 may be zero.
  6874. // Since the result can therefore be too large to represent as a signed int32,
  6875. // include Number in the value type.
  6876. return CreateDstUntransferredValue(
  6877. ValueType::AnyNumber.SetCanBeTaggedValue(true), instr, src1Val, src2Val);
  6878. }
  6879. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  6880. return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  6881. }
  6882. case Js::OpCode::Incr_A:
  6883. case Js::OpCode::Decr_A:
  6884. {
  6885. ValueType valueType;
  6886. if(src1Val)
  6887. {
  6888. valueType = src1Val->GetValueInfo()->Type().ToDefiniteAnyNumber();
  6889. }
  6890. else
  6891. {
  6892. valueType = ValueType::Number;
  6893. }
  6894. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  6895. }
  6896. case Js::OpCode::Add_A:
  6897. {
  6898. ValueType valueType;
  6899. if (src1Val && src1ValueInfo->IsLikelyNumber() && src2Val && src2ValueInfo->IsLikelyNumber())
  6900. {
  6901. if(src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())
  6902. {
  6903. // When doing aggressiveIntType, just assume the result is likely going to be int
  6904. // if both input is int.
  6905. const bool isLikelyTagged = src1ValueInfo->IsLikelyTaggedInt() && src2ValueInfo->IsLikelyTaggedInt();
  6906. if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  6907. {
  6908. // If both of them are numbers then we can definitely say that the result is a number.
  6909. valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
  6910. }
  6911. else
  6912. {
  6913. // This is only likely going to be int but can be a string as well.
  6914. valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
  6915. }
  6916. }
  6917. else
  6918. {
  6919. // We can only be certain of any thing if both of them are numbers.
  6920. // Otherwise, the result could be string.
  6921. if (src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  6922. {
  6923. if (src1ValueInfo->IsFloat() || src2ValueInfo->IsFloat())
  6924. {
  6925. // If one of them is a float, the result probably is a float instead of just int
  6926. // but should always be a number.
  6927. valueType = ValueType::Float;
  6928. }
  6929. else
  6930. {
  6931. // Could be int, could be number
  6932. valueType = ValueType::Number;
  6933. }
  6934. }
  6935. else if (src1ValueInfo->IsLikelyFloat() || src2ValueInfo->IsLikelyFloat())
  6936. {
  6937. // Result is likely a float (but can be anything)
  6938. valueType = ValueType::Float.ToLikely();
  6939. }
  6940. else
  6941. {
  6942. // Otherwise it is a likely int or float (but can be anything)
  6943. valueType = ValueType::Number.ToLikely();
  6944. }
  6945. }
  6946. }
  6947. else if((src1Val && src1ValueInfo->IsString()) || (src2Val && src2ValueInfo->IsString()))
  6948. {
  6949. // String + anything should always result in a string
  6950. valueType = ValueType::String;
  6951. }
  6952. else if((src1Val && src1ValueInfo->IsNotString() && src1ValueInfo->IsPrimitive())
  6953. && (src2Val && src2ValueInfo->IsNotString() && src2ValueInfo->IsPrimitive()))
  6954. {
  6955. // If src1 and src2 are not strings and primitive, add should yield a number.
  6956. valueType = ValueType::Number;
  6957. }
  6958. else if((src1Val && src1ValueInfo->IsLikelyString()) || (src2Val && src2ValueInfo->IsLikelyString()))
  6959. {
  6960. // likelystring + anything should always result in a likelystring
  6961. valueType = ValueType::String.ToLikely();
  6962. }
  6963. else
  6964. {
  6965. // Number or string. Could make the value a merge of Number and String, but Uninitialized is more useful at the moment.
  6966. Assert(valueType.IsUninitialized());
  6967. }
  6968. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  6969. }
  6970. case Js::OpCode::Div_A:
  6971. {
  6972. ValueType divValueType = GetDivValueType(instr, src1Val, src2Val, false);
  6973. if (divValueType.IsLikelyInt() || divValueType.IsFloat())
  6974. {
  6975. return CreateDstUntransferredValue(divValueType, instr, src1Val, src2Val);
  6976. }
  6977. }
  6978. // fall-through
  6979. case Js::OpCode::Sub_A:
  6980. case Js::OpCode::Mul_A:
  6981. case Js::OpCode::Rem_A:
  6982. {
  6983. ValueType valueType;
  6984. if( src1Val &&
  6985. src1ValueInfo->IsLikelyInt() &&
  6986. src2Val &&
  6987. src2ValueInfo->IsLikelyInt() &&
  6988. instr->m_opcode != Js::OpCode::Div_A)
  6989. {
  6990. const bool isLikelyTagged =
  6991. src1ValueInfo->IsLikelyTaggedInt() && (src2ValueInfo->IsLikelyTaggedInt() || instr->m_opcode == Js::OpCode::Rem_A);
  6992. if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
  6993. {
  6994. valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
  6995. }
  6996. else
  6997. {
  6998. valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
  6999. }
  7000. }
  7001. else if ((src1Val && src1ValueInfo->IsLikelyFloat()) || (src2Val && src2ValueInfo->IsLikelyFloat()))
  7002. {
  7003. // This should ideally be NewNumberAndLikelyFloatValue since we know the result is a number but not sure if it will
  7004. // be a float value. However, that Number/LikelyFloat value type doesn't exist currently and all the necessary
  7005. // checks are done for float values (tagged int checks, etc.) so it's sufficient to just create a float value here.
  7006. valueType = ValueType::Float;
  7007. }
  7008. else
  7009. {
  7010. valueType = ValueType::Number;
  7011. }
  7012. return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
  7013. }
  7014. case Js::OpCode::CallI:
  7015. Assert(dst->IsRegOpnd());
  7016. return NewGenericValue(dst->AsRegOpnd()->GetValueType(), dst);
  7017. case Js::OpCode::LdElemI_A:
  7018. {
  7019. dstVal = ValueNumberLdElemDst(pInstr, src1Val);
  7020. const ValueType baseValueType(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
  7021. if( (
  7022. baseValueType.IsLikelyNativeArray() ||
  7023. #ifdef _M_IX86
  7024. (
  7025. !AutoSystemInfo::Data.SSE2Available() &&
  7026. baseValueType.IsLikelyObject() &&
  7027. (
  7028. baseValueType.GetObjectType() == ObjectType::Float32Array ||
  7029. baseValueType.GetObjectType() == ObjectType::Float64Array
  7030. )
  7031. )
  7032. #else
  7033. false
  7034. #endif
  7035. ) &&
  7036. instr->GetDst()->IsVar() &&
  7037. instr->HasBailOutInfo())
  7038. {
  7039. // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
  7040. // path. Note that the removed bailouts should not be necessary for correctness.
  7041. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  7042. if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  7043. {
  7044. bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
  7045. }
  7046. if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
  7047. {
  7048. bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
  7049. }
  7050. if(bailOutKind)
  7051. {
  7052. instr->SetBailOutKind(bailOutKind);
  7053. }
  7054. else
  7055. {
  7056. instr->ClearBailOutInfo();
  7057. }
  7058. }
  7059. return dstVal;
  7060. }
  7061. case Js::OpCode::LdMethodElem:
  7062. // Not worth profiling this, just assume it's likely object (should be likely function but ValueType does not track
  7063. // functions currently, so using ObjectType::Object instead)
  7064. dstVal = NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely(), dst);
  7065. if(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray() && instr->HasBailOutInfo())
  7066. {
  7067. // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
  7068. // path. Note that the removed bailouts should not be necessary for correctness.
  7069. IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  7070. if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
  7071. {
  7072. bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
  7073. }
  7074. if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
  7075. {
  7076. bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
  7077. }
  7078. if(bailOutKind)
  7079. {
  7080. instr->SetBailOutKind(bailOutKind);
  7081. }
  7082. else
  7083. {
  7084. instr->ClearBailOutInfo();
  7085. }
  7086. }
  7087. return dstVal;
  7088. case Js::OpCode::StElemI_A:
  7089. case Js::OpCode::StElemI_A_Strict:
  7090. dstVal = this->ValueNumberTransferDst(instr, src1Val);
  7091. break;
  7092. case Js::OpCode::LdLen_A:
  7093. if (instr->IsProfiledInstr())
  7094. {
  7095. const ValueType profiledValueType(instr->AsProfiledInstr()->u.ldElemInfo->GetElementType());
  7096. if(!(profiledValueType.IsLikelyInt() && dst->AsRegOpnd()->m_sym->m_isNotInt))
  7097. {
  7098. return this->NewGenericValue(profiledValueType, dst);
  7099. }
  7100. }
  7101. break;
  7102. case Js::OpCode::BrOnEmpty:
  7103. case Js::OpCode::BrOnNotEmpty:
  7104. Assert(dst->IsRegOpnd());
  7105. Assert(dst->GetValueType().IsString());
  7106. return this->NewGenericValue(ValueType::String, dst);
  7107. case Js::OpCode::IsInst:
  7108. case Js::OpCode::LdTrue:
  7109. case Js::OpCode::LdFalse:
  7110. return this->NewGenericValue(ValueType::Boolean, dst);
  7111. case Js::OpCode::LdUndef:
  7112. return this->NewGenericValue(ValueType::Undefined, dst);
  7113. case Js::OpCode::LdC_A_Null:
  7114. return this->NewGenericValue(ValueType::Null, dst);
  7115. case Js::OpCode::LdThis:
  7116. if (!PHASE_OFF(Js::OptTagChecksPhase, this->func) &&
  7117. (src1ValueInfo == nullptr || src1ValueInfo->IsUninitialized()))
  7118. {
  7119. return this->NewGenericValue(ValueType::GetObject(ObjectType::Object), dst);
  7120. }
  7121. break;
  7122. case Js::OpCode::Typeof:
  7123. return this->NewGenericValue(ValueType::String, dst);
  7124. break;
  7125. }
  7126. #ifdef ENABLE_SIMDJS
  7127. // SIMD_JS
  7128. if (Js::IsSimd128Opcode(instr->m_opcode) && !func->GetJITFunctionBody()->IsAsmJsMode())
  7129. {
  7130. ThreadContext::SimdFuncSignature simdFuncSignature;
  7131. instr->m_func->GetScriptContext()->GetThreadContext()->GetSimdFuncSignatureFromOpcode(instr->m_opcode, simdFuncSignature);
  7132. return this->NewGenericValue(simdFuncSignature.returnType, dst);
  7133. }
  7134. #endif
  7135. if (dstVal == nullptr)
  7136. {
  7137. return this->NewGenericValue(dst->GetValueType(), dst);
  7138. }
  7139. return this->SetValue(&this->blockData, dstVal, dst);
  7140. }
// Produces the value for the dst of an element load (e.g. LdElemI_A).
// pInstr  - in/out pointer to the instruction; passed by address because the
//           bailout-generation path below takes &instr and may update it.
// srcVal  - the tracked value of the source (indir) operand, or nullptr.
// Returns the dst value. Side effects: may refine the src value type from
// profile data, may type-specialize the dst to int32/float64 for typed/native
// array bases, and may set or merge bailout info on the instruction.
Value *
GlobOpt::ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *dst = instr->GetDst();
    Value *dstVal = nullptr;
    int32 newMin, newMax;
    ValueInfo *srcValueInfo = (srcVal ? srcVal->GetValueInfo() : nullptr);

    // If profile data is available, doesn't contradict what we already know about
    // the dst sym (a profiled "likely int" is ignored when the sym is known not-int),
    // and the src value is still uninitialized, use the profiled element type.
    ValueType profiledElementType;
    if (instr->IsProfiledInstr())
    {
        profiledElementType = instr->AsProfiledInstr()->u.ldElemInfo->GetElementType();
        if(!(profiledElementType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt) &&
            srcVal &&
            srcValueInfo->IsUninitialized())
        {
            if(IsLoopPrePass())
            {
                dstVal = NewGenericValue(profiledElementType, dst);
            }
            else
            {
                // Assuming the profile data gives more precise value types based on the path it took at runtime, we
                // can improve the original value type.
                srcValueInfo->Type() = profiledElementType;
                instr->GetSrc1()->SetValueType(profiledElementType);
            }
        }
    }

    IR::IndirOpnd *src = instr->GetSrc1()->AsIndirOpnd();
    const ValueType baseValueType(src->GetBaseOpnd()->GetValueType());

    // Skip type specialization when: the instruction uses the arguments object,
    // the base is not a (likely) optimized typed array or profiled native array,
    // the relevant type-spec phase is disabled, or the index is not expected to be
    // a conventional array index.
    if (instr->DoStackArgsOpt(this->func) ||
        !(
            baseValueType.IsLikelyOptimizedTypedArray() ||
            (baseValueType.IsLikelyNativeArray() && instr->IsProfiledInstr()) // Specialized native array lowering for LdElem requires that it is profiled.
        ) ||
        (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
        // Don't do type spec on native array with a history of accessing gaps, as this is a bailout
        (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
        !ShouldExpectConventionalArrayIndexValue(src))
    {
        if(DoTypedArrayTypeSpec() && !IsLoopPrePass())
        {
            // Trace why this access was not specialized.
            GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access.\n"));
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because %s.\n"),
                    this->func->GetJITFunctionBody()->GetDisplayName(),
                    this->func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                    baseValueTypeStr,
                    instr->DoStackArgsOpt(this->func) ? _u("instruction uses the arguments object") :
                    baseValueType.IsLikelyOptimizedTypedArray() ? _u("index is negative or likely not int") : _u("of array type"));
                Output::Flush();
            }
        }

        // Unspecialized path: transfer the src value if we have one, otherwise fall
        // back to a generic value of the profiled element type.
        if(!dstVal)
        {
            if(srcVal)
            {
                dstVal = this->ValueNumberTransferDst(instr, srcVal);
            }
            else
            {
                dstVal = NewGenericValue(profiledElementType, dst);
            }
        }
        return dstVal;
    }

    Assert(instr->GetSrc1()->IsIndirOpnd());

    IRType toType = TyVar;
    IR::BailOutKind bailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;

    // Select the specialization (int32 range or float64) from the base array's
    // element type. The Int32Array/Float64Array labels are also entered via goto
    // from the native-array default case below.
    switch(baseValueType.GetObjectType())
    {
    case ObjectType::Int8Array:
    case ObjectType::Int8VirtualArray:
    case ObjectType::Int8MixedArray:
        newMin = Int8ConstMin;
        newMax = Int8ConstMax;
        goto IntArrayCommon;

    case ObjectType::Uint8Array:
    case ObjectType::Uint8VirtualArray:
    case ObjectType::Uint8MixedArray:
    case ObjectType::Uint8ClampedArray:
    case ObjectType::Uint8ClampedVirtualArray:
    case ObjectType::Uint8ClampedMixedArray:
        newMin = Uint8ConstMin;
        newMax = Uint8ConstMax;
        goto IntArrayCommon;

    case ObjectType::Int16Array:
    case ObjectType::Int16VirtualArray:
    case ObjectType::Int16MixedArray:
        newMin = Int16ConstMin;
        newMax = Int16ConstMax;
        goto IntArrayCommon;

    case ObjectType::Uint16Array:
    case ObjectType::Uint16VirtualArray:
    case ObjectType::Uint16MixedArray:
        newMin = Uint16ConstMin;
        newMax = Uint16ConstMax;
        goto IntArrayCommon;

    case ObjectType::Int32Array:
    case ObjectType::Int32VirtualArray:
    case ObjectType::Int32MixedArray:
    case ObjectType::Uint32Array: // int-specialized loads from uint32 arrays will bail out on values that don't fit in an int32
    case ObjectType::Uint32VirtualArray:
    case ObjectType::Uint32MixedArray:
    Int32Array:
        newMin = Int32ConstMin;
        newMax = Int32ConstMax;
        goto IntArrayCommon;

    IntArrayCommon:
        Assert(dst->IsRegOpnd());

        // If int type spec is disabled, it is ok to load int values as they can help float type spec, and merging int32 with float64 => float64.
        // But if float type spec is also disabled, we'll have problems because float64 merged with var => float64...
        if (!this->DoAggressiveIntTypeSpec() && !this->DoFloatTypeSpec())
        {
            if (!dstVal)
            {
                if (srcVal)
                {
                    dstVal = this->ValueNumberTransferDst(instr, srcVal);
                }
                else
                {
                    dstVal = NewGenericValue(profiledElementType, dst);
                }
            }
            return dstVal;
        }
        TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, bailOutKind, newMin, newMax, &dstVal);
        toType = TyInt32;
        break;

    case ObjectType::Float32Array:
    case ObjectType::Float32VirtualArray:
    case ObjectType::Float32MixedArray:
    case ObjectType::Float64Array:
    case ObjectType::Float64VirtualArray:
    case ObjectType::Float64MixedArray:
    Float64Array:
        Assert(dst->IsRegOpnd());

        // If float type spec is disabled, don't load float64 values
        if (!this->DoFloatTypeSpec())
        {
            if (!dstVal)
            {
                if (srcVal)
                {
                    dstVal = this->ValueNumberTransferDst(instr, srcVal);
                }
                else
                {
                    dstVal = NewGenericValue(profiledElementType, dst);
                }
            }
            return dstVal;
        }
        TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, &dstVal);
        toType = TyFloat64;
        break;

    default:
        // Native arrays share the specialization paths above, but use a
        // native-array-specific bailout kind.
        Assert(baseValueType.IsLikelyNativeArray());
        bailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
        if(baseValueType.HasIntElements())
        {
            goto Int32Array;
        }
        Assert(baseValueType.HasFloatElements());
        goto Float64Array;
    }

    if(!dstVal)
    {
        dstVal = NewGenericValue(profiledElementType, dst);
    }

    Assert(toType != TyVar);

    GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
    if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        char dstValTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        dstVal->GetValueInfo()->Type().ToString(dstValTypeStr);
        Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s producing %S"),
            this->func->GetJITFunctionBody()->GetDisplayName(),
            this->func->GetDebugNumberSet(debugStringBuffer),
            Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
            baseValueTypeStr,
            toType == TyInt32 ? _u("int32") : _u("float64"),
            dstValTypeStr);
#if DBG_DUMP
        Output::Print(_u(" ("));
        dstVal->Dump();
        Output::Print(_u(").\n"));
#else
        Output::Print(_u(".\n"));
#endif
        Output::Flush();
    }

    if(!this->IsLoopPrePass())
    {
        if(instr->HasBailOutInfo())
        {
            // Merge the conventional-access bailout kind with the bailout the
            // instruction already carries.
            const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
            Assert(
                (
                    !(oldBailOutKind & ~IR::BailOutKindBits) ||
                    (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                ) &&
                !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
            if(bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
            {
                // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                // bails out for the right reason.
                instr->SetBailOutKind(
                    bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
            }
            else
            {
                // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                // calls to occur, so it must be merged in to eliminate generating the helper call
                Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                instr->SetBailOutKind(oldBailOutKind | bailOutKind);
            }
        }
        else
        {
            // No bailout attached yet: attach one at this operation. Takes &instr,
            // so it may update the instruction pointer seen through *pInstr.
            GenerateBailAtOperation(&instr, bailOutKind);
        }
    }

    return dstVal;
}
  7377. ValueType
  7378. GlobOpt::GetPrepassValueTypeForDst(
  7379. const ValueType desiredValueType,
  7380. IR::Instr *const instr,
  7381. Value *const src1Value,
  7382. Value *const src2Value,
  7383. bool *const isValueInfoPreciseRef) const
  7384. {
  7385. // Values with definite types can be created in the loop prepass only when it is guaranteed that the value type will be the
  7386. // same on any iteration of the loop. The heuristics currently used are:
  7387. // - If the source sym is not live on the back-edge, then it acquires a new value for each iteration of the loop, so
  7388. // that value type can be definite
  7389. // - Consider: A better solution for this is to track values that originate in this loop, which can have definite value
  7390. // types. That catches more cases, should look into that in the future.
  7391. // - If the source sym has a constant value that doesn't change for the duration of the function
  7392. // - The operation always results in a definite value type. For instance, signed bitwise operations always result in an
  7393. // int32, conv_num and ++ always result in a number, etc.
  7394. // - For operations that always result in an int32, the resulting int range is precise only if the source syms pass
  7395. // the above heuristics. Otherwise, the range must be expanded to the full int32 range.
  7396. Assert(IsLoopPrePass());
  7397. Assert(instr);
  7398. if(isValueInfoPreciseRef)
  7399. {
  7400. *isValueInfoPreciseRef = false;
  7401. }
  7402. if(!desiredValueType.IsDefinite())
  7403. {
  7404. return desiredValueType;
  7405. }
  7406. if((instr->GetSrc1() && !IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Value)) ||
  7407. (instr->GetSrc2() && !IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Value)))
  7408. {
  7409. // If the desired value type is not precise, the value type of the destination is derived from the value types of the
  7410. // sources. Since the value type of a source sym is not definite, the destination value type also cannot be definite.
  7411. if(desiredValueType.IsInt() && OpCodeAttr::IsInt32(instr->m_opcode))
  7412. {
  7413. // The op always produces an int32, but not always a tagged int
  7414. return ValueType::GetInt(desiredValueType.IsLikelyTaggedInt());
  7415. }
  7416. if(desiredValueType.IsNumber() && OpCodeAttr::ProducesNumber(instr->m_opcode))
  7417. {
  7418. // The op always produces a number, but not always an int
  7419. return desiredValueType.ToDefiniteAnyNumber();
  7420. }
  7421. return desiredValueType.ToLikely();
  7422. }
  7423. if(isValueInfoPreciseRef)
  7424. {
  7425. // The produced value info is derived from the sources, which have precise value infos
  7426. *isValueInfoPreciseRef = true;
  7427. }
  7428. return desiredValueType;
  7429. }
  7430. bool
  7431. GlobOpt::IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue) const
  7432. {
  7433. Assert(IsLoopPrePass());
  7434. Assert(src);
  7435. if(!src->IsRegOpnd() || !srcValue)
  7436. {
  7437. return false;
  7438. }
  7439. ValueInfo *const srcValueInfo = srcValue->GetValueInfo();
  7440. if(!srcValueInfo->IsDefinite())
  7441. {
  7442. return false;
  7443. }
  7444. StackSym *srcSym = src->AsRegOpnd()->m_sym;
  7445. Assert(!srcSym->IsTypeSpec());
  7446. int32 intConstantValue;
  7447. return
  7448. srcSym->IsFromByteCodeConstantTable() ||
  7449. (
  7450. srcValueInfo->TryGetIntConstantValue(&intConstantValue) &&
  7451. !Js::TaggedInt::IsOverflow(intConstantValue) &&
  7452. GetTaggedIntConstantStackSym(intConstantValue) == srcSym
  7453. ) ||
  7454. !currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(srcSym->m_id);
  7455. }
  7456. Value *GlobOpt::CreateDstUntransferredIntValue(
  7457. const int32 min,
  7458. const int32 max,
  7459. IR::Instr *const instr,
  7460. Value *const src1Value,
  7461. Value *const src2Value)
  7462. {
  7463. Assert(instr);
  7464. Assert(instr->GetDst());
  7465. Assert(OpCodeAttr::ProducesNumber(instr->m_opcode)
  7466. || (instr->m_opcode == Js::OpCode::Add_A && src1Value->GetValueInfo()->IsNumber()
  7467. && src2Value->GetValueInfo()->IsNumber()));
  7468. ValueType valueType(ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  7469. Assert(valueType.IsInt());
  7470. bool isValueInfoPrecise;
  7471. if(IsLoopPrePass())
  7472. {
  7473. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, &isValueInfoPrecise);
  7474. }
  7475. else
  7476. {
  7477. isValueInfoPrecise = true;
  7478. }
  7479. IR::Opnd *const dst = instr->GetDst();
  7480. if(isValueInfoPrecise)
  7481. {
  7482. Assert(valueType == ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  7483. Assert(!(dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsTypeSpec()));
  7484. return NewIntRangeValue(min, max, false, dst);
  7485. }
  7486. return NewGenericValue(valueType, dst);
  7487. }
  7488. Value *
  7489. GlobOpt::CreateDstUntransferredValue(
  7490. const ValueType desiredValueType,
  7491. IR::Instr *const instr,
  7492. Value *const src1Value,
  7493. Value *const src2Value)
  7494. {
  7495. Assert(instr);
  7496. Assert(instr->GetDst());
  7497. Assert(!desiredValueType.IsInt()); // use CreateDstUntransferredIntValue instead
  7498. ValueType valueType(desiredValueType);
  7499. if(IsLoopPrePass())
  7500. {
  7501. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value);
  7502. }
  7503. return NewGenericValue(valueType, instr->GetDst());
  7504. }
  7505. Value *
  7506. GlobOpt::ValueNumberTransferDst(IR::Instr *const instr, Value * src1Val)
  7507. {
  7508. Value *dstVal = this->IsLoopPrePass() ? this->ValueNumberTransferDstInPrepass(instr, src1Val) : src1Val;
  7509. // Don't copy-prop a temp over a user symbol. This is likely to extend the temp's lifetime, as the user symbol
  7510. // is more likely to already have later references.
  7511. // REVIEW: Enabling this does cause perf issues...
  7512. #if 0
  7513. if (dstVal != src1Val)
  7514. {
  7515. return dstVal;
  7516. }
  7517. Sym *dstSym = dst->GetStackSym();
  7518. if (dstVal && dstSym && dstSym->IsStackSym() && !dstSym->AsStackSym()->m_isBytecodeTmp)
  7519. {
  7520. Sym *dstValSym = dstVal->GetValueInfo()->GetSymStore();
  7521. if (dstValSym && dstValSym->AsStackSym()->m_isBytecodeTmp /* src->GetIsDead()*/)
  7522. {
  7523. dstVal->GetValueInfo()->SetSymStore(dstSym);
  7524. }
  7525. }
  7526. #endif
  7527. return dstVal;
  7528. }
  7529. bool
  7530. GlobOpt::IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue)
  7531. {
  7532. if (this->DoFieldHoisting())
  7533. {
  7534. return false;
  7535. }
  7536. if (src->IsRegOpnd())
  7537. {
  7538. StackSym *srcSym = src->AsRegOpnd()->m_sym;
  7539. if (srcSym->IsFromByteCodeConstantTable())
  7540. {
  7541. return true;
  7542. }
  7543. ValueInfo *srcValueInfo = srcValue->GetValueInfo();
  7544. int32 srcIntConstantValue;
  7545. if (srcValueInfo->TryGetIntConstantValue(&srcIntConstantValue) && !Js::TaggedInt::IsOverflow(srcIntConstantValue)
  7546. && GetTaggedIntConstantStackSym(srcIntConstantValue) == srcSym)
  7547. {
  7548. return true;
  7549. }
  7550. }
  7551. return false;
  7552. }
// Prepass counterpart of ValueNumberTransferDst: produces the dst value for a
// transfer-style instruction during the loop prepass. Because src1's value could
// change on a later iteration, the value is either copied under a new value number
// (when the prepass type is precise, or already fully generic) or replaced with a
// fresh generic value of the conservatively merged type. Returns nullptr when
// there is no src1 value to transfer.
Value *
GlobOpt::ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val)
{
    Value *dstVal = nullptr;

    if (!src1Val)
    {
        return nullptr;
    }

    bool isValueInfoPrecise;
    ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();

    // TODO: This conflicts with new values created by the type specialization code
    // We should re-enable if we change that code to avoid the new values.
#if 0
    if (this->IsSafeToTransferInPrePass(instr->GetSrc1(), src1Val))
    {
        return src1Val;
    }

    if (this->IsPREInstrCandidateLoad(instr->m_opcode) && instr->GetDst())
    {
        StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;

        for (Loop *curLoop = this->currentBlock->loop; curLoop; curLoop = curLoop->parent)
        {
            if (curLoop->fieldPRESymStore->Test(dstSym->m_id))
            {
                return src1Val;
            }
        }
    }

    if (!this->DoFieldHoisting())
    {
        if (instr->GetDst()->IsRegOpnd())
        {
            StackSym *stackSym = instr->GetDst()->AsRegOpnd()->m_sym;

            if (stackSym->IsSingleDef() || this->IsLive(stackSym, this->prePassLoop->landingPad))
            {
                IntConstantBounds src1IntConstantBounds;
                if (src1ValueInfo->TryGetIntConstantBounds(&src1IntConstantBounds) &&
                    !(
                        src1IntConstantBounds.LowerBound() == INT32_MIN &&
                        src1IntConstantBounds.UpperBound() == INT32_MAX
                        ))
                {
                    const ValueType valueType(
                        GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
                    if (isValueInfoPrecise)
                    {
                        return src1Val;
                    }
                }
                else
                {
                    return src1Val;
                }
            }
        }
    }
#endif

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // first pass when transferring a value that is live on the back-edge.
    // In prepass we are going to copy the value but with a different value number
    // for aggressive int type spec.
    const ValueType valueType(GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
    if(isValueInfoPrecise || (valueType == src1ValueInfo->Type() && src1ValueInfo->IsGeneric()))
    {
        // Type was not widened (or was already generic): keep the same value info,
        // but under a new value number so type spec doesn't equate iterations.
        Assert(valueType == src1ValueInfo->Type());
        dstVal = CopyValue(src1Val);
        TrackCopiedValueForKills(dstVal);
    }
    else
    {
        // Widened: start over with a generic value of the merged type, but keep the
        // sym store so copy-prop information is not lost.
        dstVal = NewGenericValue(valueType);
        dstVal->GetValueInfo()->SetSymStore(src1ValueInfo->GetSymStore());
    }

    return dstVal;
}
  7629. void
  7630. GlobOpt::PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32* pNewMax)
  7631. {
  7632. int32 tmp;
  7633. Int32Math::Not(minimum, pNewMin);
  7634. *pNewMax = *pNewMin;
  7635. Int32Math::Not(maximum, &tmp);
  7636. *pNewMin = min(*pNewMin, tmp);
  7637. *pNewMax = max(*pNewMax, tmp);
  7638. }
// Computes a conservative int32 result range [*pNewMin, *pNewMax] for a binary
// bitwise/shift instruction, given the source ranges [min1, max1] and [min2, max2].
// Opcodes not handled by the switch fall through with the full int32 range.
// NOTE(review): the bounds here are deliberately loose power-of-two envelopes, not
// tight bit-interval arithmetic.
void
GlobOpt::PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
    int32 min2, int32 max2, int32 *pNewMin, int32* pNewMax)
{
    int32 min, max, tmp, tmp2;

    // Default: unknown opcode yields the full range.
    min = INT32_MIN;
    max = INT32_MAX;

    switch (instr->m_opcode)
    {
    case Js::OpCode::Xor_A:
    case Js::OpCode::Or_A:
        // Find range with highest high order bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp > (uint32)tmp2)
        {
            max = tmp;
        }
        else
        {
            max = tmp2;
        }

        if (max < 0)
        {
            min = INT32_MIN;  // REVIEW: conservative...
            max = INT32_MAX;
        }
        else
        {
            // Turn values like 0x1010 into 0x1111 (round max up to an all-ones mask)
            max = 1 << Math::Log2(max);
            max = (uint32)(max << 1) - 1;
            min = 0;
        }
        break;

    case Js::OpCode::And_A:
        if (min1 == INT32_MIN && min2 == INT32_MIN)
        {
            // Shortcut: both operands are unbounded below, keep the full range.
            break;
        }

        // Find range with lowest higher bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp < (uint32)tmp2)
        {
            min = min1;
            max = max1;
        }
        else
        {
            min = min2;
            max = max2;
        }

        // To compute max, look if min has higher high bit
        if ((uint32)min > (uint32)max)
        {
            max = min;
        }

        // If max is negative, max let's assume it could be -1, so result in MAX_INT
        if (max < 0)
        {
            max = INT32_MAX;
        }

        // If min is positive, the resulting min is zero
        if (min >= 0)
        {
            min = 0;
        }
        else
        {
            min = INT32_MIN;
        }
        break;

    case Js::OpCode::Shl_A:
        {
            // Shift count: normalize to [0, 31]; a variable count that can exceed
            // 31 is treated as the full 5-bit range (JS masks shift counts by 0x1F).
            if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
            {
                min2 = 0;
                max2 = 0x1F;
            }
            else
            {
                min2 &= 0x1F;
                max2 &= 0x1F;
            }

            // Number of leading bits free above the highest set bit of each bound.
            int32 min1FreeTopBitCount = min1 ? (sizeof(int32) * 8) - (Math::Log2(min1) + 1) : (sizeof(int32) * 8);
            int32 max1FreeTopBitCount = max1 ? (sizeof(int32) * 8) - (Math::Log2(max1) + 1) : (sizeof(int32) * 8);
            if (min1FreeTopBitCount <= max2 || max1FreeTopBitCount <= max2)
            {
                // If the shift is going to touch the sign bit return the max range
                min = INT32_MIN;
                max = INT32_MAX;
            }
            else
            {
                // Compute max
                // Turn values like 0x1010 into 0x1111
                if (min1)
                {
                    min1 = 1 << Math::Log2(min1);
                    min1 = (min1 << 1) - 1;
                }
                if (max1)
                {
                    max1 = 1 << Math::Log2(max1);
                    max1 = (uint32)(max1 << 1) - 1;
                }

                if (max1 > 0)
                {
                    int32 nrTopBits = (sizeof(int32) * 8) - Math::Log2(max1);
                    if (nrTopBits < ::min(max2, 30))
                        max = INT32_MAX;
                    else
                        max = ::max((max1 << ::min(max2, 30)) & ~0x80000000, (min1 << min2) & ~0x80000000);
                }
                else
                {
                    max = (max1 << min2) & ~0x80000000;
                }
                // Compute min
                if (min1 < 0)
                {
                    min = ::min(min1 << max2, max1 << max2);
                }
                else
                {
                    min = ::min(min1 << min2, max1 << max2);
                }
                // Turn values like 0x1110 into 0x1000 (round min down to a single bit)
                if (min)
                {
                    min = 1 << Math::Log2(min);
                }
            }
        }
        break;

    case Js::OpCode::Shr_A:
        // Shift count: same normalization as Shl_A.
        if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
        {
            min2 = 0;
            max2 = 0x1F;
        }
        else
        {
            min2 &= 0x1F;
            max2 &= 0x1F;
        }

        // Compute max: arithmetic shift preserves sign, so a negative max shrinks
        // toward -1 with larger counts, a positive max with smaller counts.
        if (max1 < 0)
        {
            max = max1 >> max2;
        }
        else
        {
            max = max1 >> min2;
        }

        // Compute min (mirror of the max logic)
        if (min1 < 0)
        {
            min = min1 >> min2;
        }
        else
        {
            min = min1 >> max2;
        }
        break;

    case Js::OpCode::ShrU_A:

        // shift count is constant zero
        if ((min2 == max2) && (max2 & 0x1f) == 0)
        {
            // We can't encode uint32 result, so it has to be used as int32 only or the original value is positive.
            Assert(instr->ignoreIntOverflow || min1 >= 0);
            // We can transfer the signed int32 range.
            min = min1;
            max = max1;
            break;
        }

        const IntConstantBounds src2NewBounds = IntConstantBounds(min2, max2).And_0x1f();
        // Zero is only allowed if result is always a signed int32 or always used as a signed int32
        Assert(min1 >= 0 || instr->ignoreIntOverflow || !src2NewBounds.Contains(0));
        min2 = src2NewBounds.LowerBound();
        max2 = src2NewBounds.UpperBound();

        Assert(min2 <= max2);
        // zero shift count is only allowed if result is used as int32 and/or value is positive
        Assert(min2 > 0 || instr->ignoreIntOverflow || min1 >= 0);

        uint32 umin1 = (uint32)min1;
        uint32 umax1 = (uint32)max1;

        if (umin1 > umax1)
        {
            uint32 temp = umax1;
            umax1 = umin1;
            umin1 = temp;
        }

        Assert(min2 >= 0 && max2 < 32);

        // Compute max: if the value can be negative, its unsigned reinterpretation
        // can be anything up to UINT32_MAX.
        if (min1 < 0)
        {
            umax1 = UINT32_MAX;
        }
        max = umax1 >> min2;

        // Compute min: a range straddling zero can shift down to zero.
        if (min1 <= 0 && max1 >=0)
        {
            min = 0;
        }
        else
        {
            min = umin1 >> max2;
        }

        // We should be able to fit uint32 range as int32
        Assert(instr->ignoreIntOverflow || (min >= 0 && max >= 0) );
        if (min > max)
        {
            // can only happen if shift count can be zero
            Assert(min2 == 0 && (instr->ignoreIntOverflow || min1 >= 0));
            min = Int32ConstMin;
            max = Int32ConstMax;
        }

        break;
    }

    *pNewMin = min;
    *pNewMax = max;
}
// Drives type specialization and constant folding for a single instruction.
// Attempts, in order: SIMD specialization, unary const-fold or unary type spec,
// binary const-fold, binary type spec (with a post-spec identity peep), and branch
// const-fold. If nothing specialized, converts the srcs and dst back to var.
// Returns the instruction (which may have been replaced by the folding helpers).
// *redoTypeSpecRef is set when the caller must restart specialization of this
// instruction; *forceInvariantHoistingRef may be set by unary type spec.
IR::Instr *
GlobOpt::TypeSpecialization(
    IR::Instr *instr,
    Value **pSrc1Val,
    Value **pSrc2Val,
    Value **pDstVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Value *&src1Val = *pSrc1Val;
    Value *&src2Val = *pSrc2Val;
    *redoTypeSpecRef = false;
    Assert(!*forceInvariantHoistingRef);

    // Per-instruction state consumed by the specialization helpers below.
    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    // - Int32 values that can't be tagged are created as float constant values instead because a JavascriptNumber var is needed
    //   for that value at runtime. For the purposes of type specialization, recover the int32 values so that they will be
    //   treated as ints.
    // - If int overflow does not matter for the instruction, we can additionally treat uint32 values as int32 values because
    //   the value resulting from the operation will eventually be converted to int32 anyway
    Value *const src1OriginalVal = src1Val;
    Value *const src2OriginalVal = src2Val;

#ifdef ENABLE_SIMDJS
    // SIMD_JS
    if (TypeSpecializeSimd128(instr, pSrc1Val, pSrc2Val, pDstVal))
    {
        return instr;
    }
#endif

    if(!instr->ShouldCheckForIntOverflow())
    {
        // Recover int32/uint32 constants hidden inside float-constant values.
        if(src1Val && src1Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src1Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src1Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
        if(src2Val && src2Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src2Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src2Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
    }

    // Restore the original (possibly float-constant) values on scope exit.
    const AutoRestoreVal autoRestoreSrc1Val(src1OriginalVal, &src1Val);
    const AutoRestoreVal autoRestoreSrc2Val(src2OriginalVal, &src2Val);

    if (src1Val && instr->GetSrc2() == nullptr)
    {
        // Unary
        // Note make sure that native array StElemI gets to TypeSpecializeStElem. Do this for typed arrays, too?
        int32 intConstantValue;
        if (!this->IsLoopPrePass() &&
            !instr->IsBranchInstr() &&
            src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) &&
            !(
                // Nothing to fold for element stores. Go into type specialization to see if they can at least be specialized.
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC ||
                instr->m_opcode == Js::OpCode::MultiBr ||
                instr->m_opcode == Js::OpCode::InlineArrayPop
            ))
        {
            if (OptConstFoldUnary(&instr, intConstantValue, src1Val == src1OriginalVal, pDstVal))
            {
                return instr;
            }
        }
        else if (this->TypeSpecializeUnary(
                    &instr,
                    &src1Val,
                    pDstVal,
                    src1OriginalVal,
                    redoTypeSpecRef,
                    forceInvariantHoistingRef))
        {
            return instr;
        }
        else if(*redoTypeSpecRef)
        {
            return instr;
        }
    }
    else if (instr->GetSrc2() && !instr->IsBranchInstr())
    {
        // Binary
        if (!this->IsLoopPrePass())
        {
            // OptConstFoldBinary doesn't do type spec, so only deal with things we are sure are int (IntConstant and IntRange)
            // and not just likely ints TypeSpecializeBinary will deal with type specializing them and fold them again
            IntConstantBounds src1IntConstantBounds, src2IntConstantBounds;
            if (src1Val && src1Val->GetValueInfo()->TryGetIntConstantBounds(&src1IntConstantBounds))
            {
                if (src2Val && src2Val->GetValueInfo()->TryGetIntConstantBounds(&src2IntConstantBounds))
                {
                    if (this->OptConstFoldBinary(&instr, src1IntConstantBounds, src2IntConstantBounds, pDstVal))
                    {
                        return instr;
                    }
                }
            }
        }
    }
    if (instr->GetSrc2() && this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2OriginalVal, redoTypeSpecRef))
    {
        // Binary op was specialized; try an identity-element peep on a constant src.
        if (!this->IsLoopPrePass() &&
            instr->m_opcode != Js::OpCode::Nop &&
            instr->m_opcode != Js::OpCode::Br &&    // We may have const fold a branch

            // Cannot const-peep if the result of the operation is required for a bailout check
            !(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnResultConditions))
        {
            if (src1Val && src1Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc1(), pDstVal, src1Val->GetValueInfo()))
                {
                    return instr;
                }
            }
            else if (src2Val && src2Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc2(), pDstVal, src2Val->GetValueInfo()))
                {
                    return instr;
                }
            }
        }
        return instr;
    }
    else if(*redoTypeSpecRef)
    {
        return instr;
    }

    if (instr->IsBranchInstr() && !this->IsLoopPrePass())
    {
        if (this->OptConstFoldBranch(instr, src1Val, src2Val, pDstVal))
        {
            return instr;
        }
    }
    // We didn't type specialize, make sure the srcs are unspecialized
    IR::Opnd *src1 = instr->GetSrc1();
    if (src1)
    {
        instr = this->ToVarUses(instr, src1, false, src1Val);

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            instr = this->ToVarUses(instr, src2, false, src2Val);
        }
    }

    IR::Opnd *dst = instr->GetDst();
    if (dst)
    {
        instr = this->ToVarUses(instr, dst, true, nullptr);

        // Handling for instructions other than built-ins that may require only dst type specialization
        // should be added here.
        if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode) && !GetIsAsmJSFunc()) // don't need to do typespec for asmjs
        {
            this->TypeSpecializeInlineBuiltInDst(&instr, pDstVal);
            return instr;
        }

        // Clear the int specialized bit on the dst.
        if (dst->IsRegOpnd())
        {
            IR::RegOpnd *dstRegOpnd = dst->AsRegOpnd();
            if (!dstRegOpnd->m_sym->IsTypeSpec())
            {
                this->ToVarRegOpnd(dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsInt32())
            {
                this->ToInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsUInt32() && GetIsAsmJSFunc())
            {
                this->ToUInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsFloat64())
            {
                this->ToFloat64Dst(instr, dstRegOpnd, this->currentBlock);
            }
        }
        else if (dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym())
        {
            this->ToVarStackSym(dst->AsSymOpnd()->m_sym->AsStackSym(), this->currentBlock);
        }
    }

    return instr;
}
// Peephole-folds a binary op where one source (constSrc) is a known int constant
// into a plain Ld_A of the surviving operand, when the constant is an identity
// (or absorbing) element for the op: x+0, x*1, x|0, x|-1, x&-1, x&0, x>>0, x/1.
// 'valuInfo' is the value info for constSrc; pDstVal is unused here but keeps the
// signature parallel with the other OptConst* helpers. Returns true and rewrites
// the instruction on success; false when the fold is unsafe or doesn't apply.
bool
GlobOpt::OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *valuInfo)
{
    int32 value;
    IR::Opnd *src;
    IR::Opnd *nonConstSrc = (constSrc == instr->GetSrc1() ? instr->GetSrc2() : instr->GetSrc1());

    // Try to find the value from value info first
    if (valuInfo->TryGetIntConstantValue(&value))
    {
    }
    else if (constSrc->IsAddrOpnd())
    {
        IR::AddrOpnd *addrOpnd = constSrc->AsAddrOpnd();
#ifdef _M_X64
        Assert(addrOpnd->IsVar() || Math::FitsInDWord((size_t)addrOpnd->m_address));
#else
        Assert(sizeof(value) == sizeof(addrOpnd->m_address));
#endif

        if (addrOpnd->IsVar())
        {
            // Tagged-int var: untag to get the int32 value.
            value = Js::TaggedInt::ToInt32(addrOpnd->m_address);
        }
        else
        {
            // We asserted that the address will fit in a DWORD above
            value = ::Math::PointerCastToIntegral<int32>(constSrc->AsAddrOpnd()->m_address);
        }
    }
    else if (constSrc->IsIntConstOpnd())
    {
        value = constSrc->AsIntConstOpnd()->AsInt32();
    }
    else
    {
        return false;
    }

    switch(instr->m_opcode)
    {
        // Can't do all Add_A because of string concats.
        // Sub_A cannot be transformed to a NEG_A because 0 - 0 != -0
    case Js::OpCode::Add_A:
        src = nonConstSrc;

        if (!src->GetValueType().IsInt())
        {
            // 0 + -0 != -0
            // "Foo" + 0 != "Foo"
            return false;
        }
        // fall-through

    case Js::OpCode::Add_Ptr:
    case Js::OpCode::Add_I4:
        // x + 0 == x
        if (value != 0)
        {
            return false;
        }
        if (constSrc == instr->GetSrc1())
        {
            src = instr->GetSrc2();
        }
        else
        {
            src = instr->GetSrc1();
        }
        break;

    case Js::OpCode::Mul_A:
    case Js::OpCode::Mul_I4:
        if (value == 0)
        {
            // -0 * 0 != 0
            return false;
        }
        else if (value == 1)
        {
            // x * 1 == x
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Div_A:
        // x / 1 == x (constant must be the divisor)
        if (value == 1 && constSrc == instr->GetSrc2())
        {
            src = instr->GetSrc1();
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Or_I4:
        if (value == -1)
        {
            // x | -1 == -1: the constant absorbs
            src = constSrc;
        }
        else if (value == 0)
        {
            // x | 0 == x
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::And_I4:
        if (value == -1)
        {
            // x & -1 == x
            src = nonConstSrc;
        }
        else if (value == 0)
        {
            // x & 0 == 0: the constant absorbs
            src = constSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        // Only a zero shift count (the constant as src2) is an identity.
        if (value != 0 || constSrc != instr->GetSrc2())
        {
            return false;
        }
        src = instr->GetSrc1();
        break;

    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    // Rewrite as 'dst = Ld_A src', dropping the operand that folded away.
    if (src == instr->GetSrc1())
    {
        instr->FreeSrc2();
    }
    else
    {
        Assert(src == instr->GetSrc2());
        instr->ReplaceSrc1(instr->UnlinkSrc2());
    }

    instr->m_opcode = Js::OpCode::Ld_A;

    return true;
}
  8217. Js::Var // TODO: michhol OOP JIT, shouldn't play with Vars
  8218. GlobOpt::GetConstantVar(IR::Opnd *opnd, Value *val)
  8219. {
  8220. ValueInfo *valueInfo = val->GetValueInfo();
  8221. if (valueInfo->IsVarConstant() && valueInfo->IsPrimitive())
  8222. {
  8223. return valueInfo->AsVarConstant()->VarValue();
  8224. }
  8225. if (opnd->IsAddrOpnd())
  8226. {
  8227. IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
  8228. if (addrOpnd->IsVar())
  8229. {
  8230. return addrOpnd->m_address;
  8231. }
  8232. }
  8233. else if (opnd->IsIntConstOpnd())
  8234. {
  8235. if (!Js::TaggedInt::IsOverflow(opnd->AsIntConstOpnd()->AsInt32()))
  8236. {
  8237. return Js::TaggedInt::ToVarUnchecked(opnd->AsIntConstOpnd()->AsInt32());
  8238. }
  8239. }
  8240. else if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->IsSingleDef())
  8241. {
  8242. if (valueInfo->IsBoolean())
  8243. {
  8244. IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
  8245. if (defInstr->m_opcode != Js::OpCode::Ld_A || !defInstr->GetSrc1()->IsAddrOpnd())
  8246. {
  8247. return nullptr;
  8248. }
  8249. Assert(defInstr->GetSrc1()->AsAddrOpnd()->IsVar());
  8250. return defInstr->GetSrc1()->AsAddrOpnd()->m_address;
  8251. }
  8252. else if (valueInfo->IsUndefined())
  8253. {
  8254. return (Js::Var)this->func->GetScriptContextInfo()->GetUndefinedAddr();
  8255. }
  8256. else if (valueInfo->IsNull())
  8257. {
  8258. return (Js::Var)this->func->GetScriptContextInfo()->GetNullAddr();
  8259. }
  8260. }
  8261. return nullptr;
  8262. }
  8263. bool BoolAndIntStaticAndTypeMismatch(Value* src1Val, Value* src2Val, Js::Var src1Var, Js::Var src2Var)
  8264. {
  8265. ValueInfo *src1ValInfo = src1Val->GetValueInfo();
  8266. ValueInfo *src2ValInfo = src2Val->GetValueInfo();
  8267. return (src1ValInfo->IsNumber() && src1Var && src2ValInfo->IsBoolean() && src1Var != Js::TaggedInt::ToVarUnchecked(0) && src1Var != Js::TaggedInt::ToVarUnchecked(1)) ||
  8268. (src2ValInfo->IsNumber() && src2Var && src1ValInfo->IsBoolean() && src2Var != Js::TaggedInt::ToVarUnchecked(0) && src2Var != Js::TaggedInt::ToVarUnchecked(1));
  8269. }
// Attempts to constant-fold a conditional branch. The outcome is computed either
// from the concrete constant vars of both sources, or — when only value-type
// information is available — from static type facts that decide the comparison
// (e.g. strict equality between provably different types is false). On success
// the branch is rewritten via OptConstFoldBr (or turned into a Nop for a
// never-taken asm.js BrFalse_I4) and true is returned; false means the branch
// must remain as-is.
bool
GlobOpt::OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal)
{
    if (!src1Val)
    {
        return false;
    }

    Js::Var src1Var = this->GetConstantVar(instr->GetSrc1(), src1Val);

    Js::Var src2Var = nullptr;

    if (instr->GetSrc2())
    {
        if (!src2Val)
        {
            return false;
        }

        src2Var = this->GetConstantVar(instr->GetSrc2(), src2Val);
    }

    // Make sure GetConstantVar only returns primitives.
    // TODO: OOP JIT, enabled these asserts
    //Assert(!src1Var || !Js::JavascriptOperators::IsObject(src1Var));
    //Assert(!src2Var || !Js::JavascriptOperators::IsObject(src2Var));

    BOOL result;
    int32 constVal;
    switch (instr->m_opcode)
    {
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        if (!src1Var || !src2Var)
        {
            // No concrete vars: a number-vs-boolean mismatch still decides '==' as false.
            if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
            {
                result = false;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::Equal(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        if (!src1Var || !src2Var)
        {
            // Same mismatch check as BrEq_A, with the result inverted for '!='.
            if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
            {
                result = true;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::NotEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        if (!src1Var || !src2Var)
        {
            // Strict equality between a value of a definite type and a value that
            // has provably never held that type is statically false.
            ValueInfo *src1ValInfo = src1Val->GetValueInfo();
            ValueInfo *src2ValInfo = src2Val->GetValueInfo();
            if (
                (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
                (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
                (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
                (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
                (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||

                (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
                (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
                (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
                (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
                (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
               )
            {
                result = false;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::StrictEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        if (!src1Var || !src2Var)
        {
            // Same static type-lattice check as BrSrEq_A, inverted for '!=='.
            ValueInfo *src1ValInfo = src1Val->GetValueInfo();
            ValueInfo *src2ValInfo = src2Val->GetValueInfo();
            if (
                (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
                (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
                (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
                (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
                (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||

                (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
                (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
                (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
                (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
                (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
               )
            {
                result = true;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::NotStrictEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
        if(src1ValueInfo->IsNull() || src1ValueInfo->IsUndefined())
        {
            // null/undefined are falsy without needing a concrete var.
            result = instr->m_opcode == Js::OpCode::BrFalse_A;
            break;
        }
        if(src1ValueInfo->IsObject() && src1ValueInfo->GetObjectType() > ObjectType::Object)
        {
            // Specific object types that are tracked are equivalent to 'true'
            result = instr->m_opcode == Js::OpCode::BrTrue_A;
            break;
        }

        if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
        {
            // TODO: OOP JIT, const folding
            return false;
        }

        if (!src1Var)
        {
            return false;
        }
        result = Js::JavascriptConversion::ToBoolean(src1Var, this->func->GetScriptContext());
        if(instr->m_opcode == Js::OpCode::BrFalse_A)
        {
            result = !result;
        }
        break;
    }
    case Js::OpCode::BrFalse_I4:
        // this path would probably work outside of asm.js, but we should verify that if we ever hit this scenario
        Assert(GetIsAsmJSFunc());
        constVal = 0;
        if (src1Val->GetValueInfo()->TryGetIntConstantValue(&constVal) && constVal != 0)
        {
            // Condition is a known non-zero int: the branch is never taken.
            instr->FreeSrc1();
            if (instr->GetSrc2())
            {
                instr->FreeSrc2();
            }
            instr->m_opcode = Js::OpCode::Nop;
            return true;
        }
        return false;

    default:
        return false;
    }

    this->OptConstFoldBr(!!result, instr);

    return true;
}
// Attempts to constant-fold a unary instruction whose source is the known
// int32 constant 'intConstantValue'. On success the instruction is rewritten
// into a constant load (Ld_I4 / LdC_A_I4 for int results, LdC_F8_R8 / LdC_A_R8
// for float results), *pDstVal is updated (unless OptDst will copy src1Val),
// and true is returned. Returns false when folding is unsafe or unsupported
// (e.g. would produce -0.0, would overflow int32, OOP JIT const-fold TODO).
// 'isUsingOriginalSrc1Value' indicates the caller kept the original src1
// value, in which case the dst value is left for OptDst to propagate.
bool
GlobOpt::OptConstFoldUnary(
    IR::Instr * *pInstr,
    const int32 intConstantValue,
    const bool isUsingOriginalSrc1Value,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value = 0;
    IR::Opnd *constOpnd;
    bool isInt = true;          // Is the folded result representable as int32?
    bool doSetDstVal = true;    // Set *pDstVal here, or let OptDst copy src1Val?
    FloatConstType fValue = 0.0;

    if (!DoConstFold())
    {
        return false;
    }

    if (instr->GetDst() && !instr->GetDst()->IsRegOpnd())
    {
        return false;
    }

    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_A:
        if (intConstantValue == 0)
        {
            // Could fold to -0.0
            return false;
        }

        if (Int32Math::Neg(intConstantValue, &value))
        {
            // Negation overflowed int32 (INT32_MIN) - don't fold.
            return false;
        }
        break;

    case Js::OpCode::Not_A:
        Int32Math::Not(intConstantValue, &value);
        break;

    case Js::OpCode::Ld_A:
        if (instr->HasBailOutInfo())
        {
            //The profile data for switch expr can be string and in GlobOpt we realize it is an int.
            if(instr->GetBailOutKind() == IR::BailOutExpectingString)
            {
                throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
            }
            Assert(instr->GetBailOutKind() == IR::BailOutExpectingInteger);
            instr->ClearBailOutInfo();
        }
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Conv_Num:
    case Js::OpCode::LdC_A_I4:
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;

    case Js::OpCode::Incr_A:
        if (Int32Math::Inc(intConstantValue, &value))
        {
            // Increment overflowed int32 - don't fold.
            return false;
        }
        break;

    case Js::OpCode::Decr_A:
        if (Int32Math::Dec(intConstantValue, &value))
        {
            // Decrement overflowed int32 - don't fold.
            return false;
        }
        break;

    // Math built-ins on an int constant fold to a float constant (unless the
    // epilogue below finds the float result is exactly representable as int32).
    case Js::OpCode::InlineMathAcos:
        fValue = Js::Math::Acos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAsin:
        fValue = Js::Math::Asin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathAtan:
        fValue = Js::Math::Atan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathCos:
        fValue = Js::Math::Cos((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathExp:
        fValue = Js::Math::Exp((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathLog:
        fValue = Js::Math::Log((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSin:
        fValue = Js::Math::Sin((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathSqrt:
        fValue = ::sqrt((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathTan:
        fValue = ::tan((double)intConstantValue);
        isInt = false;
        break;

    case Js::OpCode::InlineMathFround:
        // Math.fround: round to nearest float32, then widen back to double.
        fValue = (double) (float) intConstantValue;
        isInt = false;
        break;

    case Js::OpCode::InlineMathAbs:
        if (intConstantValue == INT32_MIN)
        {
            if (instr->GetDst()->IsInt32())
            {
                // if dst is an int (e.g. in asm.js), we should coerce it, not convert to float
                value = static_cast<int32>(2147483648U);
            }
            else
            {
                // Rejit with AggressiveIntTypeSpecDisabled for Math.abs(INT32_MIN) because it causes dst
                // to be float type which could be different with previous type spec result in LoopPrePass
                throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
            }
        }
        else
        {
            value = ::abs(intConstantValue);
        }
        break;

    case Js::OpCode::InlineMathClz:
        DWORD clz;
        if (_BitScanReverse(&clz, intConstantValue))
        {
            // clz is the index of the highest set bit; clz32 counts from the top.
            value = 31 - clz;
        }
        else
        {
            // No bits set: clz32(0) == 32.
            value = 32;
        }
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::Ctz:
        Assert(func->GetJITFunctionBody()->IsWasmFunction());
        Assert(!instr->HasBailOutInfo());
        DWORD ctz;
        if (_BitScanForward(&ctz, intConstantValue))
        {
            value = ctz;
        }
        else
        {
            // No bits set: ctz(0) == 32.
            value = 32;
        }
        break;

    // floor/ceil/round of an int constant are the constant itself.
    case Js::OpCode::InlineMathFloor:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathCeil:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::InlineMathRound:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;
    case Js::OpCode::ToVar:
        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            // Can't be represented as a tagged int var - leave the ToVar alone.
            return false;
        }
        else
        {
            value = intConstantValue;
            instr->ClearBailOutInfo();
            break;
        }
    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    Assert(!instr->HasBailOutInfo()); // If we are, in fact, successful in constant folding the instruction, there is no point in having the bailoutinfo around anymore.
                                      // Make sure that it is cleared if it was initially present.
    if (!isInt)
    {
        // If the float result is exactly representable as int32, fold to int instead.
        value = (int32)fValue;
        if (fValue == (double)value)
        {
            isInt = true;
        }
    }
    if (isInt)
    {
        constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %d\n"), value);
    }
    else
    {
        constOpnd = IR::FloatConstOpnd::New(fValue, TyFloat64, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %f\n"), fValue);
    }
    instr->ReplaceSrc1(constOpnd);

    // Re-run src optimization on the new constant operand.
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (isInt)
    {
        if (dstSym->IsSingleDef())
        {
            dstSym->SetIsIntConst(value);
        }

        if (doSetDstVal)
        {
            *pDstVal = GetIntConstantValue(value, instr, dst);
        }

        if (IsTypeSpecPhaseOff(this->func))
        {
            instr->m_opcode = Js::OpCode::LdC_A_I4;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            instr->m_opcode = Js::OpCode::Ld_I4;
            this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);

            // ToInt32Dst may have replaced the dst sym with its int32 version.
            StackSym * currDstSym = instr->GetDst()->AsRegOpnd()->m_sym;
            if (currDstSym->IsSingleDef())
            {
                currDstSym->SetIsIntConst(value);
            }
        }
    }
    else
    {
        *pDstVal = NewFloatConstantValue(fValue, dst);

        if (IsTypeSpecPhaseOff(this->func))
        {
            instr->m_opcode = Js::OpCode::LdC_A_R8;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            instr->m_opcode = Js::OpCode::LdC_F8_R8;
            this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
        }
    }
    return true;
}
  8718. //------------------------------------------------------------------------------------------------------
  8719. // Type specialization
  8720. //------------------------------------------------------------------------------------------------------
  8721. bool
  8722. GlobOpt::IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val)
  8723. {
  8724. Assert(src);
  8725. Assert(val);
  8726. ValueInfo *valueInfo = val->GetValueInfo();
  8727. Assert(valueInfo->IsLikelyInt());
  8728. // If it is not known that the operand is definitely an int, the operand is not already type-specialized, and it's not live
  8729. // in the loop landing pad (if we're in a loop), it's probably not worth type-specializing this instruction. The common case
  8730. // where type-specializing this would be bad is where the operations are entirely on properties or array elements, where the
  8731. // ratio of FromVars and ToVars to the number of actual operations is high, and the conversions would dominate the time
  8732. // spent. On the other hand, if we're using a function formal parameter more than once, it would probably be worth
  8733. // type-specializing it, hence the IsDead check on the operands.
  8734. return
  8735. valueInfo->IsInt() ||
  8736. valueInfo->HasIntConstantValue(true) ||
  8737. !src->GetIsDead() ||
  8738. !src->IsRegOpnd() ||
  8739. this->IsInt32TypeSpecialized(src->AsRegOpnd()->m_sym, this->currentBlock) ||
  8740. (this->currentBlock->loop && this->IsLive(src->AsRegOpnd()->m_sym, this->currentBlock->loop->landingPad));
  8741. }
  8742. bool
  8743. GlobOpt::IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst)
  8744. {
  8745. Assert(dst);
  8746. const auto sym = dst->AsRegOpnd()->m_sym;
  8747. return
  8748. this->IsInt32TypeSpecialized(sym, this->currentBlock) ||
  8749. (this->currentBlock->loop && this->IsLive(sym, this->currentBlock->loop->landingPad));
  8750. }
  8751. bool
  8752. GlobOpt::IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val)
  8753. {
  8754. Assert(instr);
  8755. const auto src1 = instr->GetSrc1();
  8756. const auto src2 = instr->GetSrc2();
  8757. // In addition to checking each operand and the destination, if for any reason we only have to do a maximum of two
  8758. // conversions instead of the worst-case 3 conversions, it's probably worth specializing.
  8759. if (IsWorthSpecializingToInt32DueToSrc(src1, src1Val) ||
  8760. (src2Val && IsWorthSpecializingToInt32DueToSrc(src2, src2Val)))
  8761. {
  8762. return true;
  8763. }
  8764. IR::Opnd *dst = instr->GetDst();
  8765. if (!dst || IsWorthSpecializingToInt32DueToDst(dst))
  8766. {
  8767. return true;
  8768. }
  8769. if (dst->IsEqual(src1) || (src2Val && (dst->IsEqual(src2) || src1->IsEqual(src2))))
  8770. {
  8771. return true;
  8772. }
  8773. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  8774. // Skip useless Ld_A's
  8775. do
  8776. {
  8777. switch (instrNext->m_opcode)
  8778. {
  8779. case Js::OpCode::Ld_A:
  8780. if (!dst->IsEqual(instrNext->GetSrc1()))
  8781. {
  8782. goto done;
  8783. }
  8784. dst = instrNext->GetDst();
  8785. break;
  8786. case Js::OpCode::LdFld:
  8787. case Js::OpCode::LdRootFld:
  8788. case Js::OpCode::LdRootFldForTypeOf:
  8789. case Js::OpCode::LdFldForTypeOf:
  8790. case Js::OpCode::LdElemI_A:
  8791. case Js::OpCode::ByteCodeUses:
  8792. break;
  8793. default:
  8794. goto done;
  8795. }
  8796. instrNext = instrNext->GetNextRealInstrOrLabel();
  8797. } while (true);
  8798. done:
  8799. // If the next instr could also be type specialized, then it is probably worth it.
  8800. if ((instrNext->GetSrc1() && dst->IsEqual(instrNext->GetSrc1())) || (instrNext->GetSrc2() && dst->IsEqual(instrNext->GetSrc2())))
  8801. {
  8802. switch (instrNext->m_opcode)
  8803. {
  8804. case Js::OpCode::Add_A:
  8805. case Js::OpCode::Sub_A:
  8806. case Js::OpCode::Mul_A:
  8807. case Js::OpCode::Div_A:
  8808. case Js::OpCode::Rem_A:
  8809. case Js::OpCode::Xor_A:
  8810. case Js::OpCode::And_A:
  8811. case Js::OpCode::Or_A:
  8812. case Js::OpCode::Shl_A:
  8813. case Js::OpCode::Shr_A:
  8814. case Js::OpCode::Incr_A:
  8815. case Js::OpCode::Decr_A:
  8816. case Js::OpCode::Neg_A:
  8817. case Js::OpCode::Not_A:
  8818. case Js::OpCode::Conv_Num:
  8819. case Js::OpCode::BrEq_I4:
  8820. case Js::OpCode::BrTrue_I4:
  8821. case Js::OpCode::BrFalse_I4:
  8822. case Js::OpCode::BrGe_I4:
  8823. case Js::OpCode::BrGt_I4:
  8824. case Js::OpCode::BrLt_I4:
  8825. case Js::OpCode::BrLe_I4:
  8826. case Js::OpCode::BrNeq_I4:
  8827. return true;
  8828. }
  8829. }
  8830. return false;
  8831. }
  8832. bool
  8833. GlobOpt::TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal)
  8834. {
  8835. Assert(src1Val->GetValueInfo()->IsNumber());
  8836. if (this->IsLoopPrePass())
  8837. {
  8838. return false;
  8839. }
  8840. switch (instr->m_opcode)
  8841. {
  8842. case Js::OpCode::Conv_Num:
  8843. // Optimize Conv_Num away since we know this is a number
  8844. instr->m_opcode = Js::OpCode::Ld_A;
  8845. return false;
  8846. }
  8847. return false;
  8848. }
// Top-level unary type-specialization dispatch. Tries, in order: inline
// built-in specialization, LdLen specialization, int specialization (when the
// source has a known int range), then float/number fallbacks. May rebind
// *pSrc1Val to src1OriginalVal when falling back. Returns true when the
// instruction was (or will be) specialized.
bool
GlobOpt::TypeSpecializeUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;
    Assert(src1Val);

    // We don't need to do typespec for asmjs
    if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
    {
        return false;
    }

    IR::Instr *&instr = *pInstr;
    int32 min, max;

    // Inline built-ins explicitly specify how srcs/dst must be specialized.
    if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        TypeSpecializeInlineBuiltInUnary(pInstr, &src1Val, pDstVal, src1OriginalVal, redoTypeSpecRef);
        return true;
    }

    // Consider: If type spec wasn't completely done, make sure that we don't type-spec the dst 2nd time.
    if(instr->m_opcode == Js::OpCode::LdLen_A && TypeSpecializeLdLen(&instr, &src1Val, pDstVal, forceInvariantHoistingRef))
    {
        return true;
    }

    if (!src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, this->DoAggressiveIntTypeSpec()))
    {
        // No int range available - fall back to the original (pre-conversion)
        // value and try float/number specialization instead.
        src1Val = src1OriginalVal;
        if (src1Val->GetValueInfo()->IsLikelyFloat())
        {
            // Try to type specialize to float
            return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
        }
        else if (src1Val->GetValueInfo()->IsNumber())
        {
            return TypeSpecializeNumberUnary(instr, src1Val, pDstVal);
        }
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    return this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, min, max, src1OriginalVal, redoTypeSpecRef);
}
// Type-specializes the srcs/dst of a unary inline built-in according to the
// built-in's declared flags (float-only args, Math.abs int-or-float,
// floor/ceil/round float-src/int-dst, Array.prototype.pop, clz32).
// NOTE(review): despite an older comment claiming a bool result, this
// function returns void - the caller (TypeSpecializeUnary) unconditionally
// treats built-ins as handled.
void
GlobOpt::TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode);   // From actual instr, not profile based.
    Assert(builtInId != Js::BuiltinFunction::None);

    // Consider using different bailout for float/int FromVars, so that when the arg cannot be converted to number we don't disable
    // type spec for other parts of the big function but rather just don't inline that built-in instr.
    // E.g. could do that if the value is not likelyInt/likelyFloat.

    Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
    bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_Args) == Js::BuiltInFlags::BIF_TypeSpecUnaryToFloat;
    if (areAllArgsAlwaysFloat)
    {
        // InlineMathAcos, InlineMathAsin, InlineMathAtan, InlineMathCos, InlineMathExp, InlineMathLog, InlineMathSin, InlineMathSqrt, InlineMathTan.
        Assert(this->DoFloatTypeSpec());

        // Type-spec the src.
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline built-ins the args have to be type-specialized to float, but something failed during the process.");

        // Type-spec the dst.
        this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathAbs)
    {
        // Consider the case when the value is unknown - because of bailout in abs we may disable type spec for the whole function which is too much.
        // First, try int.
        int minVal, maxVal;
        bool shouldTypeSpecToInt = src1Val->GetValueInfo()->GetIntValMinMax(&minVal, &maxVal, /* doAggressiveIntTypeSpec = */ true);
        if (shouldTypeSpecToInt)
        {
            Assert(this->DoAggressiveIntTypeSpec());
            bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, minVal, maxVal, src1OriginalVal, redoTypeSpecRef, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (int), but something failed during the process.");

            if (!this->IsLoopPrePass())
            {
                // Create bailout for INT_MIN which does not have corresponding int value on the positive side.
                // Check int range: if we know the range is out of overflow, we do not need the bail out at all.
                if (minVal == INT32_MIN)
                {
                    GenerateBailAtOperation(&instr, IR::BailOnIntMin);
                }
            }

            // Account for ::abs(INT_MIN) == INT_MIN (which is less than 0).
            maxVal = ::max(
                ::abs(Int32Math::NearestInRangeTo(minVal, INT_MIN + 1, INT_MAX)),
                ::abs(Int32Math::NearestInRangeTo(maxVal, INT_MIN + 1, INT_MAX)));
            minVal = minVal >= 0 ? minVal : 0;
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, nullptr, IR::BailOutInvalid, minVal, maxVal, pDstVal);
        }
        else
        {
            // If we couldn't do int, do float.
            Assert(this->DoFloatTypeSpec());
            src1Val = src1OriginalVal;
            bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (float), but something failed during the process.");

            this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::InlineMathRound)
    {
        // Type specialize src to float
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline Math.floor and Math.ceil the src has to be type-specialized to float, but something failed during the process.");

        // Type specialize dst to int
        this->TypeSpecializeIntDst(
            instr,
            instr->m_opcode,
            nullptr,
            src1Val,
            nullptr,
            IR::BailOutInvalid,
            INT32_MIN,
            INT32_MAX,
            pDstVal);
    }
    else if(instr->m_opcode == Js::OpCode::InlineArrayPop)
    {
        IR::Opnd *const thisOpnd = instr->GetSrc1();

        Assert(thisOpnd);

        // Ensure src1 (Array) is a var
        this->ToVarUses(instr, thisOpnd, false, src1Val);

        if(!this->IsLoopPrePass() && thisOpnd->GetValueType().IsLikelyNativeArray())
        {
            // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during the run time.
            GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
        }

        if(!instr->GetDst())
        {
            // Result of the pop is unused - nothing to specialize.
            return;
        }

        // Try Type Specializing the element (return item from Pop) based on the array's profile data.
        if(thisOpnd->GetValueType().IsLikelyNativeIntArray())
        {
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
        }
        else if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
        {
            this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
        }
        else
        {
            // We reached here so the Element is not yet type specialized. Ensure element is a var
            if(instr->GetDst()->IsRegOpnd())
            {
                this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
            }
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathClz)
    {
        Assert(this->DoAggressiveIntTypeSpec());
        Assert(this->DoLossyIntTypeSpec());
        //Type specialize to int
        bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, INT32_MIN, INT32_MAX, src1OriginalVal, redoTypeSpecRef);
        AssertMsg(retVal, "For clz32, the arg has to be type-specialized to int.");
    }
    else
    {
        AssertMsg(FALSE, "Unsupported built-in!");
    }
}
  9023. void
  9024. GlobOpt::TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal)
  9025. {
  9026. IR::Instr *&instr = *pInstr;
  9027. Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
  9028. switch(instr->m_opcode)
  9029. {
  9030. case Js::OpCode::InlineMathAtan2:
  9031. {
  9032. Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode); // From actual instr, not profile based.
  9033. Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
  9034. bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecAllToFloat) != 0;
  9035. Assert(areAllArgsAlwaysFloat);
  9036. Assert(this->DoFloatTypeSpec());
  9037. // Type-spec the src1, src2 and dst.
  9038. src1Val = src1OriginalVal;
  9039. src2Val = src2OriginalVal;
  9040. bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  9041. AssertMsg(retVal, "For pow and atnan2 the args have to be type-specialized to float, but something failed during the process.");
  9042. break;
  9043. }
  9044. case Js::OpCode::InlineMathPow:
  9045. {
  9046. #ifndef _M_ARM
  9047. if (src2Val->GetValueInfo()->IsLikelyInt())
  9048. {
  9049. bool lossy = false;
  9050. this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, lossy);
  9051. IR::Opnd* src1 = instr->GetSrc1();
  9052. int32 valueMin, valueMax;
  9053. if (src1Val->GetValueInfo()->IsLikelyInt() &&
  9054. this->DoPowIntIntTypeSpec() &&
  9055. src2Val->GetValueInfo()->GetIntValMinMax(&valueMin, &valueMax, this->DoAggressiveIntTypeSpec()) &&
  9056. valueMin >= 0)
  9057. {
  9058. this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
  9059. this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
  9060. if(!this->IsLoopPrePass())
  9061. {
  9062. GenerateBailAtOperation(&instr, IR::BailOutOnPowIntIntOverflow);
  9063. }
  9064. }
  9065. else
  9066. {
  9067. this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);
  9068. TypeSpecializeFloatDst(instr, nullptr, src1Val, src2Val, pDstVal);
  9069. }
  9070. }
  9071. else
  9072. {
  9073. #endif
  9074. this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  9075. #ifndef _M_ARM
  9076. }
  9077. #endif
  9078. break;
  9079. }
  9080. case Js::OpCode::InlineMathImul:
  9081. {
  9082. Assert(this->DoAggressiveIntTypeSpec());
  9083. Assert(this->DoLossyIntTypeSpec());
  9084. //Type specialize to int
  9085. bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, false /* skipDst */);
  9086. AssertMsg(retVal, "For imul, the args have to be type-specialized to int but something failed during the process.");
  9087. break;
  9088. }
  9089. case Js::OpCode::InlineMathMin:
  9090. case Js::OpCode::InlineMathMax:
  9091. {
  9092. if(src1Val->GetValueInfo()->IsLikelyInt() && src2Val->GetValueInfo()->IsLikelyInt())
  9093. {
  9094. // Compute resulting range info
  9095. int32 min1, max1, min2, max2, newMin, newMax;
  9096. Assert(this->DoAggressiveIntTypeSpec());
  9097. src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
  9098. src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
  9099. if (instr->m_opcode == Js::OpCode::InlineMathMin)
  9100. {
  9101. newMin = min(min1, min2);
  9102. newMax = min(max1, max2);
  9103. }
  9104. else
  9105. {
  9106. Assert(instr->m_opcode == Js::OpCode::InlineMathMax);
  9107. newMin = max(min1, min2);
  9108. newMax = max(max1, max2);
  9109. }
  9110. // Type specialize to int
  9111. bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, newMin, newMax, false /* skipDst */);
  9112. AssertMsg(retVal, "For min and max, the args have to be type-specialized to int if any one of the sources is an int, but something failed during the process.");
  9113. }
  9114. // Couldn't type specialize to int, type specialize to float
  9115. else
  9116. {
  9117. Assert(this->DoFloatTypeSpec());
  9118. src1Val = src1OriginalVal;
  9119. src2Val = src2OriginalVal;
  9120. bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  9121. AssertMsg(retVal, "For min and max, the args have to be type-specialized to float if any one of the sources is a float, but something failed during the process.");
  9122. }
  9123. break;
  9124. }
  9125. case Js::OpCode::InlineArrayPush:
  9126. {
  9127. IR::Opnd *const thisOpnd = instr->GetSrc1();
  9128. Assert(thisOpnd);
  9129. if(instr->GetDst() && instr->GetDst()->IsRegOpnd())
  9130. {
  9131. // Set the dst as live here, as the built-ins return early from the TypeSpecialization functions - before the dst is marked as live.
  9132. // Also, we are not specializing the dst separately and we are skipping the dst to be handled when we specialize the instruction above.
  9133. this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
  9134. }
  9135. // Ensure src1 (Array) is a var
  9136. this->ToVarUses(instr, thisOpnd, false, src1Val);
  9137. if(!this->IsLoopPrePass())
  9138. {
  9139. if(thisOpnd->GetValueType().IsLikelyNativeArray())
  9140. {
  9141. // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during run time.
  9142. GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
  9143. }
  9144. else
  9145. {
  9146. GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
  9147. }
  9148. }
  9149. // Try Type Specializing the element based on the array's profile data.
  9150. if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
  9151. {
  9152. src1Val = src1OriginalVal;
  9153. src2Val = src2OriginalVal;
  9154. }
  9155. if((thisOpnd->GetValueType().IsLikelyNativeIntArray() && this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, true))
  9156. || (thisOpnd->GetValueType().IsLikelyNativeFloatArray() && this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal)))
  9157. {
  9158. break;
  9159. }
  9160. // The Element is not yet type specialized. Ensure element is a var
  9161. this->ToVarUses(instr, instr->GetSrc2(), false, src2Val);
  9162. break;
  9163. }
  9164. }
  9165. }
  9166. void
  9167. GlobOpt::TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal)
  9168. {
  9169. IR::Instr *&instr = *pInstr;
  9170. Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
  9171. if (instr->m_opcode == Js::OpCode::InlineMathRandom)
  9172. {
  9173. Assert(this->DoFloatTypeSpec());
  9174. // Type specialize dst to float
  9175. this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
  9176. }
  9177. }
// Fallback after int-specialization was ruled out: decide whether to
// float-specialize the unary instruction or leave it unspecialized.
// Only attempts float specialization on 32-bit builds (!INT32VAR), where an
// untagged-int value would otherwise be boxed as a JavascriptNumber; on
// 64-bit builds this always returns false (leave unspecialized).
bool
GlobOpt::TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal)
{
    // It has been determined that this instruction cannot be int-specialized. We need to determine whether to attempt to
    // float-specialize the instruction, or leave it unspecialized.
#if !INT32VAR
    Value*& src1Val = *pSrc1Val;
    if(src1Val->GetValueInfo()->IsLikelyUntaggedInt())
    {
        // An input range is completely outside the range of an int31. Even if the operation may overflow, it is
        // unlikely to overflow on these operations, so we leave it unspecialized on 64-bit platforms. However, on
        // 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is significantly slower to
        // use in an unspecialized operation compared to a tagged int. So, try to float-specialize the instruction.
        src1Val = src1OriginalVal;
        return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
    }
#endif
    return false;
}
  9197. bool
  9198. GlobOpt::TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst /* = false */)
  9199. {
  9200. // Consider moving the code for int type spec-ing binary functions here.
  9201. IR::Instr *&instr = *pInstr;
  9202. bool lossy = false;
  9203. if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  9204. {
  9205. if(instr->m_opcode == Js::OpCode::InlineArrayPush)
  9206. {
  9207. int32 intConstantValue;
  9208. bool isIntConstMissingItem = src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue);
  9209. if(isIntConstMissingItem)
  9210. {
  9211. isIntConstMissingItem = Js::SparseArraySegment<int>::IsMissingItem(&intConstantValue);
  9212. }
  9213. // Don't specialize if the element is not likelyInt or an IntConst which is a missing item value.
  9214. if(!(src2Val->GetValueInfo()->IsLikelyInt()) || isIntConstMissingItem)
  9215. {
  9216. return false;
  9217. }
  9218. // We don't want to specialize both the source operands, though it is a binary instr.
  9219. IR::Opnd * elementOpnd = instr->GetSrc2();
  9220. this->ToInt32(instr, elementOpnd, this->currentBlock, src2Val, nullptr, lossy);
  9221. }
  9222. else
  9223. {
  9224. IR::Opnd *src1 = instr->GetSrc1();
  9225. this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
  9226. IR::Opnd *src2 = instr->GetSrc2();
  9227. this->ToInt32(instr, src2, this->currentBlock, src2Val, nullptr, lossy);
  9228. }
  9229. if(!skipDst)
  9230. {
  9231. IR::Opnd *dst = instr->GetDst();
  9232. if (dst)
  9233. {
  9234. TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, min, max, pDstVal);
  9235. }
  9236. }
  9237. return true;
  9238. }
  9239. else
  9240. {
  9241. AssertMsg(false, "Yet to move code for other binary functions here");
  9242. return false;
  9243. }
  9244. }
// Attempts to type-specialize a unary (or unary-like) instruction to int32.
//
// pInstr          - in/out: the instruction; may be replaced (e.g. by CSE or bailout generation).
// pSrc1Val        - in/out: the value of src1; may be redirected to the original value for lossy conversions.
// pDstVal         - out: receives the new value created for the dst, when a dst is specialized.
// min/max         - the known int32 range of src1.
// src1OriginalVal - src1's value before any overflow-ignoring adjustments; used for lossy conversions
//                   and for float fallback.
// redoTypeSpecRef - out: set to true if CSE rewrote the expression and type spec should be redone.
// skipDst         - when true, the dst is left untouched (caller handles it).
//
// Returns true if the instruction was specialized (or handled, e.g. const-folded branch),
// false if it was left unspecialized.
bool
GlobOpt::TypeSpecializeIntUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    int32 min,
    int32 max,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    bool isTransfer = false;
    Js::OpCode opcode;
    int32 newMin, newMax;
    bool lossy = false;
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
    bool ignoredNegativeZero = false;
    bool checkTypeSpecWorth = false;

    // If src1 is known not to be an int, int specialization is pointless; try float instead.
    if(instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    AddSubConstantInfo addSubConstantInfo;

    switch(instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (this->IsInt32TypeSpecialized(sym, this->currentBlock) == false)
            {
                // Type specializing an Ld_A isn't worth it, unless the src
                // is already type specialized.
                return false;
            }
        }
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::Conv_Num:
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::LdC_A_I4:
        // Constant load: the resulting range is exactly the constant.
        newMin = newMax = instr->GetSrc1()->AsIntConstOpnd()->AsInt32();
        opcode = Js::OpCode::Ld_I4;
        break;

    case Js::OpCode::Neg_A:
        // Negation has two hazards: -0 (when the source range includes 0) and
        // overflow (Neg(INT32_MIN) overflows in two's complement).
        if (min <= 0 && max >= 0)
        {
            if(instr->ShouldCheckForNegativeZero())
            {
                // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
                if(!DoAggressiveIntTypeSpec())
                {
                    // May result in -0
                    // Consider adding a dynamic check for src1 == 0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == 0 && max == 0)
                {
                    // Always results in -0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnNegativeZero;
            }
            else
            {
                ignoredNegativeZero = true;
            }
        }
        // Negating the old min produces the new max, and vice versa.
        if (Int32Math::Neg(min, &newMax))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == max)
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMax = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if (Int32Math::Neg(max, &newMin))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMin = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if(!instr->ShouldCheckForIntOverflow() && newMin > newMax)
        {
            // When ignoring overflow, the range needs to account for overflow. Since MIN_INT is the only int32 value that
            // overflows on Neg, and the value resulting from overflow is also MIN_INT, if calculating only the new min or new
            // max overflowed but not both, then the new min will be greater than the new max. In that case we need to consider
            // the full range of int32s as possible resulting values.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        opcode = Js::OpCode::Neg_I4;
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Not_A:
        // Bitwise-not implies a lossy ToInt32 on the source.
        if(!DoLossyIntTypeSpec())
        {
            return false;
        }
        this->PropagateIntRangeForNot(min, max, &newMin, &newMax);
        opcode = Js::OpCode::Not_I4;
        lossy = true;
        break;

    case Js::OpCode::Incr_A:
        do // while(false)
        {
            // Relative (symbolic) bounds may prove the increment cannot overflow even
            // when the constant range says it might.
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Inc(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
                    // we use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Inc(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMax = INT32_MAX;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Record (src + 1) so relative bounds can be tracked for the dst.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, 1);
        }

        // Incr_A lowers to Add_I4 with an explicit constant 1 as src2.
        opcode = Js::OpCode::Add_I4;
        if (!this->IsLoopPrePass())
        {
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Decr_A:
        do // while(false)
        {
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(1);
            };

            if (Int32Math::Dec(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints, we
                    // use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Dec(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMin = INT32_MIN;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Record (src - 1) so relative bounds can be tracked for the dst.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, -1);
        }

        // Decr_A lowers to Sub_I4 with an explicit constant 1 as src2.
        opcode = Js::OpCode::Sub_I4;
        if (!this->IsLoopPrePass())
        {
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        // Try to fold the branch away entirely first.
        if(DoConstFold() && !IsLoopPrePass() && TryOptConstFoldBrFalse(instr, src1Val, min, max))
        {
            return true;
        }

        bool specialize = true;
        if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (this->IsInt32TypeSpecialized(sym, this->currentBlock) == false)
            {
                // Type specializing a BrTrue_A/BrFalse_A isn't worth it, unless the src
                // is already type specialized
                specialize = false;
            }
        }
        // Note: bounds are updated even when we then decide not to specialize.
        if(instr->m_opcode == Js::OpCode::BrTrue_A)
        {
            UpdateIntBoundsForNotEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrTrue_I4;
        }
        else
        {
            UpdateIntBoundsForEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrFalse_I4;
        }
        if(!specialize)
        {
            return false;
        }

        newMin = 2; newMax = 1;  // We'll assert if we make a range where min > max
        break;
    }

    case Js::OpCode::MultiBr:
        newMin = min;
        newMax = max;
        opcode = instr->m_opcode;
        break;

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::StElemC:
        // Stores into native-float arrays must use the original (pre-adjustment) value.
        if(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyAnyArrayWithNativeFloatValues())
        {
            src1Val = src1OriginalVal;
        }
        return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
#if !FLOATVAR
    case Js::OpCode::StSlotBoxTemp:
#endif
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StFldStrict:
    case Js::OpCode::StRootFldStrict:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    // For this one we need to implement type specialization
    //case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::Ret:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
        // These consume a var; no int specialization of the instruction itself.
        return false;

    default:
        if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
        {
            newMin = min;
            newMax = max;
            opcode = instr->m_opcode;
            break; // Note: we must keep checkTypeSpecWorth = false to make sure we never return false from this function.
        }
        return false;
    }

    // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it (won't
    // leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value needs to be
    // guaranteed to be an int
    if(checkTypeSpecWorth &&
        !ignoredIntOverflow &&
        !ignoredNegativeZero &&
        instr->ShouldCheckForIntOverflow() &&
        !IsWorthSpecializingToInt32(instr, src1Val))
    {
        // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
        // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
        // the following cases.
        //     - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to be
        //       an int, but since we're not going to specialize this instruction, there won't be a bailout check.
        //     - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
        //       that case, especially if the dst sym is live on the back-edge.
        if(bailOutKind == IR::BailOutInvalid &&
            instr->GetDst() &&
            (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
        {
            *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, nullptr);
        }

        // Undo the src2 added for Incr_A/Decr_A above, since we're not rewriting.
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        return false;
    }

    this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
    this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;

    {
        // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
        Value *src1IndirIndexVal = nullptr;
        Value *src2Val = nullptr;
        if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
        {
            *redoTypeSpecRef = true;
            return false;
        }
    }

    const Js::OpCode originalOpCode = instr->m_opcode;
    if (!this->IsLoopPrePass())
    {
        // No re-write on prepass
        instr->m_opcode = opcode;
    }

    Value *src1ValueToSpecialize = src1Val;
    if(lossy)
    {
        // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
        // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
        // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
        // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
        // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
        // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
        src1ValueToSpecialize = src1OriginalVal;
    }

    // Make sure the srcs are specialized
    IR::Opnd *src1 = instr->GetSrc1();
    this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, lossy);

    if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, bailOutKind);
    }

    if (!skipDst)
    {
        IR::Opnd *dst = instr->GetDst();
        if (dst)
        {
            AssertMsg(!(isTransfer && !this->IsLoopPrePass()) || min == newMin && max == newMax, "If this is just a copy, old/new min/max should be the same");
            TypeSpecializeIntDst(
                instr,
                originalOpCode,
                isTransfer ? src1Val : nullptr,
                src1Val,
                nullptr,
                bailOutKind,
                newMin,
                newMax,
                pDstVal,
                addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
        }
    }

    // Tracing only below this point.
    if(bailOutKind == IR::BailOutInvalid)
    {
        GOPT_TRACE(_u("Type specialized to INT\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
        {
            Output::Print(_u("Type specialized to INT: "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
    }
    else
    {
        GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
        if(bailOutKind & IR::BailOutOnOverflow)
        {
            GOPT_TRACE(_u("    Overflow\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
        if(bailOutKind & IR::BailOutOnNegativeZero)
        {
            GOPT_TRACE(_u("    Zero\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
    }
    return true;
}
  9729. void
  9730. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  9731. {
  9732. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, ValueType::GetInt(IntConstantBounds(newMin, newMax).IsLikelyTaggable()), newMin, newMax, pDstVal, addSubConstantInfo);
  9733. }
  9734. void
  9735. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  9736. {
  9737. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, valueType, 0, 0, pDstVal, addSubConstantInfo);
  9738. }
// Type-specializes the dst of an int-specialized instruction: computes/transfers the dst's
// value (range or relative bounds), records it in the block's value table, and marks the
// dst reg as int32.
//
// originalOpCode    - the pre-specialization opcode; used to decide -0 propagation rules.
// valToTransfer     - non-null for pure copies (Ld_A etc.); the dst reuses src1's value.
// src1Value/src2Value - source values; consulted for prepass precision and -0 tracking.
// bailOutKind       - bailout checks attached to the instruction (overflow / negative zero).
// valueType         - the dst's value type; (newMin, newMax) == (0, 0) means "no precise range".
// pDstVal           - out: receives the dst's value.
// addSubConstantInfo - when present, dst = src + constant; enables relative bounds tracking.
void
GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
{
    Assert(valueType.IsInt() || (valueType.IsNumber() && valueType.IsLikelyInt() && newMin == 0 && newMax == 0));
    Assert(!valToTransfer || valToTransfer == src1Value);
    Assert(!addSubConstantInfo || addSubConstantInfo->HasInfo());

    IR::Opnd *dst = instr->GetDst();
    Assert(dst);

    bool isValueInfoPrecise;
    if(IsLoopPrePass())
    {
        // On the prepass the dst value may change on later iterations; weaken the type accordingly.
        valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, &isValueInfoPrecise);
    }
    else
    {
        isValueInfoPrecise = true;
    }

    // If dst has a circular reference in a loop, it probably won't get specialized. Don't mark the dst as type-specialized on
    // the pre-pass. With aggressive int spec though, it will take care of bailing out if necessary so there's no need to assume
    // that the dst will be a var even if it's live on the back-edge. Also if the op always produces an int32, then there's no
    // ambiguity in the dst's value type even in the prepass.
    if (!DoAggressiveIntTypeSpec() && this->IsLoopPrePass() && !valueType.IsInt())
    {
        if (dst->IsRegOpnd())
        {
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        return;
    }

    const IntBounds *dstBounds = nullptr;
    if(addSubConstantInfo && !addSubConstantInfo->SrcValueIsLikelyConstant() && DoTrackRelativeIntBounds())
    {
        Assert(!ignoredIntOverflowForCurrentInstr);

        // Track bounds for add or sub with a constant. For instance, consider (b = a + 2). The value of 'b' should track that
        // it is equal to (the value of 'a') + 2. Additionally, the value of 'b' should inherit the bounds of 'a', offset by
        // the constant value.
        if(!valueType.IsInt() || !isValueInfoPrecise)
        {
            // Range isn't trustworthy; widen to full int32 before deriving bounds.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        dstBounds =
            IntBounds::Add(
                addSubConstantInfo->SrcValue(),
                addSubConstantInfo->Offset(),
                isValueInfoPrecise,
                IntConstantBounds(newMin, newMax),
                alloc);
    }

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // pre-pass.
    if (valToTransfer)
    {
        // If this is just a copy, no need for creating a new value.
        Assert(!addSubConstantInfo);
        *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
        this->InsertNewValue(*pDstVal, dst);
    }
    else if (valueType.IsInt() && isValueInfoPrecise)
    {
        // Determine whether a -0 result was already prevented (by a source's bailout or this
        // instruction's own BailOutOnNegativeZero) — only relevant when 0 is in the range.
        bool wasNegativeZeroPreventedByBailout = false;
        if(newMin <= 0 && newMax >= 0)
        {
            switch(originalOpCode)
            {
                case Js::OpCode::Add_A:
                    // -0 + -0 == -0
                    Assert(src1Value);
                    Assert(src2Value);
                    wasNegativeZeroPreventedByBailout =
                        src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout() &&
                        src2Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Sub_A:
                    // -0 - 0 == -0
                    Assert(src1Value);
                    wasNegativeZeroPreventedByBailout = src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Neg_A:
                case Js::OpCode::Mul_A:
                case Js::OpCode::Div_A:
                case Js::OpCode::Rem_A:
                    wasNegativeZeroPreventedByBailout = !!(bailOutKind & IR::BailOutOnNegativeZero);
                    break;
            }
        }

        *pDstVal =
            dstBounds
                ? NewIntBoundedValue(valueType, dstBounds, wasNegativeZeroPreventedByBailout, nullptr)
                : NewIntRangeValue(newMin, newMax, wasNegativeZeroPreventedByBailout, nullptr);
    }
    else
    {
        *pDstVal = dstBounds ? NewIntBoundedValue(valueType, dstBounds, false, nullptr) : NewGenericValue(valueType);
    }

    if(addSubConstantInfo || updateInductionVariableValueNumber)
    {
        TrackIntSpecializedAddSubConstant(instr, addSubConstantInfo, *pDstVal, !!dstBounds);
    }

    SetValue(&blockData, *pDstVal, dst);

    AssertMsg(dst->IsRegOpnd(), "What else?");
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
}
  9843. bool
  9844. GlobOpt::TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef)
  9845. {
  9846. IR::Instr *&instr = *pInstr;
  9847. int32 min1 = INT32_MIN, max1 = INT32_MAX, min2 = INT32_MIN, max2 = INT32_MAX, newMin, newMax, tmp;
  9848. Js::OpCode opcode;
  9849. IR::Opnd *src1, *src2;
  9850. Value *&src1Val = *pSrc1Val;
  9851. Value *&src2Val = *pSrc2Val;
  9852. // We don't need to do typespec for asmjs
  9853. if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
  9854. {
  9855. return false;
  9856. }
  9857. if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  9858. {
  9859. this->TypeSpecializeInlineBuiltInBinary(pInstr, src1Val, src2Val, pDstVal, src1OriginalVal, src2OriginalVal);
  9860. return true;
  9861. }
  9862. if (src1Val)
  9863. {
  9864. src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
  9865. }
  9866. if (src2Val)
  9867. {
  9868. src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
  9869. }
  9870. // Type specialize binary operators to int32
  9871. bool src1Lossy = true;
  9872. bool src2Lossy = true;
  9873. IR::BailOutKind bailOutKind = IR::BailOutInvalid;
  9874. bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
  9875. bool ignoredNegativeZero = false;
  9876. bool skipSrc2 = false;
  9877. bool skipDst = false;
  9878. bool needsBoolConv = false;
  9879. AddSubConstantInfo addSubConstantInfo;
  9880. switch (instr->m_opcode)
  9881. {
  9882. case Js::OpCode::Or_A:
  9883. if (!DoLossyIntTypeSpec())
  9884. {
  9885. return false;
  9886. }
  9887. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9888. opcode = Js::OpCode::Or_I4;
  9889. break;
  9890. case Js::OpCode::And_A:
  9891. if (!DoLossyIntTypeSpec())
  9892. {
  9893. return false;
  9894. }
  9895. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9896. opcode = Js::OpCode::And_I4;
  9897. break;
  9898. case Js::OpCode::Xor_A:
  9899. if (!DoLossyIntTypeSpec())
  9900. {
  9901. return false;
  9902. }
  9903. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9904. opcode = Js::OpCode::Xor_I4;
  9905. break;
  9906. case Js::OpCode::Shl_A:
  9907. if (!DoLossyIntTypeSpec())
  9908. {
  9909. return false;
  9910. }
  9911. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9912. opcode = Js::OpCode::Shl_I4;
  9913. break;
  9914. case Js::OpCode::Shr_A:
  9915. if (!DoLossyIntTypeSpec())
  9916. {
  9917. return false;
  9918. }
  9919. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9920. opcode = Js::OpCode::Shr_I4;
  9921. break;
  9922. case Js::OpCode::ShrU_A:
  9923. if (!DoLossyIntTypeSpec())
  9924. {
  9925. return false;
  9926. }
  9927. if (min1 < 0 && IntConstantBounds(min2, max2).And_0x1f().Contains(0))
  9928. {
  9929. // Src1 may be too large to represent as a signed int32, and src2 may be zero. Unless the resulting value is only
  9930. // used as a signed int32 (hence allowing us to ignore the result's sign), don't specialize the instruction.
  9931. if (!instr->ignoreIntOverflow)
  9932. return false;
  9933. ignoredIntOverflow = true;
  9934. }
  9935. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  9936. opcode = Js::OpCode::ShrU_I4;
  9937. break;
  9938. case Js::OpCode::BrUnLe_A:
  9939. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  9940. // int already, so require that both sources are likely int for folding.
  9941. if (DoConstFold() &&
  9942. !IsLoopPrePass() &&
  9943. TryOptConstFoldBrUnsignedGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  9944. {
  9945. return true;
  9946. }
  9947. if (min1 >= 0 && min2 >= 0)
  9948. {
  9949. // Only handle positive values since this is unsigned...
  9950. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  9951. // (INT32_MIN, INT32_MAX), so we're good.
  9952. Assert(src1Val);
  9953. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  9954. Assert(src2Val);
  9955. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  9956. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  9957. }
  9958. if (!DoLossyIntTypeSpec())
  9959. {
  9960. return false;
  9961. }
  9962. newMin = newMax = 0;
  9963. opcode = Js::OpCode::BrUnLe_I4;
  9964. break;
  9965. case Js::OpCode::BrUnLt_A:
  9966. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  9967. // int already, so require that both sources are likely int for folding.
  9968. if (DoConstFold() &&
  9969. !IsLoopPrePass() &&
  9970. TryOptConstFoldBrUnsignedLessThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  9971. {
  9972. return true;
  9973. }
  9974. if (min1 >= 0 && min2 >= 0)
  9975. {
  9976. // Only handle positive values since this is unsigned...
  9977. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  9978. // (INT32_MIN, INT32_MAX), so we're good.
  9979. Assert(src1Val);
  9980. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  9981. Assert(src2Val);
  9982. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  9983. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  9984. }
  9985. if (!DoLossyIntTypeSpec())
  9986. {
  9987. return false;
  9988. }
  9989. newMin = newMax = 0;
  9990. opcode = Js::OpCode::BrUnLt_I4;
  9991. break;
  9992. case Js::OpCode::BrUnGe_A:
  9993. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  9994. // int already, so require that both sources are likely int for folding.
  9995. if (DoConstFold() &&
  9996. !IsLoopPrePass() &&
  9997. TryOptConstFoldBrUnsignedLessThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  9998. {
  9999. return true;
  10000. }
  10001. if (min1 >= 0 && min2 >= 0)
  10002. {
  10003. // Only handle positive values since this is unsigned...
  10004. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  10005. // (INT32_MIN, INT32_MAX), so we're good.
  10006. Assert(src1Val);
  10007. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  10008. Assert(src2Val);
  10009. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  10010. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  10011. }
  10012. if (!DoLossyIntTypeSpec())
  10013. {
  10014. return false;
  10015. }
  10016. newMin = newMax = 0;
  10017. opcode = Js::OpCode::BrUnGe_I4;
  10018. break;
  10019. case Js::OpCode::BrUnGt_A:
  10020. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  10021. // int already, so require that both sources are likely int for folding.
  10022. if (DoConstFold() &&
  10023. !IsLoopPrePass() &&
  10024. TryOptConstFoldBrUnsignedGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10025. {
  10026. return true;
  10027. }
  10028. if (min1 >= 0 && min2 >= 0)
  10029. {
  10030. // Only handle positive values since this is unsigned...
  10031. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  10032. // (INT32_MIN, INT32_MAX), so we're good.
  10033. Assert(src1Val);
  10034. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  10035. Assert(src2Val);
  10036. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  10037. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  10038. }
  10039. if (!DoLossyIntTypeSpec())
  10040. {
  10041. return false;
  10042. }
  10043. newMin = newMax = 0;
  10044. opcode = Js::OpCode::BrUnGt_I4;
  10045. break;
  10046. case Js::OpCode::CmUnLe_A:
  10047. if (!DoLossyIntTypeSpec())
  10048. {
  10049. return false;
  10050. }
  10051. newMin = 0;
  10052. newMax = 1;
  10053. opcode = Js::OpCode::CmUnLe_I4;
  10054. needsBoolConv = true;
  10055. break;
  10056. case Js::OpCode::CmUnLt_A:
  10057. if (!DoLossyIntTypeSpec())
  10058. {
  10059. return false;
  10060. }
  10061. newMin = 0;
  10062. newMax = 1;
  10063. opcode = Js::OpCode::CmUnLt_I4;
  10064. needsBoolConv = true;
  10065. break;
  10066. case Js::OpCode::CmUnGe_A:
  10067. if (!DoLossyIntTypeSpec())
  10068. {
  10069. return false;
  10070. }
  10071. newMin = 0;
  10072. newMax = 1;
  10073. opcode = Js::OpCode::CmUnGe_I4;
  10074. needsBoolConv = true;
  10075. break;
  10076. case Js::OpCode::CmUnGt_A:
  10077. if (!DoLossyIntTypeSpec())
  10078. {
  10079. return false;
  10080. }
  10081. newMin = 0;
  10082. newMax = 1;
  10083. opcode = Js::OpCode::CmUnGt_I4;
  10084. needsBoolConv = true;
  10085. break;
  10086. case Js::OpCode::Expo_A:
  10087. {
  10088. src1Val = src1OriginalVal;
  10089. src2Val = src2OriginalVal;
  10090. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10091. }
  10092. case Js::OpCode::Div_A:
  10093. {
  10094. ValueType specializedValueType = GetDivValueType(instr, src1Val, src2Val, true);
  10095. if (specializedValueType.IsFloat())
  10096. {
  10097. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  10098. // Note: We should really constant fold cst1%cst2...
  10099. src1Val = src1OriginalVal;
  10100. src2Val = src2OriginalVal;
  10101. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10102. }
  10103. #ifdef _M_ARM
  10104. if (!AutoSystemInfo::Data.ArmDivAvailable())
  10105. {
  10106. return false;
  10107. }
  10108. #endif
  10109. if (specializedValueType.IsInt())
  10110. {
  10111. if (max2 == 0x80000000 || (min2 == 0 && max2 == 00))
  10112. {
  10113. return false;
  10114. }
  10115. if (min1 == 0x80000000 && min2 <= -1 && max2 >= -1)
  10116. {
  10117. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  10118. // Or we know we are dividing by zero (which is weird to have because the profile data
  10119. // say we got an int)
  10120. bailOutKind = IR::BailOutOnDivOfMinInt;
  10121. }
  10122. src1Lossy = false; // Detect -0 on the sources
  10123. src2Lossy = false;
  10124. opcode = Js::OpCode::Div_I4;
  10125. bailOutKind |= IR::BailOnDivResultNotInt;
  10126. if (max2 >= 0 && min2 <= 0)
  10127. {
  10128. // Need to check for divide by zero if the denominator range includes 0
  10129. bailOutKind |= IR::BailOutOnDivByZero;
  10130. }
  10131. if (max1 >= 0 && min1 <= 0)
  10132. {
  10133. // Numerator contains 0 so the result contains 0
  10134. newMin = 0;
  10135. newMax = 0;
  10136. if (min2 < 0)
  10137. {
  10138. // Denominator may be negative, so the result could be negative 0
  10139. if (instr->ShouldCheckForNegativeZero())
  10140. {
  10141. bailOutKind |= IR::BailOutOnNegativeZero;
  10142. }
  10143. else
  10144. {
  10145. ignoredNegativeZero = true;
  10146. }
  10147. }
  10148. }
  10149. else
  10150. {
10151. // Initialize to an invalid value; one of the conditions below will update it correctly
  10152. newMin = INT_MAX;
  10153. newMax = INT_MIN;
  10154. }
  10155. // Deal with the positive and negative range separately for both the numerator and the denominator,
  10156. // and integrate to the overall min and max.
  10157. // If the result is positive (positive/positive or negative/negative):
  10158. // The min should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  10159. // divided by ---------------------------------------------------------------
  10160. // largest magnitude denominator (positive_Max2 | negative_Min2)
  10161. //
  10162. // The max should be the largest magnitude numerator (positive_Max1 | negative_Max1)
  10163. // divided by ---------------------------------------------------------------
  10164. // smallest magnitude denominator (positive_Min2 | negative_Max2)
10165. // If the result is negative (positive/negative or negative/positive):
  10166. // The min should be the largest magnitude numerator (positive_Max1 | negative_Min1)
  10167. // divided by ---------------------------------------------------------------
  10168. // smallest magnitude denominator (negative_Max2 | positive_Min2)
  10169. //
  10170. // The max should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  10171. // divided by ---------------------------------------------------------------
  10172. // largest magnitude denominator (negative_Min2 | positive_Max2)
  10173. // Consider: The range can be slightly more precise if we take care of the rounding
  10174. if (max1 > 0)
  10175. {
  10176. // Take only the positive numerator range
  10177. int32 positive_Min1 = max(1, min1);
  10178. int32 positive_Max1 = max1;
  10179. if (max2 > 0)
  10180. {
  10181. // Take only the positive denominator range
  10182. int32 positive_Min2 = max(1, min2);
  10183. int32 positive_Max2 = max2;
  10184. // Positive / Positive
  10185. int32 quadrant1_Min = positive_Min1 <= positive_Max2? 1 : positive_Min1 / positive_Max2;
  10186. int32 quadrant1_Max = positive_Max1 <= positive_Min2? 1 : positive_Max1 / positive_Min2;
  10187. Assert(1 <= quadrant1_Min && quadrant1_Min <= quadrant1_Max);
10188. // The result should be positive
  10189. newMin = min(newMin, quadrant1_Min);
  10190. newMax = max(newMax, quadrant1_Max);
  10191. }
  10192. if (min2 < 0)
  10193. {
  10194. // Take only the negative denominator range
  10195. int32 negative_Min2 = min2;
  10196. int32 negative_Max2 = min(-1, max2);
  10197. // Positive / Negative
  10198. int32 quadrant2_Min = -positive_Max1 >= negative_Max2? -1 : positive_Max1 / negative_Max2;
  10199. int32 quadrant2_Max = -positive_Min1 >= negative_Min2? -1 : positive_Min1 / negative_Min2;
10200. // The result should be negative
  10201. Assert(quadrant2_Min <= quadrant2_Max && quadrant2_Max <= -1);
  10202. newMin = min(newMin, quadrant2_Min);
  10203. newMax = max(newMax, quadrant2_Max);
  10204. }
  10205. }
  10206. if (min1 < 0)
  10207. {
10208. // Take only the negative numerator range
  10209. int32 negative_Min1 = min1;
  10210. int32 negative_Max1 = min(-1, max1);
  10211. if (max2 > 0)
  10212. {
  10213. // Take only the positive denominator range
  10214. int32 positive_Min2 = max(1, min2);
  10215. int32 positive_Max2 = max2;
  10216. // Negative / Positive
  10217. int32 quadrant4_Min = negative_Min1 >= -positive_Min2? -1 : negative_Min1 / positive_Min2;
  10218. int32 quadrant4_Max = negative_Max1 >= -positive_Max2? -1 : negative_Max1 / positive_Max2;
10219. // The result should be negative
  10220. Assert(quadrant4_Min <= quadrant4_Max && quadrant4_Max <= -1);
  10221. newMin = min(newMin, quadrant4_Min);
  10222. newMax = max(newMax, quadrant4_Max);
  10223. }
  10224. if (min2 < 0)
  10225. {
  10226. // Take only the negative denominator range
  10227. int32 negative_Min2 = min2;
  10228. int32 negative_Max2 = min(-1, max2);
  10229. int32 quadrant3_Min;
  10230. int32 quadrant3_Max;
  10231. // Negative / Negative
  10232. if (negative_Max1 == 0x80000000 && negative_Min2 == -1)
  10233. {
  10234. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : (negative_Max1+1) / negative_Min2;
  10235. }
  10236. else
  10237. {
  10238. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : negative_Max1 / negative_Min2;
  10239. }
  10240. if (negative_Min1 == 0x80000000 && negative_Max2 == -1)
  10241. {
  10242. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : (negative_Min1+1) / negative_Max2;
  10243. }
  10244. else
  10245. {
  10246. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : negative_Min1 / negative_Max2;
  10247. }
10248. // The result should be positive
  10249. Assert(1 <= quadrant3_Min && quadrant3_Min <= quadrant3_Max);
  10250. newMin = min(newMin, quadrant3_Min);
  10251. newMax = max(newMax, quadrant3_Max);
  10252. }
  10253. }
  10254. Assert(newMin <= newMax);
  10255. // Continue to int type spec
  10256. break;
  10257. }
  10258. }
  10259. // fall-through
  10260. default:
  10261. {
  10262. const bool involesLargeInt32 =
  10263. (src1Val && src1Val->GetValueInfo()->IsLikelyUntaggedInt()) ||
  10264. (src2Val && src2Val->GetValueInfo()->IsLikelyUntaggedInt());
  10265. const auto trySpecializeToFloat =
  10266. [&](const bool mayOverflow) -> bool
  10267. {
  10268. // It has been determined that this instruction cannot be int-specialized. Need to determine whether to attempt
  10269. // to float-specialize the instruction, or leave it unspecialized.
  10270. if((involesLargeInt32
  10271. #if INT32VAR
  10272. && mayOverflow
  10273. #endif
  10274. ) || (instr->m_opcode == Js::OpCode::Mul_A && !this->DoAggressiveMulIntTypeSpec())
  10275. )
  10276. {
  10277. // An input range is completely outside the range of an int31 and the operation is likely to overflow.
  10278. // Additionally, on 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is
  10279. // significantly slower to use in an unspecialized operation compared to a tagged int. So, try to
  10280. // float-specialize the instruction.
  10281. src1Val = src1OriginalVal;
  10282. src2Val = src2OriginalVal;
  10283. return TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10284. }
  10285. return false;
  10286. };
  10287. if (instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
  10288. {
  10289. if ((src1Val && src1Val->GetValueInfo()->IsLikelyFloat()) || (src2Val && src2Val->GetValueInfo()->IsLikelyFloat()))
  10290. {
  10291. // Try to type specialize to float
  10292. src1Val = src1OriginalVal;
  10293. src2Val = src2OriginalVal;
  10294. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10295. }
  10296. if (src1Val == nullptr ||
  10297. src2Val == nullptr ||
  10298. !src1Val->GetValueInfo()->IsLikelyInt() ||
  10299. !src2Val->GetValueInfo()->IsLikelyInt() ||
  10300. (
  10301. !DoAggressiveIntTypeSpec() &&
  10302. (
  10303. !(src1Val->GetValueInfo()->IsInt() || IsSwitchInt32TypeSpecialized(instr, currentBlock)) ||
  10304. !src2Val->GetValueInfo()->IsInt()
  10305. )
  10306. ) ||
  10307. (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt) ||
  10308. (instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->m_isNotInt))
  10309. {
  10310. return trySpecializeToFloat(true);
  10311. }
  10312. }
  10313. // Try to type specialize to int32
  10314. // If one of the values is a float constant with a value that fits in a uint32 but not an int32,
  10315. // and the instruction can ignore int overflow, the source value for the purposes of int specialization
  10316. // would have been changed to an int constant value by ignoring overflow. But, the conversion is still lossy.
  10317. if (!(src1OriginalVal && src1OriginalVal->GetValueInfo()->IsFloatConstant() && src1Val && src1Val->GetValueInfo()->HasIntConstantValue()))
  10318. {
  10319. src1Lossy = false;
  10320. }
  10321. if (!(src2OriginalVal && src2OriginalVal->GetValueInfo()->IsFloatConstant() && src2Val && src2Val->GetValueInfo()->HasIntConstantValue()))
  10322. {
  10323. src2Lossy = false;
  10324. }
  10325. switch(instr->m_opcode)
  10326. {
  10327. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  10328. // If the src is already type-specialized, if we don't type-specialize ArgOut_A_InlineBuiltIn instr, we'll get additional ToVar.
  10329. // So, to avoid that, type-specialize the ArgOut_A_InlineBuiltIn instr.
  10330. // Else we don't need to type-specialize the instr, we are fine with src being Var.
  10331. if (instr->GetSrc1()->IsRegOpnd())
  10332. {
  10333. StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
  10334. if (this->IsInt32TypeSpecialized(sym, this->currentBlock))
  10335. {
  10336. opcode = instr->m_opcode;
  10337. skipDst = true; // We should keep dst as is, otherwise the link opnd for next ArgOut/InlineBuiltInStart would be broken.
  10338. skipSrc2 = true; // src2 is linkOpnd. We don't need to type-specialize it.
  10339. newMin = min1; newMax = max1; // Values don't matter, these are unused.
  10340. goto LOutsideSwitch; // Continue to int-type-specialize.
  10341. }
  10342. else if (this->IsFloat64TypeSpecialized(sym, this->currentBlock))
  10343. {
  10344. src1Val = src1OriginalVal;
  10345. src2Val = src2OriginalVal;
  10346. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  10347. }
  10348. else if (this->IsSimd128F4TypeSpecialized(sym, this->currentBlock))
  10349. {
  10350. // SIMD_JS
  10351. // We should be already using the SIMD type-spec sym. See TypeSpecializeSimd128.
  10352. Assert(IRType_IsSimd128(instr->GetSrc1()->GetType()));
  10353. }
  10354. }
  10355. return false;
  10356. case Js::OpCode::Add_A:
  10357. do // while(false)
  10358. {
  10359. const auto CannotOverflowBasedOnRelativeBounds = [&](int32 *const constantValueRef)
  10360. {
  10361. Assert(constantValueRef);
  10362. if(min2 == max2 &&
  10363. src1Val->GetValueInfo()->IsIntBounded() &&
  10364. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min2))
  10365. {
  10366. *constantValueRef = min2;
  10367. return true;
  10368. }
  10369. else if(
  10370. min1 == max1 &&
  10371. src2Val->GetValueInfo()->IsIntBounded() &&
  10372. src2Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min1))
  10373. {
  10374. *constantValueRef = min1;
  10375. return true;
  10376. }
  10377. return false;
  10378. };
  10379. if (Int32Math::Add(min1, min2, &newMin))
  10380. {
  10381. int32 constantSrcValue;
  10382. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  10383. {
  10384. newMin = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  10385. }
  10386. else if(instr->ShouldCheckForIntOverflow())
  10387. {
  10388. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10389. {
  10390. // May overflow
  10391. return trySpecializeToFloat(true);
  10392. }
  10393. bailOutKind |= IR::BailOutOnOverflow;
  10394. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  10395. }
  10396. else
  10397. {
  10398. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since
  10399. // overflow causes the value to wrap around, and we don't have a way to specify a lower and upper
  10400. // range of ints, we use the full range of int32s.
  10401. ignoredIntOverflow = true;
  10402. newMin = INT32_MIN;
  10403. newMax = INT32_MAX;
  10404. break;
  10405. }
  10406. }
  10407. if (Int32Math::Add(max1, max2, &newMax))
  10408. {
  10409. int32 constantSrcValue;
  10410. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  10411. {
  10412. newMax = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  10413. }
  10414. else if(instr->ShouldCheckForIntOverflow())
  10415. {
  10416. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10417. {
  10418. // May overflow
  10419. return trySpecializeToFloat(true);
  10420. }
  10421. bailOutKind |= IR::BailOutOnOverflow;
  10422. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  10423. }
  10424. else
  10425. {
  10426. // See comment about ignoring overflow above
  10427. ignoredIntOverflow = true;
  10428. newMin = INT32_MIN;
  10429. newMax = INT32_MAX;
  10430. break;
  10431. }
  10432. }
  10433. if(bailOutKind & IR::BailOutOnOverflow)
  10434. {
  10435. Assert(bailOutKind == IR::BailOutOnOverflow);
  10436. Assert(instr->ShouldCheckForIntOverflow());
  10437. int32 temp;
  10438. if(Int32Math::Add(
  10439. Int32Math::NearestInRangeTo(0, min1, max1),
  10440. Int32Math::NearestInRangeTo(0, min2, max2),
  10441. &temp))
  10442. {
  10443. // Always overflows
  10444. return trySpecializeToFloat(true);
  10445. }
  10446. }
  10447. } while(false);
  10448. if (!this->IsLoopPrePass() && newMin == newMax && bailOutKind == IR::BailOutInvalid)
  10449. {
  10450. // Take care of Add with zero here, since we know we're dealing with 2 numbers.
  10451. this->CaptureByteCodeSymUses(instr);
  10452. IR::Opnd *src;
  10453. bool isAddZero = true;
  10454. int32 intConstantValue;
  10455. if (src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  10456. {
  10457. src = instr->UnlinkSrc2();
  10458. instr->FreeSrc1();
  10459. }
  10460. else if (src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  10461. {
  10462. src = instr->UnlinkSrc1();
  10463. instr->FreeSrc2();
  10464. }
  10465. else
  10466. {
  10467. // This should have been handled by const folding, unless:
  10468. // - A source's value was substituted with a different value here, which is after const folding happened
  10469. // - A value is not definitely int, but once converted to definite int, it would be zero due to a
  10470. // condition in the source code such as if(a === 0). Ideally, we would specialize the sources and
  10471. // remove the add, but doesn't seem too important for now.
  10472. Assert(
  10473. !DoConstFold() ||
  10474. src1Val != src1OriginalVal ||
  10475. src2Val != src2OriginalVal ||
  10476. !src1Val->GetValueInfo()->IsInt() ||
  10477. !src2Val->GetValueInfo()->IsInt());
  10478. isAddZero = false;
  10479. src = nullptr;
  10480. }
  10481. if (isAddZero)
  10482. {
  10483. IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Ld_A, instr->UnlinkDst(), src, instr->m_func);
  10484. newInstr->SetByteCodeOffset(instr);
  10485. instr->m_opcode = Js::OpCode::Nop;
  10486. this->currentBlock->InsertInstrAfter(newInstr, instr);
  10487. return true;
  10488. }
  10489. }
  10490. if(!ignoredIntOverflow)
  10491. {
  10492. if(min2 == max2 &&
  10493. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  10494. instr->GetSrc1()->IsRegOpnd())
  10495. {
  10496. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, min2);
  10497. }
  10498. else if(
  10499. min1 == max1 &&
  10500. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Val)) &&
  10501. instr->GetSrc2()->IsRegOpnd())
  10502. {
  10503. addSubConstantInfo.Set(instr->GetSrc2()->AsRegOpnd()->m_sym, src2Val, min2 == max2, min1);
  10504. }
  10505. }
  10506. opcode = Js::OpCode::Add_I4;
  10507. break;
  10508. case Js::OpCode::Sub_A:
  10509. do // while(false)
  10510. {
  10511. const auto CannotOverflowBasedOnRelativeBounds = [&]()
  10512. {
  10513. return
  10514. min2 == max2 &&
  10515. src1Val->GetValueInfo()->IsIntBounded() &&
  10516. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(min2);
  10517. };
  10518. if (Int32Math::Sub(min1, max2, &newMin))
  10519. {
  10520. if(CannotOverflowBasedOnRelativeBounds())
  10521. {
  10522. Assert(min2 == max2);
  10523. newMin = min2 >= 0 ? INT32_MIN : INT32_MAX;
  10524. }
  10525. else if(instr->ShouldCheckForIntOverflow())
  10526. {
  10527. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10528. {
  10529. // May overflow
  10530. return trySpecializeToFloat(true);
  10531. }
  10532. bailOutKind |= IR::BailOutOnOverflow;
  10533. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  10534. }
  10535. else
  10536. {
  10537. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
  10538. // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
  10539. // we use the full range of int32s.
  10540. ignoredIntOverflow = true;
  10541. newMin = INT32_MIN;
  10542. newMax = INT32_MAX;
  10543. break;
  10544. }
  10545. }
  10546. if (Int32Math::Sub(max1, min2, &newMax))
  10547. {
  10548. if(CannotOverflowBasedOnRelativeBounds())
  10549. {
  10550. Assert(min2 == max2);
  10551. newMax = min2 >= 0 ? INT32_MIN: INT32_MAX;
  10552. }
  10553. else if(instr->ShouldCheckForIntOverflow())
  10554. {
  10555. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  10556. {
  10557. // May overflow
  10558. return trySpecializeToFloat(true);
  10559. }
  10560. bailOutKind |= IR::BailOutOnOverflow;
  10561. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  10562. }
  10563. else
  10564. {
  10565. // See comment about ignoring overflow above
  10566. ignoredIntOverflow = true;
  10567. newMin = INT32_MIN;
  10568. newMax = INT32_MAX;
  10569. break;
  10570. }
  10571. }
  10572. if(bailOutKind & IR::BailOutOnOverflow)
  10573. {
  10574. Assert(bailOutKind == IR::BailOutOnOverflow);
  10575. Assert(instr->ShouldCheckForIntOverflow());
  10576. int32 temp;
  10577. if(Int32Math::Sub(
  10578. Int32Math::NearestInRangeTo(-1, min1, max1),
  10579. Int32Math::NearestInRangeTo(0, min2, max2),
  10580. &temp))
  10581. {
  10582. // Always overflows
  10583. return trySpecializeToFloat(true);
  10584. }
  10585. }
  10586. } while(false);
  10587. if(!ignoredIntOverflow &&
  10588. min2 == max2 &&
  10589. min2 != INT32_MIN &&
  10590. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  10591. instr->GetSrc1()->IsRegOpnd())
  10592. {
  10593. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, -min2);
  10594. }
  10595. opcode = Js::OpCode::Sub_I4;
  10596. break;
  10597. case Js::OpCode::Mul_A:
  10598. {
  10599. if (Int32Math::Mul(min1, min2, &newMin))
  10600. {
  10601. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10602. {
  10603. // May overflow
  10604. return trySpecializeToFloat(true);
  10605. }
  10606. bailOutKind |= IR::BailOutOnMulOverflow;
  10607. newMin = (min1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  10608. }
  10609. newMax = newMin;
  10610. if (Int32Math::Mul(max1, max2, &tmp))
  10611. {
  10612. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10613. {
  10614. // May overflow
  10615. return trySpecializeToFloat(true);
  10616. }
  10617. bailOutKind |= IR::BailOutOnMulOverflow;
  10618. tmp = (max1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  10619. }
  10620. newMin = min(newMin, tmp);
  10621. newMax = max(newMax, tmp);
  10622. if (Int32Math::Mul(min1, max2, &tmp))
  10623. {
  10624. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10625. {
  10626. // May overflow
  10627. return trySpecializeToFloat(true);
  10628. }
  10629. bailOutKind |= IR::BailOutOnMulOverflow;
  10630. tmp = (min1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  10631. }
  10632. newMin = min(newMin, tmp);
  10633. newMax = max(newMax, tmp);
  10634. if (Int32Math::Mul(max1, min2, &tmp))
  10635. {
  10636. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  10637. {
  10638. // May overflow
  10639. return trySpecializeToFloat(true);
  10640. }
  10641. bailOutKind |= IR::BailOutOnMulOverflow;
  10642. tmp = (max1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  10643. }
  10644. newMin = min(newMin, tmp);
  10645. newMax = max(newMax, tmp);
  10646. if (bailOutKind & IR::BailOutOnMulOverflow)
  10647. {
  10648. // CSE only if two MULs have the same overflow check behavior.
  10649. // Currently this is set to be ignore int32 overflow, but not 53-bit, or int32 overflow matters.
  10650. if (!instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
  10651. {
  10652. // If we allow int to overflow then there can be anything in the resulting int
  10653. newMin = IntConstMin;
  10654. newMax = IntConstMax;
  10655. ignoredIntOverflow = true;
  10656. }
  10657. int32 temp, overflowValue;
  10658. if (Int32Math::Mul(
  10659. Int32Math::NearestInRangeTo(0, min1, max1),
  10660. Int32Math::NearestInRangeTo(0, min2, max2),
  10661. &temp,
  10662. &overflowValue))
  10663. {
  10664. Assert(instr->ignoreOverflowBitCount >= 32);
  10665. int overflowMatters = 64 - instr->ignoreOverflowBitCount;
  10666. if (!ignoredIntOverflow ||
  10667. // Use shift to check high bits in case its negative
  10668. ((overflowValue << overflowMatters) >> overflowMatters) != overflowValue
  10669. )
  10670. {
  10671. // Always overflows
  10672. return trySpecializeToFloat(true);
  10673. }
  10674. }
  10675. }
  10676. if (newMin <= 0 && newMax >= 0 && // New range crosses zero
  10677. (min1 < 0 || min2 < 0) && // An operand's range contains a negative integer
  10678. !(min1 > 0 || min2 > 0) && // Neither operand's range contains only positive integers
  10679. !instr->GetSrc1()->IsEqual(instr->GetSrc2())) // The operands don't have the same value
  10680. {
  10681. if (instr->ShouldCheckForNegativeZero())
  10682. {
  10683. // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
  10684. if (!DoAggressiveIntTypeSpec())
  10685. {
  10686. // May result in -0
  10687. return trySpecializeToFloat(false);
  10688. }
  10689. if (((min1 == 0 && max1 == 0) || (min2 == 0 && max2 == 0)) && (max1 < 0 || max2 < 0))
  10690. {
  10691. // Always results in -0
  10692. return trySpecializeToFloat(false);
  10693. }
  10694. bailOutKind |= IR::BailOutOnNegativeZero;
  10695. }
  10696. else
  10697. {
  10698. ignoredNegativeZero = true;
  10699. }
  10700. }
  10701. opcode = Js::OpCode::Mul_I4;
  10702. break;
  10703. }
  10704. case Js::OpCode::Rem_A:
  10705. {
  10706. src2 = instr->GetSrc2();
  10707. if (!this->IsLoopPrePass() && min2 == max2 && min1 >= 0)
  10708. {
  10709. int32 value = min2;
  10710. if (value == (1 << Math::Log2(value)) && src2->IsAddrOpnd())
  10711. {
  10712. Assert(src2->AsAddrOpnd()->IsVar());
  10713. instr->m_opcode = Js::OpCode::And_A;
  10714. src2->AsAddrOpnd()->SetAddress(Js::TaggedInt::ToVarUnchecked(value - 1),
  10715. IR::AddrOpndKindConstantVar);
  10716. *pSrc2Val = GetIntConstantValue(value - 1, instr);
  10717. src2Val = *pSrc2Val;
  10718. return this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2Val, redoTypeSpecRef);
  10719. }
  10720. }
  10721. #ifdef _M_ARM
  10722. if (!AutoSystemInfo::Data.ArmDivAvailable())
  10723. {
  10724. return false;
  10725. }
  10726. #endif
  10727. if (min1 < 0)
  10728. {
  10729. // The most negative it can be is min1, unless limited by min2/max2
  10730. int32 negMaxAbs2;
  10731. if (min2 == INT32_MIN)
  10732. {
  10733. negMaxAbs2 = INT32_MIN;
  10734. }
  10735. else
  10736. {
  10737. negMaxAbs2 = -max(abs(min2), abs(max2)) + 1;
  10738. }
  10739. newMin = max(min1, negMaxAbs2);
  10740. }
  10741. else
  10742. {
  10743. newMin = 0;
  10744. }
  10745. bool isModByPowerOf2 = (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo() &&
  10746. instr->m_func->GetReadOnlyProfileInfo()->IsModulusOpByPowerOf2(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId)));
  10747. if(isModByPowerOf2)
  10748. {
  10749. Assert(bailOutKind == IR::BailOutInvalid);
  10750. bailOutKind = IR::BailOnModByPowerOf2;
  10751. newMin = 0;
  10752. }
  10753. else
  10754. {
  10755. if (min2 <= 0 && max2 >= 0)
  10756. {
  10757. // Consider: We could handle the zero case with a check and bailout...
  10758. return false;
  10759. }
  10760. if (min1 == 0x80000000 && (min2 <= -1 && max2 >= -1))
  10761. {
  10762. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  10763. return false;
  10764. }
  10765. if (min1 < 0)
  10766. {
  10767. if(instr->ShouldCheckForNegativeZero())
  10768. {
  10769. if (!DoAggressiveIntTypeSpec())
  10770. {
  10771. return false;
  10772. }
  10773. bailOutKind |= IR::BailOutOnNegativeZero;
  10774. }
  10775. else
  10776. {
  10777. ignoredNegativeZero = true;
  10778. }
  10779. }
  10780. }
  10781. {
  10782. int32 absMax2;
  10783. if (min2 == INT32_MIN)
  10784. {
  10785. // abs(INT32_MIN) == INT32_MAX because of overflow
  10786. absMax2 = INT32_MAX;
  10787. }
  10788. else
  10789. {
  10790. absMax2 = max(abs(min2), abs(max2)) - 1;
  10791. }
  10792. newMax = min(absMax2, max(max1, 0));
  10793. newMax = max(newMin, newMax);
  10794. }
  10795. opcode = Js::OpCode::Rem_I4;
  10796. break;
  10797. }
  10798. case Js::OpCode::CmEq_A:
  10799. case Js::OpCode::CmSrEq_A:
  10800. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10801. {
  10802. return false;
  10803. }
  10804. newMin = 0;
  10805. newMax = 1;
  10806. opcode = Js::OpCode::CmEq_I4;
  10807. needsBoolConv = true;
  10808. break;
  10809. case Js::OpCode::CmNeq_A:
  10810. case Js::OpCode::CmSrNeq_A:
  10811. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10812. {
  10813. return false;
  10814. }
  10815. newMin = 0;
  10816. newMax = 1;
  10817. opcode = Js::OpCode::CmNeq_I4;
  10818. needsBoolConv = true;
  10819. break;
  10820. case Js::OpCode::CmLe_A:
  10821. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10822. {
  10823. return false;
  10824. }
  10825. newMin = 0;
  10826. newMax = 1;
  10827. opcode = Js::OpCode::CmLe_I4;
  10828. needsBoolConv = true;
  10829. break;
  10830. case Js::OpCode::CmLt_A:
  10831. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10832. {
  10833. return false;
  10834. }
  10835. newMin = 0;
  10836. newMax = 1;
  10837. opcode = Js::OpCode::CmLt_I4;
  10838. needsBoolConv = true;
  10839. break;
  10840. case Js::OpCode::CmGe_A:
  10841. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10842. {
  10843. return false;
  10844. }
  10845. newMin = 0;
  10846. newMax = 1;
  10847. opcode = Js::OpCode::CmGe_I4;
  10848. needsBoolConv = true;
  10849. break;
  10850. case Js::OpCode::CmGt_A:
  10851. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  10852. {
  10853. return false;
  10854. }
  10855. newMin = 0;
  10856. newMax = 1;
  10857. opcode = Js::OpCode::CmGt_I4;
  10858. needsBoolConv = true;
  10859. break;
  10860. case Js::OpCode::BrSrEq_A:
  10861. case Js::OpCode::BrEq_A:
  10862. case Js::OpCode::BrNotNeq_A:
  10863. case Js::OpCode::BrSrNotNeq_A:
  10864. {
  10865. if(DoConstFold() &&
  10866. !IsLoopPrePass() &&
  10867. TryOptConstFoldBrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10868. {
  10869. return true;
  10870. }
  10871. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10872. UpdateIntBoundsForEqualBranch(src1Val, src2Val);
  10873. if(!specialize)
  10874. {
  10875. return false;
  10876. }
  10877. opcode = Js::OpCode::BrEq_I4;
  10878. // We'll get a warning if we don't assign a value to these...
  10879. // We'll assert if we use them and make a range where min > max
  10880. newMin = 2; newMax = 1;
  10881. break;
  10882. }
  10883. case Js::OpCode::BrSrNeq_A:
  10884. case Js::OpCode::BrNeq_A:
  10885. case Js::OpCode::BrSrNotEq_A:
  10886. case Js::OpCode::BrNotEq_A:
  10887. {
  10888. if(DoConstFold() &&
  10889. !IsLoopPrePass() &&
  10890. TryOptConstFoldBrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  10891. {
  10892. return true;
  10893. }
  10894. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10895. UpdateIntBoundsForNotEqualBranch(src1Val, src2Val);
  10896. if(!specialize)
  10897. {
  10898. return false;
  10899. }
  10900. opcode = Js::OpCode::BrNeq_I4;
  10901. // We'll get a warning if we don't assign a value to these...
  10902. // We'll assert if we use them and make a range where min > max
  10903. newMin = 2; newMax = 1;
  10904. break;
  10905. }
  10906. case Js::OpCode::BrGt_A:
  10907. case Js::OpCode::BrNotLe_A:
  10908. {
  10909. if(DoConstFold() &&
  10910. !IsLoopPrePass() &&
  10911. TryOptConstFoldBrGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10912. {
  10913. return true;
  10914. }
  10915. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10916. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  10917. if(!specialize)
  10918. {
  10919. return false;
  10920. }
  10921. opcode = Js::OpCode::BrGt_I4;
  10922. // We'll get a warning if we don't assign a value to these...
  10923. // We'll assert if we use them and make a range where min > max
  10924. newMin = 2; newMax = 1;
  10925. break;
  10926. }
  10927. case Js::OpCode::BrGe_A:
  10928. case Js::OpCode::BrNotLt_A:
  10929. {
  10930. if(DoConstFold() &&
  10931. !IsLoopPrePass() &&
  10932. TryOptConstFoldBrGreaterThanOrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  10933. {
  10934. return true;
  10935. }
  10936. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10937. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  10938. if(!specialize)
  10939. {
  10940. return false;
  10941. }
  10942. opcode = Js::OpCode::BrGe_I4;
  10943. // We'll get a warning if we don't assign a value to these...
  10944. // We'll assert if we use them and make a range where min > max
  10945. newMin = 2; newMax = 1;
  10946. break;
  10947. }
  10948. case Js::OpCode::BrLt_A:
  10949. case Js::OpCode::BrNotGe_A:
  10950. {
  10951. if(DoConstFold() &&
  10952. !IsLoopPrePass() &&
  10953. TryOptConstFoldBrGreaterThanOrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  10954. {
  10955. return true;
  10956. }
  10957. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10958. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  10959. if(!specialize)
  10960. {
  10961. return false;
  10962. }
  10963. opcode = Js::OpCode::BrLt_I4;
  10964. // We'll get a warning if we don't assign a value to these...
  10965. // We'll assert if we use them and make a range where min > max
  10966. newMin = 2; newMax = 1;
  10967. break;
  10968. }
  10969. case Js::OpCode::BrLe_A:
  10970. case Js::OpCode::BrNotGt_A:
  10971. {
  10972. if(DoConstFold() &&
  10973. !IsLoopPrePass() &&
  10974. TryOptConstFoldBrGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  10975. {
  10976. return true;
  10977. }
  10978. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  10979. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  10980. if(!specialize)
  10981. {
  10982. return false;
  10983. }
  10984. opcode = Js::OpCode::BrLe_I4;
  10985. // We'll get a warning if we don't assign a value to these...
  10986. // We'll assert if we use them and make a range where min > max
  10987. newMin = 2; newMax = 1;
  10988. break;
  10989. }
  10990. default:
  10991. return false;
  10992. }
  10993. // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it
  10994. // (won't leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value
  10995. // needs to be guaranteed to be an int
  10996. if(!ignoredIntOverflow &&
  10997. !ignoredNegativeZero &&
  10998. !needsBoolConv &&
  10999. instr->ShouldCheckForIntOverflow() &&
  11000. !IsWorthSpecializingToInt32(instr, src1Val, src2Val))
  11001. {
  11002. // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
  11003. // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
  11004. // the following cases.
  11005. // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to
  11006. // be an int, but since we're not going to specialize this instruction, there won't be a bailout check.
  11007. // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
  11008. // that case, especially if the dst sym is live on the back-edge.
  11009. if(bailOutKind == IR::BailOutInvalid &&
  11010. instr->GetDst() &&
  11011. src1Val->GetValueInfo()->IsInt() &&
  11012. src2Val->GetValueInfo()->IsInt() &&
  11013. (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
  11014. {
  11015. *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  11016. }
  11017. return false;
  11018. }
  11019. } // case default
  11020. } // switch
  11021. LOutsideSwitch:
  11022. this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
  11023. this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;
  11024. {
  11025. // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
  11026. Value *src1IndirIndexVal = nullptr;
  11027. if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
  11028. {
  11029. *redoTypeSpecRef = true;
  11030. return false;
  11031. }
  11032. }
  11033. const Js::OpCode originalOpCode = instr->m_opcode;
  11034. if (!this->IsLoopPrePass())
  11035. {
  11036. // No re-write on prepass
  11037. instr->m_opcode = opcode;
  11038. }
  11039. Value *src1ValueToSpecialize = src1Val, *src2ValueToSpecialize = src2Val;
  11040. // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
  11041. // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
  11042. // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
  11043. // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
  11044. // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
  11045. // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
  11046. if(src1Lossy)
  11047. {
  11048. src1ValueToSpecialize = src1OriginalVal;
  11049. }
  11050. if (src2Lossy)
  11051. {
  11052. src2ValueToSpecialize = src2OriginalVal;
  11053. }
  11054. // Make sure the srcs are specialized
  11055. src1 = instr->GetSrc1();
  11056. this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, src1Lossy);
  11057. if (!skipSrc2)
  11058. {
  11059. src2 = instr->GetSrc2();
  11060. this->ToInt32(instr, src2, this->currentBlock, src2ValueToSpecialize, nullptr, src2Lossy);
  11061. }
  11062. if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
  11063. {
  11064. GenerateBailAtOperation(&instr, bailOutKind);
  11065. }
  11066. if (!skipDst && instr->GetDst())
  11067. {
  11068. if (needsBoolConv)
  11069. {
  11070. IR::RegOpnd *varDst;
  11071. if (this->IsLoopPrePass())
  11072. {
  11073. varDst = instr->GetDst()->AsRegOpnd();
  11074. this->ToVarRegOpnd(varDst, this->currentBlock);
  11075. }
  11076. else
  11077. {
  11078. // Generate:
  11079. // t1.i = CmCC t2.i, t3.i
  11080. // t1.v = Conv_bool t1.i
  11081. //
  11082. // If the only uses of t1 are ints, the conv_bool will get dead-stored
  11083. TypeSpecializeIntDst(instr, originalOpCode, nullptr, src1Val, src2Val, bailOutKind, newMin, newMax, pDstVal);
  11084. IR::RegOpnd *intDst = instr->GetDst()->AsRegOpnd();
  11085. intDst->SetIsJITOptimizedReg(true);
  11086. varDst = IR::RegOpnd::New(intDst->m_sym->GetVarEquivSym(this->func), TyVar, this->func);
  11087. IR::Instr *convBoolInstr = IR::Instr::New(Js::OpCode::Conv_Bool, varDst, intDst, this->func);
  11088. // In some cases (e.g. unsigned compare peep code), a comparison will use variables
  11089. // other than the ones initially intended for it, if we can determine that we would
  11090. // arrive at the same result. This means that we get a ByteCodeUses operation after
  11091. // the actual comparison. Since Inserting the Conv_bool just after the compare, and
  11092. // just before the ByteCodeUses, would cause issues later on with register lifetime
  11093. // calculation, we want to insert the Conv_bool after the whole compare instruction
  11094. // block.
  11095. IR::Instr *putAfter = instr;
  11096. while (putAfter->m_next && putAfter->m_next->m_opcode == Js::OpCode::ByteCodeUses)
  11097. {
  11098. putAfter = putAfter->m_next;
  11099. }
  11100. putAfter->InsertAfter(convBoolInstr);
  11101. convBoolInstr->SetByteCodeOffset(instr);
  11102. this->ToVarRegOpnd(varDst, this->currentBlock);
  11103. this->blockData.liveInt32Syms->Set(varDst->m_sym->m_id);
  11104. this->blockData.liveLossyInt32Syms->Set(varDst->m_sym->m_id);
  11105. }
  11106. *pDstVal = this->NewGenericValue(ValueType::Boolean, varDst);
  11107. }
  11108. else
  11109. {
  11110. TypeSpecializeIntDst(
  11111. instr,
  11112. originalOpCode,
  11113. nullptr,
  11114. src1Val,
  11115. src2Val,
  11116. bailOutKind,
  11117. newMin,
  11118. newMax,
  11119. pDstVal,
  11120. addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
  11121. }
  11122. }
  11123. if(bailOutKind == IR::BailOutInvalid)
  11124. {
  11125. GOPT_TRACE(_u("Type specialized to INT\n"));
  11126. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11127. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  11128. {
  11129. Output::Print(_u("Type specialized to INT: "));
  11130. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11131. }
  11132. #endif
  11133. }
  11134. else
  11135. {
  11136. GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
  11137. if(bailOutKind & (IR::BailOutOnOverflow | IR::BailOutOnMulOverflow) )
  11138. {
  11139. GOPT_TRACE(_u(" Overflow\n"));
  11140. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11141. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  11142. {
  11143. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
  11144. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11145. }
  11146. #endif
  11147. }
  11148. if(bailOutKind & IR::BailOutOnNegativeZero)
  11149. {
  11150. GOPT_TRACE(_u(" Zero\n"));
  11151. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11152. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  11153. {
  11154. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
  11155. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11156. }
  11157. #endif
  11158. }
  11159. }
  11160. return true;
  11161. }
  11162. bool
  11163. GlobOpt::IsWorthSpecializingToInt32Branch(IR::Instr * instr, Value * src1Val, Value * src2Val)
  11164. {
  11165. if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
  11166. {
  11167. StackSym *sym1 = instr->GetSrc1()->AsRegOpnd()->m_sym;
  11168. if (this->IsInt32TypeSpecialized(sym1, this->currentBlock) == false)
  11169. {
  11170. if (!src2Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc2()->IsRegOpnd())
  11171. {
  11172. StackSym *sym2 = instr->GetSrc2()->AsRegOpnd()->m_sym;
  11173. if (this->IsInt32TypeSpecialized(sym2, this->currentBlock) == false)
  11174. {
  11175. // Type specializing a Br itself isn't worth it, unless one src
  11176. // is already type specialized
  11177. return false;
  11178. }
  11179. }
  11180. }
  11181. }
  11182. return true;
  11183. }
  11184. bool
  11185. GlobOpt::TryOptConstFoldBrFalse(
  11186. IR::Instr *const instr,
  11187. Value *const srcValue,
  11188. const int32 min,
  11189. const int32 max)
  11190. {
  11191. Assert(instr);
  11192. Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
  11193. Assert(srcValue);
  11194. if(!(DoAggressiveIntTypeSpec() ? srcValue->GetValueInfo()->IsLikelyInt() : srcValue->GetValueInfo()->IsInt()))
  11195. {
  11196. return false;
  11197. }
  11198. if(ValueInfo::IsEqualTo(srcValue, min, max, nullptr, 0, 0))
  11199. {
  11200. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrFalse_A, instr, srcValue);
  11201. return true;
  11202. }
  11203. if(ValueInfo::IsNotEqualTo(srcValue, min, max, nullptr, 0, 0))
  11204. {
  11205. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrTrue_A, instr, srcValue);
  11206. return true;
  11207. }
  11208. return false;
  11209. }
  11210. bool
  11211. GlobOpt::TryOptConstFoldBrEqual(
  11212. IR::Instr *const instr,
  11213. const bool branchOnEqual,
  11214. Value *const src1Value,
  11215. const int32 min1,
  11216. const int32 max1,
  11217. Value *const src2Value,
  11218. const int32 min2,
  11219. const int32 max2)
  11220. {
  11221. Assert(instr);
  11222. Assert(src1Value);
  11223. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  11224. Assert(src2Value);
  11225. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  11226. if(ValueInfo::IsEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11227. {
  11228. OptConstFoldBr(branchOnEqual, instr, src1Value, src2Value);
  11229. return true;
  11230. }
  11231. if(ValueInfo::IsNotEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11232. {
  11233. OptConstFoldBr(!branchOnEqual, instr, src1Value, src2Value);
  11234. return true;
  11235. }
  11236. return false;
  11237. }
  11238. bool
  11239. GlobOpt::TryOptConstFoldBrGreaterThan(
  11240. IR::Instr *const instr,
  11241. const bool branchOnGreaterThan,
  11242. Value *const src1Value,
  11243. const int32 min1,
  11244. const int32 max1,
  11245. Value *const src2Value,
  11246. const int32 min2,
  11247. const int32 max2)
  11248. {
  11249. Assert(instr);
  11250. Assert(src1Value);
  11251. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  11252. Assert(src2Value);
  11253. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  11254. if(ValueInfo::IsGreaterThan(src1Value, min1, max1, src2Value, min2, max2))
  11255. {
  11256. OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
  11257. return true;
  11258. }
  11259. if(ValueInfo::IsLessThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11260. {
  11261. OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
  11262. return true;
  11263. }
  11264. return false;
  11265. }
  11266. bool
  11267. GlobOpt::TryOptConstFoldBrGreaterThanOrEqual(
  11268. IR::Instr *const instr,
  11269. const bool branchOnGreaterThanOrEqual,
  11270. Value *const src1Value,
  11271. const int32 min1,
  11272. const int32 max1,
  11273. Value *const src2Value,
  11274. const int32 min2,
  11275. const int32 max2)
  11276. {
  11277. Assert(instr);
  11278. Assert(src1Value);
  11279. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  11280. Assert(src2Value);
  11281. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  11282. if(ValueInfo::IsGreaterThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  11283. {
  11284. OptConstFoldBr(branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  11285. return true;
  11286. }
  11287. if(ValueInfo::IsLessThan(src1Value, min1, max1, src2Value, min2, max2))
  11288. {
  11289. OptConstFoldBr(!branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  11290. return true;
  11291. }
  11292. return false;
  11293. }
  11294. bool
  11295. GlobOpt::TryOptConstFoldBrUnsignedLessThan(
  11296. IR::Instr *const instr,
  11297. const bool branchOnLessThan,
  11298. Value *const src1Value,
  11299. const int32 min1,
  11300. const int32 max1,
  11301. Value *const src2Value,
  11302. const int32 min2,
  11303. const int32 max2)
  11304. {
  11305. Assert(DoConstFold());
  11306. Assert(!IsLoopPrePass());
  11307. if(!src1Value ||
  11308. !src2Value ||
  11309. !(
  11310. DoAggressiveIntTypeSpec()
  11311. ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
  11312. : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
  11313. ))
  11314. {
  11315. return false;
  11316. }
  11317. uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
  11318. uint uMax1 = max((uint)min1, (uint)max1);
  11319. uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
  11320. uint uMax2 = max((uint)min2, (uint)max2);
  11321. if (uMax1 < uMin2)
  11322. {
  11323. // Range 1 is always lesser than Range 2
  11324. OptConstFoldBr(branchOnLessThan, instr, src1Value, src2Value);
  11325. return true;
  11326. }
  11327. if (uMin1 >= uMax2)
  11328. {
  11329. // Range 2 is always lesser than Range 1
  11330. OptConstFoldBr(!branchOnLessThan, instr, src1Value, src2Value);
  11331. return true;
  11332. }
  11333. return false;
  11334. }
  11335. bool
  11336. GlobOpt::TryOptConstFoldBrUnsignedGreaterThan(
  11337. IR::Instr *const instr,
  11338. const bool branchOnGreaterThan,
  11339. Value *const src1Value,
  11340. const int32 min1,
  11341. const int32 max1,
  11342. Value *const src2Value,
  11343. const int32 min2,
  11344. const int32 max2)
  11345. {
  11346. Assert(DoConstFold());
  11347. Assert(!IsLoopPrePass());
  11348. if(!src1Value ||
  11349. !src2Value ||
  11350. !(
  11351. DoAggressiveIntTypeSpec()
  11352. ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
  11353. : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
  11354. ))
  11355. {
  11356. return false;
  11357. }
  11358. uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
  11359. uint uMax1 = max((uint)min1, (uint)max1);
  11360. uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
  11361. uint uMax2 = max((uint)min2, (uint)max2);
  11362. if (uMin1 > uMax2)
  11363. {
  11364. // Range 1 is always greater than Range 2
  11365. OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
  11366. return true;
  11367. }
  11368. if (uMax1 <= uMin2)
  11369. {
  11370. // Range 2 is always greater than Range 1
  11371. OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
  11372. return true;
  11373. }
  11374. return false;
  11375. }
void
GlobOpt::SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info)
{
    // Attaches path-dependent value info to one of the two outgoing edges of
    // the current block: the branch-taken edge when conditionToBranch is true,
    // or the fall-through edge otherwise.
    Assert(this->currentBlock->GetSuccList()->Count() == 2);

    // The fall-through successor is the block that starts immediately after
    // the current one; any other successor is reached by taking the branch.
    IR::Instr * fallthrough = this->currentBlock->GetNext()->GetFirstInstr();
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, this->currentBlock->GetSuccList())
    {
        // Match the requested edge: taken edge when conditionToBranch, else
        // the fall-through edge.
        if (conditionToBranch == (edge->GetSucc()->GetFirstInstr() != fallthrough))
        {
            edge->SetPathDependentInfo(info, alloc);
            return;
        }
    }
    NEXT_SLISTBASECOUNTED_ENTRY;

    // With exactly two successors, one of the edges above must have matched.
    Assert(false);
}
PathDependentInfoToRestore
GlobOpt::UpdatePathDependentInfo(PathDependentInfo *const info)
{
    // Narrows the value infos of the left/right values of a branch based on
    // the relationship (==, !=, <, <=, >, >=) known to hold on the path being
    // processed. Returns the previous value infos so the caller can undo the
    // narrowing with RestorePathDependentInfo when leaving the path.
    Assert(info);

    if(!info->HasInfo())
    {
        // Nothing to apply, nothing to restore.
        return PathDependentInfoToRestore();
    }

    // Select the bounds-update member functions for each side. Asymmetric
    // relationships are mirrored for the right value (e.g. left > right
    // implies right < left).
    decltype(&GlobOpt::UpdateIntBoundsForEqual) UpdateIntBoundsForLeftValue, UpdateIntBoundsForRightValue;
    switch(info->Relationship())
    {
        case PathDependentRelationship::Equal:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForEqual;
            break;

        case PathDependentRelationship::NotEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            break;

        case PathDependentRelationship::GreaterThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            break;

        case PathDependentRelationship::GreaterThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThan;
            break;

        case PathDependentRelationship::LessThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            break;

        case PathDependentRelationship::LessThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            break;

        default:
            Assert(false);
            __assume(false);
    }

    // Capture the current constant bounds of both sides. The right side may be
    // a constant rather than a tracked value, in which case its "bounds" are
    // the single constant.
    ValueInfo *leftValueInfo = info->LeftValue()->GetValueInfo();
    IntConstantBounds leftConstantBounds;
    AssertVerify(leftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));

    ValueInfo *rightValueInfo;
    IntConstantBounds rightConstantBounds;
    if(info->RightValue())
    {
        rightValueInfo = info->RightValue()->GetValueInfo();
        AssertVerify(rightValueInfo->TryGetIntConstantBounds(&rightConstantBounds, true));
    }
    else
    {
        rightValueInfo = nullptr;
        rightConstantBounds = IntConstantBounds(info->RightConstantValue(), info->RightConstantValue());
    }

    // Narrow the left value first.
    ValueInfo *const newLeftValueInfo =
        (this->*UpdateIntBoundsForLeftValue)(
            info->LeftValue(),
            leftConstantBounds,
            info->RightValue(),
            rightConstantBounds,
            true);
    if(newLeftValueInfo)
    {
        ChangeValueInfo(nullptr, info->LeftValue(), newLeftValueInfo);
        // Refresh the left bounds so the right-side update below sees the
        // narrowed left range.
        AssertVerify(newLeftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));
    }
    else
    {
        // No change was made; signal "nothing to restore" for this side.
        leftValueInfo = nullptr;
    }

    // Then narrow the right value against the (possibly refreshed) left bounds.
    ValueInfo *const newRightValueInfo =
        (this->*UpdateIntBoundsForRightValue)(
            info->RightValue(),
            rightConstantBounds,
            info->LeftValue(),
            leftConstantBounds,
            true);
    if(newRightValueInfo)
    {
        ChangeValueInfo(nullptr, info->RightValue(), newRightValueInfo);
    }
    else
    {
        // No change was made; signal "nothing to restore" for this side.
        rightValueInfo = nullptr;
    }

    // Return the pre-narrowing value infos (nullptr where unchanged).
    return PathDependentInfoToRestore(leftValueInfo, rightValueInfo);
}
  11479. void
  11480. GlobOpt::RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore)
  11481. {
  11482. Assert(info);
  11483. if(infoToRestore.LeftValueInfo())
  11484. {
  11485. Assert(info->LeftValue());
  11486. ChangeValueInfo(nullptr, info->LeftValue(), infoToRestore.LeftValueInfo());
  11487. }
  11488. if(infoToRestore.RightValueInfo())
  11489. {
  11490. Assert(info->RightValue());
  11491. ChangeValueInfo(nullptr, info->RightValue(), infoToRestore.RightValueInfo());
  11492. }
  11493. }
bool
GlobOpt::TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst /* = false */)
{
    // Attempts to float64-type-specialize a unary (or transfer-style) opcode.
    // Returns true if the instruction was specialized; *pDstVal receives the
    // dst's new value when a dst is processed.
    IR::Instr *&instr = *pInstr;
    IR::Opnd *src1;
    IR::Opnd *dst;
    Js::OpCode opcode = instr->m_opcode;
    Value *valueToTransfer = nullptr;

    Assert(src1Val && src1Val->GetValueInfo()->IsLikelyNumber() || OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    if (!this->DoFloatTypeSpec())
    {
        return false;
    }

    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (opcode)
        {
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Only the src needs specializing for an inline built-in arg out.
            skipDst = true;
            // fall-through

        case Js::OpCode::Ld_A:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if (this->IsFloat64TypeSpecialized(sym, this->currentBlock) == false)
                {
                    // Type specializing an Ld_A isn't worth it, unless the src
                    // is already type specialized
                    return false;
                }
            }
            if (instr->m_opcode == Js::OpCode::Ld_A)
            {
                // Pure transfer: the dst takes the src's value.
                valueToTransfer = src1Val;
            }
            break;

        case Js::OpCode::Neg_A:
            break;

        case Js::OpCode::Conv_Num:
            // Conv_Num on a float-specialized src becomes a plain transfer.
            Assert(src1Val);
            opcode = Js::OpCode::Ld_A;
            valueToTransfer = src1Val;
            if (!src1Val->GetValueInfo()->IsNumber())
            {
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                // Src isn't known to be a number: give dst a fresh float value
                // rather than transferring the src's value.
                valueToTransfer = NewGenericValue(ValueType::Float, instr->GetDst()->GetStackSym());
                if (this->IsFloat64TypeSpecialized(sym, this->currentBlock) == false)
                {
                    // Set the dst as a nonDeadStore. We want to keep the Ld_A to prevent the FromVar from
                    // being dead-stored, as it could cause implicit calls.
                    dst = instr->GetDst();
                    dst->AsRegOpnd()->m_dontDeadStore = true;
                }
            }
            break;

        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        case Js::OpCode::StElemC:
            // Element stores have their own specialization path.
            return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

        default:
            return false;
        }
    }

    // Make sure the srcs are specialized
    src1 = instr->GetSrc1();

    // Use original val when calling toFloat64 as this is what we'll use to try hoisting the fromVar if we're in a loop.
    this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);

    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            this->TypeSpecializeFloatDst(instr, valueToTransfer, src1Val, nullptr, pDstVal);
            if (!this->IsLoopPrePass())
            {
                // No opcode rewrite on prepass.
                instr->m_opcode = opcode;
            }
        }
    }

    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif

    return true;
}
  11586. // Unconditionally type-spec dst to float.
  11587. void
  11588. GlobOpt::TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal)
  11589. {
  11590. IR::Opnd* dst = instr->GetDst();
  11591. Assert(dst);
  11592. AssertMsg(dst->IsRegOpnd(), "What else?");
  11593. this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
  11594. if(valToTransfer)
  11595. {
  11596. *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
  11597. InsertNewValue(*pDstVal, dst);
  11598. }
  11599. else
  11600. {
  11601. *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Value, src2Value);
  11602. }
  11603. }
  11604. void
  11605. GlobOpt::TypeSpecializeSimd128Dst(IRType type, IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value **pDstVal)
  11606. {
  11607. IR::Opnd* dst = instr->GetDst();
  11608. Assert(dst);
  11609. AssertMsg(dst->IsRegOpnd(), "What else?");
  11610. this->ToSimd128Dst(type, instr, dst->AsRegOpnd(), this->currentBlock);
  11611. if (valToTransfer)
  11612. {
  11613. *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
  11614. InsertNewValue(*pDstVal, dst);
  11615. }
  11616. else
  11617. {
  11618. *pDstVal = NewGenericValue(GetValueTypeFromIRType(type), instr->GetDst());
  11619. }
  11620. }
bool
GlobOpt::TypeSpecializeLdLen(
    IR::Instr * *const instrRef,
    Value * *const src1ValueRef,
    Value * *const dstValueRef,
    bool *const forceInvariantHoistingRef)
{
    // Int-specializes an LdLen_A (length load). When the base opnd already has
    // a hoisted length sym, the LdLen is rewritten into a Ld_I4 of that sym
    // with no bailout; otherwise the dst is int-specialized with a
    // BailOutOnIrregularLength bailout. Returns false if LdLen int-spec is not
    // enabled for this instruction/base value type.
    Assert(instrRef);
    IR::Instr *&instr = *instrRef;
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(src1ValueRef);
    Value *&src1Value = *src1ValueRef;
    Assert(dstValueRef);
    Value *&dstValue = *dstValueRef;
    Assert(forceInvariantHoistingRef);
    bool &forceInvariantHoisting = *forceInvariantHoistingRef;

    if(!DoLdLenIntSpec(instr, instr->GetSrc1()->GetValueType()))
    {
        return false;
    }

    IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
    if(!IsLoopPrePass())
    {
        IR::RegOpnd *const baseOpnd = instr->GetSrc1()->AsRegOpnd();
        if(baseOpnd->IsArrayRegOpnd())
        {
            StackSym *const lengthSym = baseOpnd->AsArrayRegOpnd()->LengthSym();
            if(lengthSym)
            {
                // A length sym is already tracked for this array: replace the
                // LdLen with a plain Ld_I4 of that sym and drop the bailout.
                CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Ld_I4;
                instr->ReplaceSrc1(IR::RegOpnd::New(lengthSym, lengthSym->GetType(), func));
                instr->ClearBailOutInfo();

                // Find the hoisted length value
                Value *const lengthValue = FindValue(lengthSym);
                Assert(lengthValue);
                src1Value = lengthValue;
                ValueInfo *const lengthValueInfo = lengthValue->GetValueInfo();
                Assert(lengthValueInfo->GetSymStore() != lengthSym);
                IntConstantBounds lengthConstantBounds;
                AssertVerify(lengthValueInfo->TryGetIntConstantBounds(&lengthConstantBounds));
                Assert(lengthConstantBounds.LowerBound() >= 0);

                // Int-specialize, and transfer the value to the dst
                TypeSpecializeIntDst(
                    instr,
                    Js::OpCode::LdLen_A,
                    src1Value,
                    src1Value,
                    nullptr,
                    bailOutKind,
                    lengthConstantBounds.LowerBound(),
                    lengthConstantBounds.UpperBound(),
                    &dstValue);

                // Try to force hoisting the Ld_I4 so that the length will have an invariant sym store that can be
                // copy-propped. Invariant hoisting does not automatically hoist Ld_I4.
                forceInvariantHoisting = true;
                return true;
            }
        }

        if (instr->HasBailOutInfo())
        {
            // Merge the irregular-length bailout into the existing
            // mark-temp-object bailout already on the instruction.
            Assert(instr->GetBailOutKind() == IR::BailOutMarkTempObject);
            bailOutKind = IR::BailOutOnIrregularLength | IR::BailOutMarkTempObject;
            instr->SetBailOutKind(bailOutKind);
        }
        else
        {
            Assert(bailOutKind == IR::BailOutOnIrregularLength);
            GenerateBailAtOperation(&instr, bailOutKind);
        }
    }

    // No hoisted length sym (or loop prepass): the dst is a non-negative int32
    // guaranteed by the bailout.
    TypeSpecializeIntDst(
        instr,
        Js::OpCode::LdLen_A,
        nullptr,
        nullptr,
        nullptr,
        bailOutKind,
        0,
        INT32_MAX,
        &dstValue);
    return true;
}
  11705. bool
  11706. GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal)
  11707. {
  11708. IR::Opnd *src1;
  11709. IR::Opnd *src2;
  11710. IR::Opnd *dst;
  11711. bool allowUndefinedOrNullSrc1 = true;
  11712. bool allowUndefinedOrNullSrc2 = true;
  11713. bool skipSrc1 = false;
  11714. bool skipSrc2 = false;
  11715. bool skipDst = false;
  11716. if (!this->DoFloatTypeSpec())
  11717. {
  11718. return false;
  11719. }
  11720. // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
  11721. if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  11722. {
  11723. switch (instr->m_opcode)
  11724. {
  11725. case Js::OpCode::Sub_A:
  11726. case Js::OpCode::Mul_A:
  11727. case Js::OpCode::Div_A:
  11728. case Js::OpCode::Expo_A:
  11729. // Avoid if one source is known not to be a number.
  11730. if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
  11731. {
  11732. return false;
  11733. }
  11734. break;
  11735. case Js::OpCode::BrSrEq_A:
  11736. case Js::OpCode::BrSrNeq_A:
  11737. case Js::OpCode::BrEq_A:
  11738. case Js::OpCode::BrNeq_A:
  11739. case Js::OpCode::BrSrNotEq_A:
  11740. case Js::OpCode::BrNotEq_A:
  11741. case Js::OpCode::BrSrNotNeq_A:
  11742. case Js::OpCode::BrNotNeq_A:
  11743. // Avoid if one source is known not to be a number.
  11744. if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
  11745. {
  11746. return false;
  11747. }
  11748. // Undef == Undef, but +Undef != +Undef
  11749. // 0.0 != null, but 0.0 == +null
  11750. //
  11751. // So Bailout on anything but numbers for both src1 and src2
  11752. allowUndefinedOrNullSrc1 = false;
  11753. allowUndefinedOrNullSrc2 = false;
  11754. break;
  11755. case Js::OpCode::BrGt_A:
  11756. case Js::OpCode::BrGe_A:
  11757. case Js::OpCode::BrLt_A:
  11758. case Js::OpCode::BrLe_A:
  11759. case Js::OpCode::BrNotGt_A:
  11760. case Js::OpCode::BrNotGe_A:
  11761. case Js::OpCode::BrNotLt_A:
  11762. case Js::OpCode::BrNotLe_A:
  11763. // Avoid if one source is known not to be a number.
  11764. if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
  11765. {
  11766. return false;
  11767. }
  11768. break;
  11769. case Js::OpCode::Add_A:
  11770. // For Add, we need both sources to be Numbers, otherwise it could be a string concat
  11771. if (!src1Val || !src2Val || !(src1Val->GetValueInfo()->IsLikelyNumber() && src2Val->GetValueInfo()->IsLikelyNumber()))
  11772. {
  11773. return false;
  11774. }
  11775. break;
  11776. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  11777. skipSrc2 = true;
  11778. skipDst = true;
  11779. break;
  11780. default:
  11781. return false;
  11782. }
  11783. }
  11784. else
  11785. {
  11786. switch (instr->m_opcode)
  11787. {
  11788. case Js::OpCode::InlineArrayPush:
  11789. bool isFloatConstMissingItem = src2Val->GetValueInfo()->IsFloatConstant();
  11790. if(isFloatConstMissingItem)
  11791. {
  11792. FloatConstType floatValue = src2Val->GetValueInfo()->AsFloatConstant()->FloatValue();
  11793. isFloatConstMissingItem = Js::SparseArraySegment<double>::IsMissingItem(&floatValue);
  11794. }
  11795. // Don't specialize if the element is not likelyNumber - we will surely bailout
  11796. if(!(src2Val->GetValueInfo()->IsLikelyNumber()) || isFloatConstMissingItem)
  11797. {
  11798. return false;
  11799. }
  11800. // Only specialize the Second source - element
  11801. skipSrc1 = true;
  11802. skipDst = true;
  11803. allowUndefinedOrNullSrc2 = false;
  11804. break;
  11805. }
  11806. }
  11807. // Make sure the srcs are specialized
  11808. if(!skipSrc1)
  11809. {
  11810. src1 = instr->GetSrc1();
  11811. this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, (allowUndefinedOrNullSrc1 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
  11812. }
  11813. if (!skipSrc2)
  11814. {
  11815. src2 = instr->GetSrc2();
  11816. this->ToFloat64(instr, src2, this->currentBlock, src2Val, nullptr, (allowUndefinedOrNullSrc2 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
  11817. }
  11818. if (!skipDst)
  11819. {
  11820. dst = instr->GetDst();
  11821. if (dst)
  11822. {
  11823. *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Val, src2Val);
  11824. AssertMsg(dst->IsRegOpnd(), "What else?");
  11825. this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
  11826. }
  11827. }
  11828. GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
  11829. #if ENABLE_DEBUG_CONFIG_OPTIONS
  11830. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
  11831. {
  11832. Output::Print(_u("Type specialized to FLOAT: "));
  11833. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  11834. }
  11835. #endif
  11836. return true;
  11837. }
// Attempts to type-specialize the source of a store-element (StElem*) into a typed array
// or native array, so the stored value is kept as a machine int32/float64 instead of a Var.
// pInstr   - in/out: the store instruction; may be replaced when a bailout instr is generated.
// src1Val  - value tracked for the stored source (may be nullptr).
// pDstVal  - out: unused here for stores; dst is an IndirOpnd, not a reg.
// Returns true iff the source was type-specialized (toType ended up != TyVar).
bool
GlobOpt::TypeSpecializeStElem(IR::Instr ** pInstr, Value *src1Val, Value **pDstVal)
{
    IR::Instr *&instr = *pInstr;
    IR::RegOpnd *baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    ValueType baseValueType(baseOpnd->GetValueType());
    // Bail out of the optimization (not the JIT) when the relevant type-spec phases are
    // disabled for this array kind, the base is not a likely optimized typed/native array,
    // or the instruction participates in the stack-args optimization.
    if (instr->DoStackArgsOpt(this->func) ||
        (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
        (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
        !(baseValueType.IsLikelyOptimizedTypedArray() || baseValueType.IsLikelyNativeArray()))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't type specialize array access, because typed array type specialization is disabled, or base is not an optimized typed array.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because %s.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr,
                instr->DoStackArgsOpt(this->func) ?
                    _u("instruction uses the arguments object") :
                    _u("typed array type specialization is disabled, or base is not an optimized typed array"));
            Output::Flush();
        }
        return false;
    }
    // Source is either a register or an int constant (asserted below).
    Assert(instr->GetSrc1()->IsRegOpnd() || (src1Val && src1Val->GetValueInfo()->HasIntConstantValue()));
    StackSym *sym = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd()->m_sym : nullptr;

    // Only type specialize the source of store element if the source symbol is already type specialized to int or float.
    if (sym)
    {
        if (baseValueType.IsLikelyNativeArray())
        {
            // Gently coerce these src's into native if it seems likely to work.
            // Otherwise we can't use the fast path to store.
            // But don't try to put a float-specialized number into an int array this way.
            if (!(
                    this->IsInt32TypeSpecialized(sym, this->currentBlock) ||
                    (
                        src1Val &&
                        (
                            DoAggressiveIntTypeSpec()
                                ? src1Val->GetValueInfo()->IsLikelyInt()
                                : src1Val->GetValueInfo()->IsInt()
                        )
                    )
                ))
            {
                // Not int-friendly: a float source is acceptable only for float-element arrays.
                if (!(
                        this->IsFloat64TypeSpecialized(sym, this->currentBlock) ||
                        (src1Val && src1Val->GetValueInfo()->IsLikelyNumber())
                    ) ||
                    baseValueType.HasIntElements())
                {
                    return false;
                }
            }
        }
        else if (!this->IsInt32TypeSpecialized(sym, this->currentBlock) && !this->IsFloat64TypeSpecialized(sym, this->currentBlock))
        {
            // Typed-array store: don't force a conversion if the source isn't already specialized.
            GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because src is not type specialized.\n"));
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because src is not specialized.\n"),
                    this->func->GetJITFunctionBody()->GetDisplayName(),
                    this->func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                    baseValueTypeStr);
                Output::Flush();
            }
            return false;
        }
    }

    // A constant that happens to be the native-int-array "missing item" sentinel cannot be
    // stored as a raw int32; give up on specialization in that case.
    int32 src1IntConstantValue;
    if(baseValueType.IsLikelyNativeIntArray() && src1Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue))
    {
        if(Js::SparseArraySegment<int32>::IsMissingItem(&src1IntConstantValue))
        {
            return false;
        }
    }

    // Note: doing ToVarUses to make sure we do get the int32 version of the index before trying to access its value in
    // ShouldExpectConventionalArrayIndexValue. Not sure why that never gave us a problem before.
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();

    // Make sure we use the int32 version of the index operand symbol, if available. Otherwise, ensure the var symbol is live (by
    // potentially inserting a ToVar).
    this->ToVarUses(instr, dst, /* isDst = */ true, nullptr);

    if (!ShouldExpectConventionalArrayIndexValue(dst))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because index is negative or likely not int.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because index is negative or likely not int.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr);
            Output::Flush();
        }
        return false;
    }

    // Pick the target machine type from the array's element type. TyVar means "don't specialize".
    IRType toType = TyVar;
    bool isLossyAllowed = true;
    IR::BailOutKind arrayBailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;

    switch(baseValueType.GetObjectType())
    {
    // All int-element typed arrays (<= 32 bits) store through int32.
    case ObjectType::Int8Array:
    case ObjectType::Uint8Array:
    case ObjectType::Int16Array:
    case ObjectType::Uint16Array:
    case ObjectType::Int32Array:
    case ObjectType::Int8VirtualArray:
    case ObjectType::Uint8VirtualArray:
    case ObjectType::Int16VirtualArray:
    case ObjectType::Uint16VirtualArray:
    case ObjectType::Int32VirtualArray:
    case ObjectType::Int8MixedArray:
    case ObjectType::Uint8MixedArray:
    case ObjectType::Int16MixedArray:
    case ObjectType::Uint16MixedArray:
    case ObjectType::Int32MixedArray:
    Int32Array: // also the goto target for native int arrays (default case below)
        toType = TyInt32;
        break;

    case ObjectType::Uint32Array:
    case ObjectType::Uint32VirtualArray:
    case ObjectType::Uint32MixedArray:
        // Uint32Arrays may store values that overflow int32. If the value being stored comes from a symbol that's
        // already losslessly type specialized to int32, we'll use it. Otherwise, if we only have a float64 specialized
        // value, we don't want to force bailout if it doesn't fit in int32. Instead, we'll emit conversion in the
        // lowerer, and handle overflow, if necessary.
        if (!sym || this->IsInt32TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyInt32;
        }
        else if (this->IsFloat64TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyFloat64;
        }
        break;

    // All float-element typed arrays store through float64.
    case ObjectType::Float32Array:
    case ObjectType::Float64Array:
    case ObjectType::Float32VirtualArray:
    case ObjectType::Float32MixedArray:
    case ObjectType::Float64VirtualArray:
    case ObjectType::Float64MixedArray:
    Float64Array: // also the goto target for native float arrays (default case below)
        toType = TyFloat64;
        break;

    case ObjectType::Uint8ClampedArray:
    case ObjectType::Uint8ClampedVirtualArray:
    case ObjectType::Uint8ClampedMixedArray:
        // Uint8ClampedArray requires rounding (as opposed to truncation) of floating point values. If source symbol is
        // float type specialized, type specialize this instruction to float as well, and handle rounding in the
        // lowerer.
        if (!sym || this->IsInt32TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyInt32;
            isLossyAllowed = false;
        }
        else if (this->IsFloat64TypeSpecialized(sym, this->currentBlock))
        {
            toType = TyFloat64;
        }
        break;

    default:
        // Native (non-typed) arrays: lossless conversion is required and a different bailout
        // kind applies; then share the int/float paths above via goto.
        Assert(baseValueType.IsLikelyNativeArray());
        isLossyAllowed = false;
        arrayBailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
        if(baseValueType.HasIntElements())
        {
            goto Int32Array;
        }
        Assert(baseValueType.HasFloatElements());
        goto Float64Array;
    }

    if (toType != TyVar)
    {
        GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr,
                toType == TyInt32 ? _u("int32") : _u("float64"));
            Output::Flush();
        }

        // Convert the source to the chosen machine type (may insert a FromVar with bailout).
        IR::BailOutKind bailOutKind = ((toType == TyInt32) ? IR::BailOutIntOnly : IR::BailOutNumberOnly);
        this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, toType, bailOutKind, /* lossy = */ isLossyAllowed);

        if (!this->IsLoopPrePass())
        {
            bool bConvertToBailoutInstr = true;
            // Definite StElemC doesn't need bailout, because it can't fail or cause conversion.
            if (instr->m_opcode == Js::OpCode::StElemC && baseValueType.IsObject())
            {
                if (baseValueType.HasIntElements())
                {
                    //Native int array requires a missing element check & bailout
                    int32 min = INT32_MIN;
                    int32 max = INT32_MAX;
                    if (src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, false))
                    {
                        // Bailout only needed when the stored value's range can include the
                        // missing-item sentinel.
                        bConvertToBailoutInstr = ((min <= Js::JavascriptNativeIntArray::MissingItem) && (max >= Js::JavascriptNativeIntArray::MissingItem));
                    }
                }
                else
                {
                    bConvertToBailoutInstr = false;
                }
            }

            if (bConvertToBailoutInstr)
            {
                if(instr->HasBailOutInfo())
                {
                    // Merge the array-access bailout kind into whatever bailout is already attached.
                    const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
                    Assert(
                        (
                            !(oldBailOutKind & ~IR::BailOutKindBits) ||
                            (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                        ) &&
                        !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
                    if(arrayBailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
                    {
                        // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                        // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                        // bails out for the right reason.
                        instr->SetBailOutKind(
                            arrayBailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
                    }
                    else
                    {
                        // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                        // calls to occur, so it must be merged in to eliminate generating the helper call.
                        Assert(arrayBailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                        instr->SetBailOutKind(oldBailOutKind | arrayBailOutKind);
                    }
                }
                else
                {
                    GenerateBailAtOperation(&instr, arrayBailOutKind);
                }
            }
        }
    }
    else
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because the source was not already specialized.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because of array type.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr);
            Output::Flush();
        }
    }
    return toType != TyVar;
}
// Ensures that the symbols used by 'opnd' are live in the form the instruction needs:
// var-typed uses get a ToVar inserted when the var sym isn't live, and indir index
// operands are int-specialized (ToInt32) when profitable.
// instr  - the instruction using 'opnd'; conversion instrs are inserted relative to it.
// isDst  - true when 'opnd' is the instruction's destination (reg dsts need no ToVar).
// val    - known value for a reg opnd, or nullptr to look it up.
// Returns the (possibly replaced) instruction.
IR::Instr *
GlobOpt::ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val)
{
    Sym *sym;
    switch (opnd->GetKind())
    {
    case IR::OpndKindReg:
        // A reg source must be live as a var; dsts are defined here, so no conversion needed.
        if (!isDst && !this->blockData.liveVarSyms->Test(opnd->AsRegOpnd()->m_sym->m_id))
        {
            instr = this->ToVar(instr, opnd->AsRegOpnd(), this->currentBlock, val, true);
        }
        break;

    case IR::OpndKindSym:
        // For a property access, the base (object) stack sym must be live as a var.
        sym = opnd->AsSymOpnd()->m_sym;
        if (sym->IsPropertySym() && !this->blockData.liveVarSyms->Test(sym->AsPropertySym()->m_stackSym->m_id)
            && sym->AsPropertySym()->m_stackSym->IsVar())
        {
            StackSym *propertyBase = sym->AsPropertySym()->m_stackSym;
            IR::RegOpnd *newOpnd = IR::RegOpnd::New(propertyBase, TyVar, instr->m_func);
            instr = this->ToVar(instr, newOpnd, this->currentBlock, this->FindValue(propertyBase), true);
        }
        break;

    case IR::OpndKindIndir:
        // The base of an indir must be live as a var.
        IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
        if (!this->blockData.liveVarSyms->Test(baseOpnd->m_sym->m_id))
        {
            instr = this->ToVar(instr, baseOpnd, this->currentBlock, this->FindValue(baseOpnd->m_sym), true);
        }
        IR::RegOpnd *indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
        if (indexOpnd && !indexOpnd->m_sym->IsTypeSpec())
        {
            // Int-specialize the index when it is (likely) int and the relevant phases allow it;
            // otherwise fall back to making the index live as a var.
            if((indexOpnd->GetValueType().IsInt()
                    ? !IsTypeSpecPhaseOff(func)
                    : indexOpnd->GetValueType().IsLikelyInt() && DoAggressiveIntTypeSpec()) && !GetIsAsmJSFunc()) // typespec is disabled for asmjs
            {
                StackSym *const indexVarSym = indexOpnd->m_sym;
                Value *const indexValue = FindValue(indexVarSym);
                Assert(indexValue);
                Assert(indexValue->GetValueInfo()->IsLikelyInt());
                // May replace the indir's index with the int32-specialized sym (with bailout).
                ToInt32(instr, indexOpnd, currentBlock, indexValue, opnd->AsIndirOpnd(), false);
                Assert(indexValue->GetValueInfo()->IsInt());
                if(!IsLoopPrePass())
                {
                    // Re-fetch the index opnd: ToInt32 may have swapped it for the type-spec sym.
                    indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
                    if(indexOpnd)
                    {
                        Assert(indexOpnd->m_sym->IsTypeSpec());
                        IntConstantBounds indexConstantBounds;
                        AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
                        // If the index is provably >= 0, mark it unsigned so later phases can
                        // skip the negative-index path.
                        if(ValueInfo::IsGreaterThanOrEqualTo(
                                indexValue,
                                indexConstantBounds.LowerBound(),
                                indexConstantBounds.UpperBound(),
                                nullptr,
                                0,
                                0))
                        {
                            indexOpnd->SetType(TyUint32);
                        }
                    }
                }
            }
            else if (!this->blockData.liveVarSyms->Test(indexOpnd->m_sym->m_id))
            {
                instr = this->ToVar(instr, indexOpnd, this->currentBlock, this->FindValue(indexOpnd->m_sym), true);
            }
        }
        break;
    }

    return instr;
}
  12186. IR::Instr *
  12187. GlobOpt::ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *value, bool needsUpdate)
  12188. {
  12189. IR::Instr *newInstr;
  12190. StackSym *varSym = regOpnd->m_sym;
  12191. if (IsTypeSpecPhaseOff(this->func))
  12192. {
  12193. return instr;
  12194. }
  12195. if (this->IsLoopPrePass())
  12196. {
  12197. block->globOptData.liveVarSyms->Set(varSym->m_id);
  12198. return instr;
  12199. }
  12200. if (block->globOptData.liveVarSyms->Test(varSym->m_id))
  12201. {
  12202. // Already live, nothing to do
  12203. return instr;
  12204. }
  12205. if (!varSym->IsVar())
  12206. {
  12207. Assert(!varSym->IsTypeSpec());
  12208. // Leave non-vars alone.
  12209. return instr;
  12210. }
  12211. Assert(this->IsTypeSpecialized(varSym, block));
  12212. if (!value)
  12213. {
  12214. value = this->FindValue(block->globOptData.symToValueMap, varSym);
  12215. }
  12216. ValueInfo *valueInfo = value ? value->GetValueInfo() : nullptr;
  12217. if(valueInfo && valueInfo->IsInt())
  12218. {
  12219. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  12220. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  12221. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  12222. // lossy state.
  12223. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  12224. }
  12225. IRType fromType;
  12226. StackSym *typeSpecSym;
  12227. if (block->globOptData.liveInt32Syms->Test(varSym->m_id) && !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id))
  12228. {
  12229. fromType = TyInt32;
  12230. typeSpecSym = varSym->GetInt32EquivSym(this->func);
  12231. Assert(valueInfo);
  12232. Assert(valueInfo->IsInt());
  12233. }
  12234. else if (block->globOptData.liveFloat64Syms->Test(varSym->m_id))
  12235. {
  12236. fromType = TyFloat64;
  12237. typeSpecSym = varSym->GetFloat64EquivSym(this->func);
  12238. // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any non-number
  12239. // value, even ones that have already been generated before. Float-specialized non-number values cannot be converted
  12240. // back to Var since they will not go back to the original non-number value. The dead-store pass will update the bailout
  12241. // kind on already-generated FromVars based on this bit.
  12242. typeSpecSym->m_requiresBailOnNotNumber = true;
  12243. // A previous float conversion may have used BailOutPrimitiveButString, which does not change the value type to say
  12244. // definitely float, since it can also be a non-string primitive. The convert back to Var though, will cause that
  12245. // bailout kind to be changed to BailOutNumberOnly in the dead-store phase, so from the point of the initial conversion
  12246. // to float, that the value is definitely number. Since we don't know where the FromVar is, change the value type here.
  12247. if(valueInfo)
  12248. {
  12249. if(!valueInfo->IsNumber())
  12250. {
  12251. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12252. ChangeValueInfo(block, value, valueInfo);
  12253. regOpnd->SetValueType(valueInfo->Type());
  12254. }
  12255. }
  12256. else
  12257. {
  12258. value = NewGenericValue(ValueType::Float);
  12259. valueInfo = value->GetValueInfo();
  12260. SetValue(&block->globOptData, value, varSym);
  12261. regOpnd->SetValueType(valueInfo->Type());
  12262. }
  12263. }
  12264. else
  12265. {
  12266. // SIMD_JS
  12267. Assert(IsLiveAsSimd128(varSym, &block->globOptData));
  12268. if (IsLiveAsSimd128F4(varSym, &block->globOptData))
  12269. {
  12270. fromType = TySimd128F4;
  12271. }
  12272. else
  12273. {
  12274. Assert(IsLiveAsSimd128I4(varSym, &block->globOptData));
  12275. fromType = TySimd128I4;
  12276. }
  12277. if (valueInfo)
  12278. {
  12279. if (fromType == TySimd128F4 && !valueInfo->Type().IsSimd128Float32x4())
  12280. {
  12281. valueInfo = valueInfo->SpecializeToSimd128F4(alloc);
  12282. ChangeValueInfo(block, value, valueInfo);
  12283. regOpnd->SetValueType(valueInfo->Type());
  12284. }
  12285. else if (fromType == TySimd128I4 && !valueInfo->Type().IsSimd128Int32x4())
  12286. {
  12287. if (!valueInfo->Type().IsSimd128Int32x4())
  12288. {
  12289. valueInfo = valueInfo->SpecializeToSimd128I4(alloc);
  12290. ChangeValueInfo(block, value, valueInfo);
  12291. regOpnd->SetValueType(valueInfo->Type());
  12292. }
  12293. }
  12294. }
  12295. else
  12296. {
  12297. ValueType valueType = fromType == TySimd128F4 ? ValueType::GetSimd128(ObjectType::Simd128Float32x4) : ValueType::GetSimd128(ObjectType::Simd128Int32x4);
  12298. value = NewGenericValue(valueType);
  12299. valueInfo = value->GetValueInfo();
  12300. SetValue(&block->globOptData, value, varSym);
  12301. regOpnd->SetValueType(valueInfo->Type());
  12302. }
  12303. ValueType valueType = valueInfo->Type();
  12304. // Should be definite if type-specialized
  12305. Assert(valueType.IsSimd128());
  12306. typeSpecSym = varSym->GetSimd128EquivSym(fromType, this->func);
  12307. }
  12308. Assert(valueInfo);
  12309. int32 intConstantValue;
  12310. if (valueInfo->TryGetIntConstantValue(&intConstantValue))
  12311. {
  12312. // Lower will tag or create a number directly
  12313. newInstr = IR::Instr::New(Js::OpCode::LdC_A_I4, regOpnd,
  12314. IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func), instr->m_func);
  12315. }
  12316. else
  12317. {
  12318. IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, fromType, instr->m_func);
  12319. Js::OpCode opcode = Js::OpCode::ToVar;
  12320. regNew->SetIsJITOptimizedReg(true);
  12321. newInstr = IR::Instr::New(opcode, regOpnd, regNew, instr->m_func);
  12322. }
  12323. newInstr->SetByteCodeOffset(instr);
  12324. newInstr->GetDst()->AsRegOpnd()->SetIsJITOptimizedReg(true);
  12325. ValueType valueType = valueInfo->Type();
  12326. if(fromType == TyInt32)
  12327. {
  12328. #if !INT32VAR // All 32-bit ints are taggable on 64-bit architectures
  12329. IntConstantBounds constantBounds;
  12330. AssertVerify(valueInfo->TryGetIntConstantBounds(&constantBounds));
  12331. if(constantBounds.IsTaggable())
  12332. #endif
  12333. {
  12334. // The value is within the taggable range, so set the opnd value types to TaggedInt to avoid the overflow check
  12335. valueType = ValueType::GetTaggedInt();
  12336. }
  12337. }
  12338. newInstr->GetDst()->SetValueType(valueType);
  12339. newInstr->GetSrc1()->SetValueType(valueType);
  12340. IR::Instr *insertAfterInstr = instr->m_prev;
  12341. if (instr == block->GetLastInstr() &&
  12342. (instr->IsBranchInstr() || instr->m_opcode == Js::OpCode::BailTarget))
  12343. {
  12344. // Don't insert code between the branch and the preceding ByteCodeUses instrs...
  12345. while(insertAfterInstr->m_opcode == Js::OpCode::ByteCodeUses)
  12346. {
  12347. insertAfterInstr = insertAfterInstr->m_prev;
  12348. }
  12349. }
  12350. block->InsertInstrAfter(newInstr, insertAfterInstr);
  12351. block->globOptData.liveVarSyms->Set(varSym->m_id);
  12352. GOPT_TRACE_OPND(regOpnd, _u("Converting to var\n"));
  12353. if (block->loop)
  12354. {
  12355. Assert(!this->IsLoopPrePass());
  12356. this->TryHoistInvariant(newInstr, block, value, value, nullptr, false);
  12357. }
  12358. if (needsUpdate)
  12359. {
  12360. // Make sure that the kill effect of the ToVar instruction is tracked and that the kill of a property
  12361. // type is reflected in the current instruction.
  12362. this->ProcessKills(newInstr);
  12363. this->ValueNumberObjectType(newInstr->GetDst(), newInstr);
  12364. if (instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  12365. {
  12366. // Reprocess the load source. We need to reset the PropertySymOpnd fields first.
  12367. IR::PropertySymOpnd *propertySymOpnd = instr->GetSrc1()->AsPropertySymOpnd();
  12368. if (propertySymOpnd->IsTypeCheckSeqCandidate())
  12369. {
  12370. propertySymOpnd->SetTypeChecked(false);
  12371. propertySymOpnd->SetTypeAvailable(false);
  12372. propertySymOpnd->SetWriteGuardChecked(false);
  12373. }
  12374. this->FinishOptPropOp(instr, propertySymOpnd);
  12375. instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
  12376. }
  12377. }
  12378. return instr;
  12379. }
  12380. IR::Instr *
  12381. GlobOpt::ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy)
  12382. {
  12383. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyInt32, IR::BailOutIntOnly, lossy);
  12384. }
  12385. IR::Instr *
  12386. GlobOpt::ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind)
  12387. {
  12388. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyFloat64, bailOutKind);
  12389. }
  12390. IR::Instr *
  12391. GlobOpt::ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
  12392. {
  12393. Assert(bailOutKind != IR::BailOutInvalid);
  12394. IR::Instr *newInstr;
  12395. if (!val && opnd->IsRegOpnd())
  12396. {
  12397. val = this->FindValue(block->globOptData.symToValueMap, opnd->AsRegOpnd()->m_sym);
  12398. }
  12399. ValueInfo *valueInfo = val ? val->GetValueInfo() : nullptr;
  12400. bool needReplaceSrc = false;
  12401. bool updateBlockLastInstr = false;
  12402. if (instr)
  12403. {
  12404. needReplaceSrc = true;
  12405. if (!insertBeforeInstr)
  12406. {
  12407. insertBeforeInstr = instr;
  12408. }
  12409. }
  12410. else if (!insertBeforeInstr)
  12411. {
  12412. // Insert it at the end of the block
  12413. insertBeforeInstr = block->GetLastInstr();
  12414. if (insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
  12415. {
  12416. // Don't insert code between the branch and the preceding ByteCodeUses instrs...
  12417. while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
  12418. {
  12419. insertBeforeInstr = insertBeforeInstr->m_prev;
  12420. }
  12421. }
  12422. else
  12423. {
  12424. insertBeforeInstr = insertBeforeInstr->m_next;
  12425. updateBlockLastInstr = true;
  12426. }
  12427. }
  12428. // Int constant values will be propagated into the instruction. For ArgOut_A_InlineBuiltIn, there's no benefit from
  12429. // const-propping, so those are excluded.
  12430. if (opnd->IsRegOpnd() &&
  12431. !(
  12432. valueInfo &&
  12433. (valueInfo->HasIntConstantValue() || valueInfo->IsFloatConstant()) &&
  12434. (!instr || instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
  12435. ))
  12436. {
  12437. IR::RegOpnd *regSrc = opnd->AsRegOpnd();
  12438. StackSym *varSym = regSrc->m_sym;
  12439. Js::OpCode opcode = Js::OpCode::FromVar;
  12440. if (varSym->IsTypeSpec() || !block->globOptData.liveVarSyms->Test(varSym->m_id))
  12441. {
  12442. // Conversion between int32 and float64
  12443. if (varSym->IsTypeSpec())
  12444. {
  12445. varSym = varSym->GetVarEquivSym(this->func);
  12446. }
  12447. opcode = Js::OpCode::Conv_Prim;
  12448. }
  12449. Assert(block->globOptData.liveVarSyms->Test(varSym->m_id) || this->IsTypeSpecialized(varSym, block));
  12450. StackSym *typeSpecSym;
  12451. BOOL isLive;
  12452. BVSparse<JitArenaAllocator> *livenessBv;
  12453. if(valueInfo && valueInfo->IsInt())
  12454. {
  12455. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  12456. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  12457. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  12458. // lossy state.
  12459. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  12460. }
  12461. if (toType == TyInt32)
  12462. {
  12463. // Need to determine whether the conversion is actually lossy or lossless. If the value is an int, then it's a
  12464. // lossless conversion despite the type of conversion requested. The liveness of the converted int32 sym needs to be
  12465. // set to reflect the actual type of conversion done. Also, a lossless conversion needs the value to determine
  12466. // whether the conversion may need to bail out.
  12467. Assert(valueInfo);
  12468. if(valueInfo->IsInt())
  12469. {
  12470. lossy = false;
  12471. }
  12472. else
  12473. {
  12474. Assert(IsLoopPrePass() || !IsInt32TypeSpecialized(varSym, block));
  12475. }
  12476. livenessBv = block->globOptData.liveInt32Syms;
  12477. isLive = livenessBv->Test(varSym->m_id) && (lossy || !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id));
  12478. if (this->IsLoopPrePass())
  12479. {
  12480. if(!isLive)
  12481. {
  12482. livenessBv->Set(varSym->m_id);
  12483. if(lossy)
  12484. {
  12485. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  12486. }
  12487. else
  12488. {
  12489. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  12490. }
  12491. }
  12492. if(!lossy)
  12493. {
  12494. Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
  12495. valueInfo = valueInfo->SpecializeToInt32(alloc);
  12496. ChangeValueInfo(nullptr, val, valueInfo);
  12497. if(needReplaceSrc)
  12498. {
  12499. opnd->SetValueType(valueInfo->Type());
  12500. }
  12501. }
  12502. return instr;
  12503. }
  12504. typeSpecSym = varSym->GetInt32EquivSym(this->func);
  12505. if (!isLive)
  12506. {
  12507. if (!opnd->IsVar() ||
  12508. !block->globOptData.liveVarSyms->Test(varSym->m_id) ||
  12509. (block->globOptData.liveFloat64Syms->Test(varSym->m_id) && valueInfo && valueInfo->IsLikelyFloat()))
  12510. {
  12511. Assert(block->globOptData.liveFloat64Syms->Test(varSym->m_id));
  12512. if(!lossy && !valueInfo->IsInt())
  12513. {
  12514. // Shouldn't try to do a lossless conversion from float64 to int32 when the value is not known to be an
  12515. // int. There are cases where we need more than two passes over loops to flush out all dependencies.
  12516. // It's possible for the loop prepass to think that a sym s1 remains an int because it acquires the
  12517. // value of another sym s2 that is an int in the prepass at that time. However, s2 can become a float
  12518. // later in the loop body, in which case s1 would become a float on the second iteration of the loop. By
  12519. // that time, we would have already committed to having s1 live as a lossless int on entry into the
  12520. // loop, and we end up having to compensate by doing a lossless conversion from float to int, which will
  12521. // need a bailout and will most likely bail out.
  12522. //
  12523. // If s2 becomes a var instead of a float, then the compensation is legal although not ideal. After
  12524. // enough bailouts, rejit would be triggered with aggressive int type spec turned off. For the
  12525. // float-to-int conversion though, there's no point in emitting a bailout because we already know that
  12526. // the value is a float and has high probability of bailing out (whereas a var has a chance to be a
  12527. // tagged int), and so currently lossless conversion from float to int with bailout is not supported.
  12528. //
  12529. // So, treating this case as a compile-time bailout. The exception will trigger the jit work item to be
  12530. // restarted with aggressive int type specialization disabled.
  12531. if(bailOutKind == IR::BailOutExpectingInteger)
  12532. {
  12533. Assert(IsSwitchOptEnabled());
  12534. throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
  12535. }
  12536. else
  12537. {
  12538. Assert(DoAggressiveIntTypeSpec());
  12539. if(PHASE_TRACE(Js::BailOutPhase, this->func))
  12540. {
  12541. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  12542. Output::Print(
  12543. _u("BailOut (compile-time): function: %s (%s) varSym: "),
  12544. this->func->GetJITFunctionBody()->GetDisplayName(),
  12545. this->func->GetDebugNumberSet(debugStringBuffer),
  12546. varSym->m_id);
  12547. #if DBG_DUMP
  12548. varSym->Dump();
  12549. #else
  12550. Output::Print(_u("s%u"), varSym->m_id);
  12551. #endif
  12552. if(varSym->HasByteCodeRegSlot())
  12553. {
  12554. Output::Print(_u(" byteCodeReg: R%u"), varSym->GetByteCodeRegSlot());
  12555. }
  12556. Output::Print(_u(" (lossless conversion from float64 to int32)\n"));
  12557. Output::Flush();
  12558. }
  12559. if(!DoAggressiveIntTypeSpec())
  12560. {
  12561. // Aggressive int type specialization is already off for some reason. Prevent trying to rejit again
  12562. // because it won't help and the same thing will happen again. Just abort jitting this function.
  12563. if(PHASE_TRACE(Js::BailOutPhase, this->func))
  12564. {
  12565. Output::Print(_u(" Aborting JIT because AggressiveIntTypeSpec is already off\n"));
  12566. Output::Flush();
  12567. }
  12568. throw Js::OperationAbortedException();
  12569. }
  12570. throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
  12571. }
  12572. }
  12573. if(opnd->IsVar())
  12574. {
  12575. regSrc->SetType(TyFloat64);
  12576. regSrc->m_sym = varSym->GetFloat64EquivSym(this->func);
  12577. opcode = Js::OpCode::Conv_Prim;
  12578. }
  12579. else
  12580. {
  12581. Assert(regSrc->IsFloat64());
  12582. Assert(regSrc->m_sym->IsFloat64());
  12583. Assert(opcode == Js::OpCode::Conv_Prim);
  12584. }
  12585. }
  12586. }
  12587. GOPT_TRACE_OPND(regSrc, _u("Converting to int32\n"));
  12588. }
  12589. else if (toType == TyFloat64)
  12590. {
  12591. // float64
  12592. typeSpecSym = varSym->GetFloat64EquivSym(this->func);
  12593. if(!IsLoopPrePass() && typeSpecSym->m_requiresBailOnNotNumber && IsFloat64TypeSpecialized(varSym, block))
  12594. {
  12595. // This conversion is already protected by a BailOutNumberOnly bailout (or at least it will be after the
  12596. // dead-store phase). Since 'requiresBailOnNotNumber' is not flow-based, change the value to definitely float.
  12597. if(valueInfo)
  12598. {
  12599. if(!valueInfo->IsNumber())
  12600. {
  12601. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12602. ChangeValueInfo(block, val, valueInfo);
  12603. opnd->SetValueType(valueInfo->Type());
  12604. }
  12605. }
  12606. else
  12607. {
  12608. val = NewGenericValue(ValueType::Float);
  12609. valueInfo = val->GetValueInfo();
  12610. SetValue(&block->globOptData, val, varSym);
  12611. opnd->SetValueType(valueInfo->Type());
  12612. }
  12613. }
  12614. if(bailOutKind == IR::BailOutNumberOnly)
  12615. {
  12616. if(!IsLoopPrePass())
  12617. {
  12618. // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any
  12619. // non-number value, even ones that have already been generated before. The dead-store pass will update the
  12620. // bailout kind on already-generated FromVars based on this bit.
  12621. typeSpecSym->m_requiresBailOnNotNumber = true;
  12622. }
  12623. }
  12624. else if(typeSpecSym->m_requiresBailOnNotNumber)
  12625. {
  12626. Assert(bailOutKind == IR::BailOutPrimitiveButString);
  12627. bailOutKind = IR::BailOutNumberOnly;
  12628. }
  12629. livenessBv = block->globOptData.liveFloat64Syms;
  12630. isLive = livenessBv->Test(varSym->m_id);
  12631. if (this->IsLoopPrePass())
  12632. {
  12633. if(!isLive)
  12634. {
  12635. livenessBv->Set(varSym->m_id);
  12636. }
  12637. if (this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
  12638. {
  12639. this->prePassLoop->forceFloat64SymsOnEntry->Set(varSym->m_id);
  12640. }
  12641. else
  12642. {
  12643. Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
  12644. if (symStore && symStore != varSym
  12645. && this->OptIsInvariant(symStore, block, this->prePassLoop, this->FindValue(block->globOptData.symToValueMap, symStore), false, true))
  12646. {
  12647. // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
  12648. // outside the loop.
  12649. this->prePassLoop->forceFloat64SymsOnEntry->Set(symStore->m_id);
  12650. }
  12651. }
  12652. if(bailOutKind == IR::BailOutNumberOnly)
  12653. {
  12654. if(valueInfo)
  12655. {
  12656. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12657. ChangeValueInfo(block, val, valueInfo);
  12658. }
  12659. else
  12660. {
  12661. val = NewGenericValue(ValueType::Float);
  12662. valueInfo = val->GetValueInfo();
  12663. SetValue(&block->globOptData, val, varSym);
  12664. }
  12665. if(needReplaceSrc)
  12666. {
  12667. opnd->SetValueType(valueInfo->Type());
  12668. }
  12669. }
  12670. return instr;
  12671. }
  12672. if (!isLive && regSrc->IsVar())
  12673. {
  12674. if (!block->globOptData.liveVarSyms->Test(varSym->m_id) ||
  12675. (
  12676. block->globOptData.liveInt32Syms->Test(varSym->m_id) &&
  12677. !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
  12678. valueInfo &&
  12679. valueInfo->IsLikelyInt()
  12680. ))
  12681. {
  12682. Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
  12683. Assert(!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)); // Shouldn't try to convert a lossy int32 to anything
  12684. regSrc->SetType(TyInt32);
  12685. regSrc->m_sym = varSym->GetInt32EquivSym(this->func);
  12686. opcode = Js::OpCode::Conv_Prim;
  12687. }
  12688. }
  12689. GOPT_TRACE_OPND(regSrc, _u("Converting to float64\n"));
  12690. }
  12691. else
  12692. {
  12693. // SIMD_JS
  12694. Assert(IRType_IsSimd128(toType));
  12695. // Get or create type-spec sym
  12696. typeSpecSym = varSym->GetSimd128EquivSym(toType, this->func);
  12697. if (!IsLoopPrePass() && IsSimd128TypeSpecialized(toType, varSym, block))
  12698. {
  12699. // Consider: Is this needed ? Shouldn't this have been done at previous FromVar since the simd128 sym is alive ?
  12700. if (valueInfo)
  12701. {
  12702. if (!valueInfo->IsSimd128(toType))
  12703. {
  12704. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  12705. ChangeValueInfo(block, val, valueInfo);
  12706. opnd->SetValueType(valueInfo->Type());
  12707. }
  12708. }
  12709. else
  12710. {
  12711. val = NewGenericValue(GetValueTypeFromIRType(toType));
  12712. valueInfo = val->GetValueInfo();
  12713. SetValue(&block->globOptData, val, varSym);
  12714. opnd->SetValueType(valueInfo->Type());
  12715. }
  12716. }
  12717. livenessBv = block->globOptData.GetSimd128LivenessBV(toType);
  12718. isLive = livenessBv->Test(varSym->m_id);
  12719. if (this->IsLoopPrePass())
  12720. {
  12721. // FromVar Hoisting
  12722. BVSparse<Memory::JitArenaAllocator> * forceSimd128SymsOnEntry;
  12723. forceSimd128SymsOnEntry = \
  12724. toType == TySimd128F4 ? this->prePassLoop->forceSimd128F4SymsOnEntry : this->prePassLoop->forceSimd128I4SymsOnEntry;
  12725. if (!isLive)
  12726. {
  12727. livenessBv->Set(varSym->m_id);
  12728. }
  12729. // Be aggressive with hoisting only if value is always initialized to SIMD type before entering loop.
  12730. // This reduces the chance that the FromVar gets executed while the specialized instruction in the loop is not. Leading to unnecessary excessive bailouts.
  12731. if (val && !val->GetValueInfo()->HasBeenUndefined() && !val->GetValueInfo()->HasBeenNull() &&
  12732. this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
  12733. {
  12734. forceSimd128SymsOnEntry->Set(varSym->m_id);
  12735. }
  12736. else
  12737. {
  12738. Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
  12739. Value * value = symStore ? this->FindValue(block->globOptData.symToValueMap, symStore) : nullptr;
  12740. if (symStore && symStore != varSym
  12741. && value
  12742. && !value->GetValueInfo()->HasBeenUndefined() && !value->GetValueInfo()->HasBeenNull()
  12743. && this->OptIsInvariant(symStore, block, this->prePassLoop, value, true, true))
  12744. {
  12745. // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
  12746. // outside the loop.
  12747. forceSimd128SymsOnEntry->Set(symStore->m_id);
  12748. }
  12749. }
  12750. Assert(bailOutKind == IR::BailOutSimd128F4Only || bailOutKind == IR::BailOutSimd128I4Only);
  12751. // We are in loop prepass, we haven't propagated the value info to the src. Do it now.
  12752. if (valueInfo)
  12753. {
  12754. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  12755. ChangeValueInfo(block, val, valueInfo);
  12756. }
  12757. else
  12758. {
  12759. val = NewGenericValue(GetValueTypeFromIRType(toType));
  12760. valueInfo = val->GetValueInfo();
  12761. SetValue(&block->globOptData, val, varSym);
  12762. }
  12763. if (needReplaceSrc)
  12764. {
  12765. opnd->SetValueType(valueInfo->Type());
  12766. }
  12767. return instr;
  12768. }
  12769. GOPT_TRACE_OPND(regSrc, _u("Converting to Simd128\n"));
  12770. }
  12771. bool needLoad = false;
  12772. if (needReplaceSrc)
  12773. {
  12774. bool wasDead = regSrc->GetIsDead();
  12775. // needReplaceSrc means we are type specializing a use, and need to replace the src on the instr
  12776. if (!isLive)
  12777. {
  12778. needLoad = true;
  12779. // ReplaceSrc will delete it.
  12780. regSrc = regSrc->Copy(instr->m_func)->AsRegOpnd();
  12781. }
  12782. IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
  12783. if(valueInfo)
  12784. {
  12785. regNew->SetValueType(valueInfo->Type());
  12786. regNew->m_wasNegativeZeroPreventedByBailout = valueInfo->WasNegativeZeroPreventedByBailout();
  12787. }
  12788. regNew->SetIsDead(wasDead);
  12789. regNew->SetIsJITOptimizedReg(true);
  12790. this->CaptureByteCodeSymUses(instr);
  12791. if (indir == nullptr)
  12792. {
  12793. instr->ReplaceSrc(opnd, regNew);
  12794. }
  12795. else
  12796. {
  12797. indir->ReplaceIndexOpnd(regNew);
  12798. }
  12799. opnd = regNew;
  12800. if (!needLoad)
  12801. {
  12802. Assert(isLive);
  12803. return instr;
  12804. }
  12805. }
  12806. else
  12807. {
  12808. // We just need to insert a load of a type spec sym
  12809. if(isLive)
  12810. {
  12811. return instr;
  12812. }
  12813. // Insert it before the specified instruction
  12814. instr = insertBeforeInstr;
  12815. }
  12816. IR::RegOpnd *regDst = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
  12817. bool isBailout = false;
  12818. bool isHoisted = false;
  12819. bool isInLandingPad = (block->next && !block->next->isDeleted && block->next->isLoopHeader);
  12820. if (isInLandingPad)
  12821. {
  12822. Loop *loop = block->next->loop;
  12823. Assert(loop && loop->landingPad == block);
  12824. Assert(loop->bailOutInfo);
  12825. }
  12826. if (opcode == Js::OpCode::FromVar)
  12827. {
  12828. if (toType == TyInt32)
  12829. {
  12830. Assert(valueInfo);
  12831. if (lossy)
  12832. {
  12833. if (!valueInfo->IsPrimitive() && !IsTypeSpecialized(varSym, block))
  12834. {
  12835. // Lossy conversions to int32 on non-primitive values may have implicit calls to toString or valueOf, which
  12836. // may be overridden to have a side effect. The side effect needs to happen every time the conversion is
  12837. // supposed to happen, so the resulting lossy int32 value cannot be reused. Bail out on implicit calls.
  12838. Assert(DoLossyIntTypeSpec());
  12839. bailOutKind = IR::BailOutOnNotPrimitive;
  12840. isBailout = true;
  12841. }
  12842. }
  12843. else if (!valueInfo->IsInt())
  12844. {
  12845. // The operand is likely an int (hence the request to convert to int), so bail out if it's not an int. Only
  12846. // bail out if a lossless conversion to int is requested. Lossy conversions to int such as in (a | 0) don't
  12847. // need to bail out.
  12848. if (bailOutKind == IR::BailOutExpectingInteger)
  12849. {
  12850. Assert(IsSwitchOptEnabled());
  12851. }
  12852. else
  12853. {
  12854. Assert(DoAggressiveIntTypeSpec());
  12855. }
  12856. isBailout = true;
  12857. }
  12858. }
  12859. else if (toType == TyFloat64 &&
  12860. (!valueInfo || !valueInfo->IsNumber()))
  12861. {
  12862. // Bailout if converting vars to float if we can't prove they are floats:
  12863. // x = str + float; -> need to bailout if str is a string
  12864. //
  12865. // x = obj * 0.1;
  12866. // y = obj * 0.2; -> if obj has valueof, we'll only call valueof once on the FromVar conversion...
  12867. Assert(bailOutKind != IR::BailOutInvalid);
  12868. isBailout = true;
  12869. }
  12870. else if (IRType_IsSimd128(toType) &&
  12871. (!valueInfo || !valueInfo->IsSimd128(toType)))
  12872. {
  12873. Assert(toType == TySimd128F4 && bailOutKind == IR::BailOutSimd128F4Only
  12874. || toType == TySimd128I4 && bailOutKind == IR::BailOutSimd128I4Only);
  12875. isBailout = true;
  12876. }
  12877. }
  12878. if (isBailout)
  12879. {
  12880. if (isInLandingPad)
  12881. {
  12882. Loop *loop = block->next->loop;
  12883. this->EnsureBailTarget(loop);
  12884. instr = loop->bailOutInfo->bailOutInstr;
  12885. updateBlockLastInstr = false;
  12886. newInstr = IR::BailOutInstr::New(opcode, bailOutKind, loop->bailOutInfo, instr->m_func);
  12887. newInstr->SetDst(regDst);
  12888. newInstr->SetSrc1(regSrc);
  12889. }
  12890. else
  12891. {
  12892. newInstr = IR::BailOutInstr::New(opcode, regDst, regSrc, bailOutKind, instr, instr->m_func);
  12893. }
  12894. }
  12895. else
  12896. {
  12897. newInstr = IR::Instr::New(opcode, regDst, regSrc, instr->m_func);
  12898. }
  12899. newInstr->SetByteCodeOffset(instr);
  12900. instr->InsertBefore(newInstr);
  12901. if (updateBlockLastInstr)
  12902. {
  12903. block->SetLastInstr(newInstr);
  12904. }
  12905. regDst->SetIsJITOptimizedReg(true);
  12906. newInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
  12907. ValueInfo *const oldValueInfo = valueInfo;
  12908. if(valueInfo)
  12909. {
  12910. newInstr->GetSrc1()->SetValueType(valueInfo->Type());
  12911. }
  12912. if(isBailout)
  12913. {
  12914. Assert(opcode == Js::OpCode::FromVar);
  12915. if(toType == TyInt32)
  12916. {
  12917. Assert(valueInfo);
  12918. if(!lossy)
  12919. {
  12920. Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
  12921. valueInfo = valueInfo->SpecializeToInt32(alloc, isPerformingLoopBackEdgeCompensation);
  12922. ChangeValueInfo(nullptr, val, valueInfo);
  12923. int32 intConstantValue;
  12924. if(indir && needReplaceSrc && valueInfo->TryGetIntConstantValue(&intConstantValue))
  12925. {
  12926. // A likely-int value can have constant bounds due to conditional branches narrowing its range. Now that
  12927. // the sym has been proven to be an int, the likely-int value, after specialization, will be constant.
  12928. // Replace the index opnd in the indir with an offset.
  12929. Assert(opnd == indir->GetIndexOpnd());
  12930. Assert(indir->GetScale() == 0);
  12931. indir->UnlinkIndexOpnd()->Free(instr->m_func);
  12932. opnd = nullptr;
  12933. indir->SetOffset(intConstantValue);
  12934. }
  12935. }
  12936. }
  12937. else if (toType == TyFloat64)
  12938. {
  12939. if(bailOutKind == IR::BailOutNumberOnly)
  12940. {
  12941. if(valueInfo)
  12942. {
  12943. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  12944. ChangeValueInfo(block, val, valueInfo);
  12945. }
  12946. else
  12947. {
  12948. val = NewGenericValue(ValueType::Float);
  12949. valueInfo = val->GetValueInfo();
  12950. SetValue(&block->globOptData, val, varSym);
  12951. }
  12952. }
  12953. }
  12954. else
  12955. {
  12956. Assert(IRType_IsSimd128(toType));
  12957. if (valueInfo)
  12958. {
  12959. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  12960. ChangeValueInfo(block, val, valueInfo);
  12961. }
  12962. else
  12963. {
  12964. val = NewGenericValue(GetValueTypeFromIRType(toType));
  12965. valueInfo = val->GetValueInfo();
  12966. SetValue(&block->globOptData, val, varSym);
  12967. }
  12968. }
  12969. }
  12970. if(valueInfo)
  12971. {
  12972. newInstr->GetDst()->SetValueType(valueInfo->Type());
  12973. if(needReplaceSrc && opnd)
  12974. {
  12975. opnd->SetValueType(valueInfo->Type());
  12976. }
  12977. }
  12978. if (block->loop)
  12979. {
  12980. Assert(!this->IsLoopPrePass());
  12981. isHoisted = this->TryHoistInvariant(newInstr, block, val, val, nullptr, false, lossy, false, bailOutKind);
  12982. }
  12983. if (isBailout)
  12984. {
  12985. if (!isHoisted && !isInLandingPad)
  12986. {
  12987. if(valueInfo)
  12988. {
  12989. // Since this is a pre-op bailout, the old value info should be used for the purposes of bailout. For
  12990. // instance, the value info could be LikelyInt but with a constant range. Once specialized to int, the value
  12991. // info would be an int constant. However, the int constant is only guaranteed if the value is actually an
  12992. // int, which this conversion is verifying, so bailout cannot assume the constant value.
  12993. if(oldValueInfo)
  12994. {
  12995. val->SetValueInfo(oldValueInfo);
  12996. }
  12997. else
  12998. {
  12999. block->globOptData.symToValueMap->Clear(varSym->m_id);
  13000. }
  13001. }
  13002. // Fill in bail out info if the FromVar is a bailout instr, and it wasn't hoisted as invariant.
  13003. // If it was hoisted, the invariant code will fill out the bailout info with the loop landing pad bailout info.
  13004. this->FillBailOutInfo(block, newInstr->GetBailOutInfo());
  13005. if(valueInfo)
  13006. {
  13007. // Restore the new value info after filling the bailout info
  13008. if(oldValueInfo)
  13009. {
  13010. val->SetValueInfo(valueInfo);
  13011. }
  13012. else
  13013. {
  13014. SetValue(&block->globOptData, val, varSym);
  13015. }
  13016. }
  13017. }
  13018. }
  13019. // Now that we've captured the liveness in the bailout info, we can mark this as live.
  13020. // This type specialized sym isn't live if the FromVar bails out.
  13021. livenessBv->Set(varSym->m_id);
  13022. if(toType == TyInt32)
  13023. {
  13024. if(lossy)
  13025. {
  13026. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  13027. }
  13028. else
  13029. {
  13030. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13031. }
  13032. }
  13033. }
  13034. else
  13035. {
  13036. Assert(valueInfo);
  13037. if(opnd->IsRegOpnd() && valueInfo->IsInt())
  13038. {
  13039. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  13040. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  13041. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  13042. // lossy state.
  13043. block->globOptData.liveLossyInt32Syms->Clear(opnd->AsRegOpnd()->m_sym->m_id);
  13044. if(toType == TyInt32)
  13045. {
  13046. lossy = false;
  13047. }
  13048. }
  13049. if (this->IsLoopPrePass())
  13050. {
  13051. if(opnd->IsRegOpnd())
  13052. {
  13053. StackSym *const sym = opnd->AsRegOpnd()->m_sym;
  13054. if(toType == TyInt32)
  13055. {
  13056. Assert(!sym->IsTypeSpec());
  13057. block->globOptData.liveInt32Syms->Set(sym->m_id);
  13058. if(lossy)
  13059. {
  13060. block->globOptData.liveLossyInt32Syms->Set(sym->m_id);
  13061. }
  13062. else
  13063. {
  13064. block->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
  13065. }
  13066. }
  13067. else
  13068. {
  13069. Assert(toType == TyFloat64);
  13070. AnalysisAssert(instr);
  13071. StackSym *const varSym = sym->IsTypeSpec() ? sym->GetVarEquivSym(instr->m_func) : sym;
  13072. block->globOptData.liveFloat64Syms->Set(varSym->m_id);
  13073. }
  13074. }
  13075. return instr;
  13076. }
  13077. if (!needReplaceSrc)
  13078. {
  13079. instr = insertBeforeInstr;
  13080. }
  13081. IR::Opnd *constOpnd;
  13082. int32 intConstantValue;
  13083. if(valueInfo->TryGetIntConstantValue(&intConstantValue))
  13084. {
  13085. if(toType == TyInt32)
  13086. {
  13087. constOpnd = IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func);
  13088. }
  13089. else
  13090. {
  13091. Assert(toType == TyFloat64);
  13092. constOpnd = IR::FloatConstOpnd::New(static_cast<FloatConstType>(intConstantValue), TyFloat64, instr->m_func);
  13093. }
  13094. }
  13095. else if(valueInfo->IsFloatConstant())
  13096. {
  13097. const FloatConstType floatValue = valueInfo->AsFloatConstant()->FloatValue();
  13098. if(toType == TyInt32)
  13099. {
  13100. Assert(lossy);
  13101. constOpnd =
  13102. IR::IntConstOpnd::New(
  13103. Js::JavascriptMath::ToInt32(floatValue),
  13104. TyInt32,
  13105. instr->m_func);
  13106. }
  13107. else
  13108. {
  13109. Assert(toType == TyFloat64);
  13110. constOpnd = IR::FloatConstOpnd::New(floatValue, TyFloat64, instr->m_func);
  13111. }
  13112. }
  13113. else
  13114. {
  13115. Assert(opnd->IsVar());
  13116. Assert(opnd->IsAddrOpnd());
  13117. AssertMsg(opnd->AsAddrOpnd()->IsVar(), "We only expect to see addr that are var before lower.");
  13118. // Don't need to capture uses, we are only replacing an addr opnd
  13119. if(toType == TyInt32)
  13120. {
  13121. constOpnd = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opnd->AsAddrOpnd()->m_address), TyInt32, instr->m_func);
  13122. }
  13123. else
  13124. {
  13125. Assert(toType == TyFloat64);
  13126. constOpnd = IR::FloatConstOpnd::New(Js::TaggedInt::ToDouble(opnd->AsAddrOpnd()->m_address), TyFloat64, instr->m_func);
  13127. }
  13128. }
  13129. if (toType == TyInt32)
  13130. {
  13131. if (needReplaceSrc)
  13132. {
  13133. CaptureByteCodeSymUses(instr);
  13134. if(indir)
  13135. {
  13136. Assert(opnd == indir->GetIndexOpnd());
  13137. Assert(indir->GetScale() == 0);
  13138. indir->UnlinkIndexOpnd()->Free(instr->m_func);
  13139. indir->SetOffset(constOpnd->AsIntConstOpnd()->AsInt32());
  13140. }
  13141. else
  13142. {
  13143. instr->ReplaceSrc(opnd, constOpnd);
  13144. }
  13145. }
  13146. else
  13147. {
  13148. StackSym *varSym = opnd->AsRegOpnd()->m_sym;
  13149. if(varSym->IsTypeSpec())
  13150. {
  13151. varSym = varSym->GetVarEquivSym(nullptr);
  13152. Assert(varSym);
  13153. }
  13154. if(block->globOptData.liveInt32Syms->TestAndSet(varSym->m_id))
  13155. {
  13156. Assert(!!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) == lossy);
  13157. }
  13158. else
  13159. {
  13160. if(lossy)
  13161. {
  13162. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  13163. }
  13164. StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
  13165. IR::RegOpnd *int32Reg = IR::RegOpnd::New(int32Sym, TyInt32, instr->m_func);
  13166. int32Reg->SetIsJITOptimizedReg(true);
  13167. newInstr = IR::Instr::New(Js::OpCode::Ld_I4, int32Reg, constOpnd, instr->m_func);
  13168. newInstr->SetByteCodeOffset(instr);
  13169. instr->InsertBefore(newInstr);
  13170. if (updateBlockLastInstr)
  13171. {
  13172. block->SetLastInstr(newInstr);
  13173. }
  13174. }
  13175. }
  13176. }
  13177. else
  13178. {
  13179. StackSym *floatSym;
  13180. bool newFloatSym = false;
  13181. StackSym* varSym;
  13182. if (opnd->IsRegOpnd())
  13183. {
  13184. varSym = opnd->AsRegOpnd()->m_sym;
  13185. if (varSym->IsTypeSpec())
  13186. {
  13187. varSym = varSym->GetVarEquivSym(nullptr);
  13188. Assert(varSym);
  13189. }
  13190. floatSym = varSym->GetFloat64EquivSym(instr->m_func);
  13191. }
  13192. else
  13193. {
  13194. varSym = GetCopyPropSym(block, nullptr, val);
  13195. // If there is no float 64 type specialized sym for this - create a new sym.
  13196. if(!varSym || !IsFloat64TypeSpecialized(varSym, block))
  13197. {
  13198. // Clear the symstore to ensure it's set below to this new symbol
  13199. this->SetSymStoreDirect(val->GetValueInfo(), nullptr);
  13200. varSym = StackSym::New(TyVar, instr->m_func);
  13201. newFloatSym = true;
  13202. }
  13203. floatSym = varSym->GetFloat64EquivSym(instr->m_func);
  13204. }
  13205. IR::RegOpnd *floatReg = IR::RegOpnd::New(floatSym, TyFloat64, instr->m_func);
  13206. floatReg->SetIsJITOptimizedReg(true);
  13207. // If the value is not live - let's load it.
  13208. if(!block->globOptData.liveFloat64Syms->TestAndSet(varSym->m_id))
  13209. {
  13210. newInstr = IR::Instr::New(Js::OpCode::LdC_F8_R8, floatReg, constOpnd, instr->m_func);
  13211. newInstr->SetByteCodeOffset(instr);
  13212. instr->InsertBefore(newInstr);
  13213. if (updateBlockLastInstr)
  13214. {
  13215. block->SetLastInstr(newInstr);
  13216. }
  13217. if(newFloatSym)
  13218. {
  13219. this->SetValue(&block->globOptData, val, varSym);
  13220. }
  13221. // Src is always invariant, but check if the dst is, and then hoist.
  13222. if (block->loop &&
  13223. (
  13224. (newFloatSym && block->loop->CanHoistInvariants()) ||
  13225. this->OptIsInvariant(floatReg, block, block->loop, val, false, false)
  13226. ))
  13227. {
  13228. Assert(!this->IsLoopPrePass());
  13229. this->OptHoistInvariant(newInstr, block, block->loop, val, val, false);
  13230. }
  13231. }
  13232. if (needReplaceSrc)
  13233. {
  13234. CaptureByteCodeSymUses(instr);
  13235. instr->ReplaceSrc(opnd, floatReg);
  13236. }
  13237. }
  13238. return instr;
  13239. }
  13240. return newInstr;
  13241. }
  13242. void
  13243. GlobOpt::ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block)
  13244. {
  13245. ToVarStackSym(dst->m_sym, block);
  13246. }
  13247. void
  13248. GlobOpt::ToVarStackSym(StackSym *varSym, BasicBlock *block)
  13249. {
  13250. //added another check for sym , in case of asmjs there is mostly no var syms and hence added a new check to see if it is the primary sym
  13251. Assert(!varSym->IsTypeSpec());
  13252. block->globOptData.liveVarSyms->Set(varSym->m_id);
  13253. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  13254. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13255. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13256. // SIMD_JS
  13257. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13258. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13259. }
  13260. void
  13261. GlobOpt::ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13262. {
  13263. StackSym *varSym = dst->m_sym;
  13264. Assert(!varSym->IsTypeSpec());
  13265. if (!this->IsLoopPrePass() && varSym->IsVar())
  13266. {
  13267. StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
  13268. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  13269. // since we'll just be hammering the symbol.
  13270. dst = instr->UnlinkDst()->AsRegOpnd();
  13271. dst->m_sym = int32Sym;
  13272. dst->SetType(TyInt32);
  13273. instr->SetDst(dst);
  13274. }
  13275. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  13276. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  13277. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13278. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13279. // SIMD_JS
  13280. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13281. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13282. }
  13283. void
  13284. GlobOpt::ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13285. {
  13286. // We should be calling only for asmjs function
  13287. Assert(GetIsAsmJSFunc());
  13288. StackSym *varSym = dst->m_sym;
  13289. Assert(!varSym->IsTypeSpec());
  13290. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  13291. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  13292. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13293. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13294. // SIMD_JS
  13295. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13296. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13297. }
  13298. void
  13299. GlobOpt::ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13300. {
  13301. StackSym *varSym = dst->m_sym;
  13302. Assert(!varSym->IsTypeSpec());
  13303. if (!this->IsLoopPrePass() && varSym->IsVar())
  13304. {
  13305. StackSym *float64Sym = varSym->GetFloat64EquivSym(this->func);
  13306. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  13307. // since we'll just be hammering the symbol.
  13308. dst = instr->UnlinkDst()->AsRegOpnd();
  13309. dst->m_sym = float64Sym;
  13310. dst->SetType(TyFloat64);
  13311. instr->SetDst(dst);
  13312. }
  13313. block->globOptData.liveFloat64Syms->Set(varSym->m_id);
  13314. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13315. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  13316. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13317. // SIMD_JS
  13318. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13319. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13320. }
  13321. // SIMD_JS
  13322. void
  13323. GlobOpt::ToSimd128Dst(IRType toType, IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  13324. {
  13325. StackSym *varSym = dst->m_sym;
  13326. Assert(!varSym->IsTypeSpec());
  13327. BVSparse<JitArenaAllocator> * livenessBV = block->globOptData.GetSimd128LivenessBV(toType);
  13328. Assert(livenessBV);
  13329. if (!this->IsLoopPrePass() && varSym->IsVar())
  13330. {
  13331. StackSym *simd128Sym = varSym->GetSimd128EquivSym(toType, this->func);
  13332. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  13333. // since we'll just be hammering the symbol.
  13334. dst = instr->UnlinkDst()->AsRegOpnd();
  13335. dst->m_sym = simd128Sym;
  13336. dst->SetType(toType);
  13337. instr->SetDst(dst);
  13338. }
  13339. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  13340. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  13341. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  13342. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  13343. // SIMD_JS
  13344. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  13345. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  13346. livenessBV->Set(varSym->m_id);
  13347. }
  13348. BOOL
  13349. GlobOpt::IsInt32TypeSpecialized(Sym *sym, BasicBlock *block)
  13350. {
  13351. return IsInt32TypeSpecialized(sym, &block->globOptData);
  13352. }
  13353. BOOL
  13354. GlobOpt::IsSwitchInt32TypeSpecialized(IR::Instr * instr, BasicBlock * block)
  13355. {
  13356. return IsSwitchOptEnabled(instr->m_func->GetTopFunc()) && instr->GetSrc1()->IsRegOpnd() &&
  13357. IsInt32TypeSpecialized(instr->GetSrc1()->AsRegOpnd()->m_sym, block);
  13358. }
  13359. BOOL
  13360. GlobOpt::IsInt32TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13361. {
  13362. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13363. return sym && data->liveInt32Syms->Test(sym->m_id) && !data->liveLossyInt32Syms->Test(sym->m_id);
  13364. }
  13365. BOOL
  13366. GlobOpt::IsFloat64TypeSpecialized(Sym *sym, BasicBlock *block)
  13367. {
  13368. return IsFloat64TypeSpecialized(sym, &block->globOptData);
  13369. }
  13370. BOOL
  13371. GlobOpt::IsFloat64TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13372. {
  13373. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13374. return sym && data->liveFloat64Syms->Test(sym->m_id);
  13375. }
  13376. // SIMD_JS
  13377. BOOL
  13378. GlobOpt::IsSimd128TypeSpecialized(Sym *sym, BasicBlock *block)
  13379. {
  13380. return IsSimd128TypeSpecialized(sym, &block->globOptData);
  13381. }
  13382. BOOL
  13383. GlobOpt::IsSimd128TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13384. {
  13385. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13386. return sym && (data->liveSimd128F4Syms->Test(sym->m_id) || data->liveSimd128I4Syms->Test(sym->m_id));
  13387. }
  13388. BOOL
  13389. GlobOpt::IsSimd128TypeSpecialized(IRType type, Sym *sym, BasicBlock *block)
  13390. {
  13391. return IsSimd128TypeSpecialized(type, sym, &block->globOptData);
  13392. }
  13393. BOOL
  13394. GlobOpt::IsSimd128TypeSpecialized(IRType type, Sym *sym, GlobOptBlockData *data)
  13395. {
  13396. switch (type)
  13397. {
  13398. case TySimd128F4:
  13399. return IsSimd128F4TypeSpecialized(sym, data);
  13400. case TySimd128I4:
  13401. return IsSimd128I4TypeSpecialized(sym, data);
  13402. default:
  13403. Assert(UNREACHED);
  13404. return false;
  13405. }
  13406. }
  13407. BOOL
  13408. GlobOpt::IsSimd128F4TypeSpecialized(Sym *sym, BasicBlock *block)
  13409. {
  13410. return IsSimd128F4TypeSpecialized(sym, &block->globOptData);
  13411. }
  13412. BOOL
  13413. GlobOpt::IsSimd128F4TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13414. {
  13415. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13416. return sym && (data->liveSimd128F4Syms->Test(sym->m_id));
  13417. }
  13418. BOOL
  13419. GlobOpt::IsSimd128I4TypeSpecialized(Sym *sym, BasicBlock *block)
  13420. {
  13421. return IsSimd128I4TypeSpecialized(sym, &block->globOptData);
  13422. }
  13423. BOOL
  13424. GlobOpt::IsSimd128I4TypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13425. {
  13426. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13427. return sym && (data->liveSimd128I4Syms->Test(sym->m_id));
  13428. }
  13429. BOOL
  13430. GlobOpt::IsLiveAsSimd128(Sym *sym, GlobOptBlockData *data)
  13431. {
  13432. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13433. return
  13434. sym &&
  13435. (
  13436. data->liveSimd128F4Syms->Test(sym->m_id) ||
  13437. data->liveSimd128I4Syms->Test(sym->m_id)
  13438. );
  13439. }
  13440. BOOL
  13441. GlobOpt::IsLiveAsSimd128F4(Sym *sym, GlobOptBlockData *data)
  13442. {
  13443. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13444. return sym && data->liveSimd128F4Syms->Test(sym->m_id);
  13445. }
  13446. BOOL
  13447. GlobOpt::IsLiveAsSimd128I4(Sym *sym, GlobOptBlockData *data)
  13448. {
  13449. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13450. return sym && data->liveSimd128I4Syms->Test(sym->m_id);
  13451. }
  13452. BOOL
  13453. GlobOpt::IsTypeSpecialized(Sym *sym, BasicBlock *block)
  13454. {
  13455. return IsTypeSpecialized(sym, &block->globOptData);
  13456. }
  13457. BOOL
  13458. GlobOpt::IsTypeSpecialized(Sym *sym, GlobOptBlockData *data)
  13459. {
  13460. return IsInt32TypeSpecialized(sym, data) || IsFloat64TypeSpecialized(sym, data) || IsSimd128TypeSpecialized(sym, data);
  13461. }
  13462. BOOL
  13463. GlobOpt::IsLive(Sym *sym, BasicBlock *block)
  13464. {
  13465. return IsLive(sym, &block->globOptData);
  13466. }
  13467. BOOL
  13468. GlobOpt::IsLive(Sym *sym, GlobOptBlockData *data)
  13469. {
  13470. sym = StackSym::GetVarEquivStackSym_NoCreate(sym);
  13471. return
  13472. sym &&
  13473. (
  13474. data->liveVarSyms->Test(sym->m_id) ||
  13475. data->liveInt32Syms->Test(sym->m_id) ||
  13476. data->liveFloat64Syms->Test(sym->m_id) ||
  13477. data->liveSimd128F4Syms->Test(sym->m_id) ||
  13478. data->liveSimd128I4Syms->Test(sym->m_id)
  13479. );
  13480. }
  13481. void
  13482. GlobOpt::MakeLive(StackSym *const sym, GlobOptBlockData *const blockData, const bool lossy) const
  13483. {
  13484. Assert(sym);
  13485. Assert(blockData);
  13486. if(sym->IsTypeSpec())
  13487. {
  13488. const SymID varSymId = sym->GetVarEquivSym(func)->m_id;
  13489. if(sym->IsInt32())
  13490. {
  13491. blockData->liveInt32Syms->Set(varSymId);
  13492. if(lossy)
  13493. {
  13494. blockData->liveLossyInt32Syms->Set(varSymId);
  13495. }
  13496. else
  13497. {
  13498. blockData->liveLossyInt32Syms->Clear(varSymId);
  13499. }
  13500. return;
  13501. }
  13502. if (sym->IsFloat64())
  13503. {
  13504. blockData->liveFloat64Syms->Set(varSymId);
  13505. return;
  13506. }
  13507. // SIMD_JS
  13508. if (sym->IsSimd128F4())
  13509. {
  13510. blockData->liveSimd128F4Syms->Set(varSymId);
  13511. return;
  13512. }
  13513. if (sym->IsSimd128I4())
  13514. {
  13515. blockData->liveSimd128I4Syms->Set(varSymId);
  13516. return;
  13517. }
  13518. }
  13519. blockData->liveVarSyms->Set(sym->m_id);
  13520. }
// Attempts to constant-fold a binary instruction whose sources have known
// integer constant bounds. On success, rewrites the instruction into a load
// of the folded constant (Ld_I4 / LdC_A_I4), updates the dst's value, and
// returns true. Returns false if folding is disabled, the sources are not
// both constants (for non-branches), the calculator rejects the operation,
// or the result does not fit in 32 bits.
// NOTE: *pInstr may be updated (via OptSrc) during the rewrite.
bool
GlobOpt::OptConstFoldBinary(
    IR::Instr * *pInstr,
    const IntConstantBounds &src1IntConstantBounds,
    const IntConstantBounds &src2IntConstantBounds,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value;
    IR::IntConstOpnd *constOpnd;

    if (!DoConstFold())
    {
        return false;
    }

    int32 src1IntConstantValue = -1;
    int32 src2IntConstantValue = -1;

    int32 src1MaxIntConstantValue = -1;
    int32 src2MaxIntConstantValue = -1;
    int32 src1MinIntConstantValue = -1;
    int32 src2MinIntConstantValue = -1;

    if (instr->IsBranchInstr())
    {
        // For branches, capture the full [min, max] ranges of both sources.
        src1MinIntConstantValue = src1IntConstantBounds.LowerBound();
        src1MaxIntConstantValue = src1IntConstantBounds.UpperBound();
        src2MinIntConstantValue = src2IntConstantBounds.LowerBound();
        src2MaxIntConstantValue = src2IntConstantBounds.UpperBound();
    }
    else if (src1IntConstantBounds.IsConstant() && src2IntConstantBounds.IsConstant())
    {
        // Non-branch folding requires both sources to be exact constants.
        src1IntConstantValue = src1IntConstantBounds.LowerBound();
        src2IntConstantValue = src2IntConstantBounds.LowerBound();
    }
    else
    {
        return false;
    }

    IntConstType tmpValueOut;
    // BinaryCalculator evaluates the opcode on the constant operands; it can
    // refuse (e.g. for overflow/unsupported ops). The result must also fit in
    // a 32-bit value to be representable as a TyInt32 constant.
    if (!instr->BinaryCalculator(src1IntConstantValue, src2IntConstantValue, &tmpValueOut)
        || !Math::FitsInDWord(tmpValueOut))
    {
        return false;
    }

    value = (int32)tmpValueOut;

    // Preserve bytecode uses for bailout, then rewrite the instruction into a
    // single constant source.
    this->CaptureByteCodeSymUses(instr);
    constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
    instr->ReplaceSrc1(constOpnd);
    instr->FreeSrc2();
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (dstSym->IsSingleDef())
    {
        // Single-def syms can carry the constant on the sym itself.
        dstSym->SetIsIntConst(value);
    }

    GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);

    *pDstVal = GetIntConstantValue(value, instr, dst);

    if (IsTypeSpecPhaseOff(this->func))
    {
        // Type spec disabled: load the constant as a var.
        instr->m_opcode = Js::OpCode::LdC_A_I4;
        this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
    }
    else
    {
        // Type spec enabled: load it as an int32 and specialize the dst.
        instr->m_opcode = Js::OpCode::Ld_I4;
        this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    }

    return true;
}
// Constant-folds a conditional branch whose outcome is known.
// test == true  : branch is always taken -> turn it into an unconditional Br;
//                 the fall-through block becomes dead.
// test == false : branch is never taken -> turn it into a Nop; the branch
//                 target block becomes dead.
// Also updates flow-graph bookkeeping: removes the dead successor edge and
// adjusts the data use counts used for block-data lifetime management.
void
GlobOpt::OptConstFoldBr(bool test, IR::Instr *instr, Value * src1Val, Value * src2Val)
{
    GOPT_TRACE_INSTR(instr, _u("Constant folding to branch: "));
    BasicBlock *deadBlock;

    // Ensure the sources are materialized as int32 before rewriting, so the
    // liveness/type-spec state stays consistent.
    if (src1Val)
    {
        this->ToInt32(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, false);
    }

    if (src2Val)
    {
        this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, false);
    }

    this->CaptureByteCodeSymUses(instr);

    if (test)
    {
        // Always taken: unconditional branch; the fall-through label's block dies.
        instr->m_opcode = Js::OpCode::Br;

        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        deadBlock = instr->m_next->AsLabelInstr()->GetBasicBlock();
    }
    else
    {
        AssertMsg(instr->m_next->IsLabelInstr(), "Next instr of branch should be a label...");
        // Multi-branches can't be folded to a Nop here; bail out untouched.
        if(instr->AsBranchInstr()->IsMultiBranch())
        {
            return;
        }
        deadBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        instr->m_opcode = Js::OpCode::Nop;
    }

    // Loop back edge: we would have already decremented data use count for the tail block when we processed the loop header.
    if (!(this->currentBlock->loop && this->currentBlock->loop->GetHeadBlock() == deadBlock))
    {
        this->currentBlock->DecrementDataUseCount();
    }

    this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);

    // No remaining predecessors means nobody will ever consume the dead
    // block's data; release it.
    if (deadBlock->GetPredList()->Count() == 0)
    {
        deadBlock->SetDataUseCount(0);
    }
}
// Changes a value's ValueType to newValueType, copying the ValueInfo so
// other blocks sharing the old info are unaffected.
// - preserveSubclassInfo: keep subclass data (e.g. ArrayValueInfo syms) on
//   the copy; otherwise the copy is flattened to a generic structure kind.
// - allowIncompatibleType: forwarded to ChangeValueInfo; permits a type
//   change that isn't a subset/superset of the old one (rejit/profile
//   conflicts).
void
GlobOpt::ChangeValueType(
    BasicBlock *const block,
    Value *const value,
    const ValueType newValueType,
    const bool preserveSubclassInfo,
    const bool allowIncompatibleType) const
{
    Assert(value);
    // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
    Assert(!value->GetValueInfo()->IsJsType());

    ValueInfo *const valueInfo = value->GetValueInfo();
    const ValueType valueType(valueInfo->Type());
    // No-op when the type already matches, unless the caller wants subclass
    // info discarded and the info still carries some (i.e. isn't generic).
    if(valueType == newValueType && (preserveSubclassInfo || valueInfo->IsGeneric()))
    {
        return;
    }

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !preserveSubclassInfo ||
        !valueInfo->IsArrayValueInfo() ||
        newValueType.IsObject() && newValueType.GetObjectType() == valueInfo->GetObjectType());

    ValueInfo *const newValueInfo =
        preserveSubclassInfo
            ? valueInfo->Copy(alloc)
            : valueInfo->CopyWithGenericStructureKind(alloc);
    newValueInfo->Type() = newValueType;
    ChangeValueInfo(block, value, newValueInfo, allowIncompatibleType);
}
// Installs newValueInfo on the value, first updating kill-tracking state for
// the block (when one is provided). Asserts that the change is either
// compatible (more/less specific) or explicitly allowed to be incompatible.
void
GlobOpt::ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType, const bool compensated) const
{
    Assert(value);
    Assert(newValueInfo);

    // The value type must be changed to something more specific or something more generic. For instance, it would be changed to
    // something more specific if the current value type is LikelyArray and checks have been done to ensure that it's an array,
    // and it would be changed to something more generic if a call kills the Array value type and it must be treated as
    // LikelyArray going forward.

    // There are cases where we change the type because of different profile information, and because of rejit, these profile information
    // may conflict. Need to allow incompatible type in those cause. However, the old type should be indefinite.
    Assert((allowIncompatibleType && !value->GetValueInfo()->IsDefinite()) ||
        AreValueInfosCompatible(newValueInfo, value->GetValueInfo()));

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !value->GetValueInfo()->IsArrayValueInfo() ||
        !newValueInfo->IsArrayValueInfo() ||
        newValueInfo->GetObjectType() == value->GetValueInfo()->GetObjectType());

    // Keep the valuesToKillOnCalls set in sync with the new info. With no
    // block (nullptr), callers manage kill tracking themselves.
    if(block)
    {
        TrackValueInfoChangeForKills(block, value, newValueInfo, compensated);
    }
    value->SetValueInfo(newValueInfo);
}
// Determines whether two ValueInfos may legally describe the same value at a
// merge/change point, i.e. one type is a subset of the other or one of the
// sanctioned specialization exemptions applies (aggressive int spec, float
// spec, SIMD spec, mixed typed-array pairs, float-constant-vs-likely-int).
// Used (in asserts) to validate value info changes.
bool
GlobOpt::AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const
{
    Assert(v0);
    Assert(v1);

    // An uninitialized info is compatible with anything.
    if(v0->IsUninitialized() || v1->IsUninitialized())
    {
        return true;
    }

    const bool doAggressiveIntTypeSpec = DoAggressiveIntTypeSpec();
    if(doAggressiveIntTypeSpec && (v0->IsInt() || v1->IsInt()))
    {
        // Int specialization in some uncommon loop cases involving dependencies, needs to allow specializing values of
        // arbitrary types, even values that are definitely not int, to compensate for aggressive assumptions made by a loop
        // prepass
        return true;
    }

    if ((v0->Type()).IsMixedTypedArrayPair(v1->Type()) || (v1->Type()).IsMixedTypedArrayPair(v0->Type()))
    {
        return true;
    }

    const bool doFloatTypeSpec = DoFloatTypeSpec();
    if(doFloatTypeSpec && (v0->IsFloat() || v1->IsFloat()))
    {
        // Float specialization allows specializing values of arbitrary types, even values that are definitely not float
        return true;
    }

    // SIMD_JS
    if (SIMD128_TYPE_SPEC_FLAG && v0->Type().IsSimd128())
    {
        // We only type-spec Undefined values, Objects (possibly merged SIMD values), or actual SIMD values.

        if (v1->Type().IsLikelyUndefined() || v1->Type().IsLikelyNull())
        {
            return true;
        }

        if (v1->Type().IsLikelyObject() && v1->Type().GetObjectType() == ObjectType::Object)
        {
            return true;
        }

        if (v1->Type().IsSimd128())
        {
            // Two SIMD types are compatible only when they're the same kind.
            return v0->Type().GetObjectType() == v1->Type().GetObjectType();
        }
    }

    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    // Compatible when either definite type is a subset of the other, given
    // the enabled specialization modes.
    const auto AreValueTypesCompatible = [=](const ValueType t0, const ValueType t1)
    {
        return
            t0.IsSubsetOf(t1, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec) ||
            t1.IsSubsetOf(t0, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec);
    };

    const ValueType t0(v0->Type().ToDefinite()), t1(v1->Type().ToDefinite());
    if(t0.IsLikelyObject() && t1.IsLikelyObject())
    {
        // Check compatibility for the primitive portions and the object portions of the value types separately
        if(AreValueTypesCompatible(t0.ToDefiniteObject(), t1.ToDefiniteObject()) &&
            (
                !t0.HasBeenPrimitive() ||
                !t1.HasBeenPrimitive() ||
                AreValueTypesCompatible(t0.ToDefinitePrimitiveSubset(), t1.ToDefinitePrimitiveSubset())
            ))
        {
            return true;
        }
    }
    else if(AreValueTypesCompatible(t0, t1))
    {
        return true;
    }

    // Last exemption: a float constant paired with a likely-int value.
    const FloatConstantValueInfo *floatConstantValueInfo;
    const ValueInfo *likelyIntValueinfo;
    if(v0->IsFloatConstant() && v1->IsLikelyInt())
    {
        floatConstantValueInfo = v0->AsFloatConstant();
        likelyIntValueinfo = v1;
    }
    else if(v0->IsLikelyInt() && v1->IsFloatConstant())
    {
        floatConstantValueInfo = v1->AsFloatConstant();
        likelyIntValueinfo = v0;
    }
    else
    {
        return false;
    }

    // A float constant value with a value that is actually an int is a subset of a likely-int value.
    // Ideally, we should create an int constant value for this up front, such that IsInt() also returns true. There
    // were other issues with that, should see if that can be done.
    int32 int32Value;
    return
        Js::JavascriptNumber::TryGetInt32Value(floatConstantValueInfo->FloatValue(), &int32Value) &&
        (!likelyIntValueinfo->IsLikelyTaggedInt() || !Js::TaggedInt::IsOverflow(int32Value));
}
#if DBG
// Debug-only sanity checks for an optimized-array ValueInfo that is about to
// be (or is already) tracked for call kills: the info must be consistent with
// which array optimizations are actually enabled in this context (array check
// hoist, missing-value check hoist, segment hoist, length hoist).
void
GlobOpt::VerifyArrayValueInfoForTracking(
    const ValueInfo *const valueInfo,
    const bool isJsArray,
    const BasicBlock *const block,
    const bool ignoreKnownImplicitCalls) const
{
    Assert(valueInfo);
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(isJsArray == valueInfo->IsArrayOrObjectWithArray());
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
    Assert(block);

    Loop *implicitCallsLoop;
    if(block->next && !block->next->isDeleted && block->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = block->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == block);
    }
    else
    {
        implicitCallsLoop = block->loop;
    }

    // JS-array tracking requires array check hoist to be enabled (or the
    // caller explicitly ignoring known implicit calls).
    Assert(
        !isJsArray ||
        DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop) ||
        (
            ignoreKnownImplicitCalls &&
            !(implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
        ));
    Assert(!(isJsArray && valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist()));
    Assert(
        !(
            valueInfo->IsArrayValueInfo() &&
            (
                valueInfo->AsArrayValueInfo()->HeadSegmentSym() ||
                valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
            ) &&
            !DoArraySegmentHoist(valueInfo->Type())
        ));
#if 0
    // We can't assert here that there is only a head segment length sym if hoisting is allowed in the current block,
    // because we may have propagated the sym forward out of a loop, and hoisting may be allowed inside but not
    // outside the loop.
    Assert(
        isJsArray ||
        !valueInfo->IsArrayValueInfo() ||
        !valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym() ||
        DoTypedArraySegmentLengthHoist(implicitCallsLoop) ||
        ignoreKnownImplicitCalls ||
        (implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
    );
#endif
    Assert(
        !(
            isJsArray &&
            valueInfo->IsArrayValueInfo() &&
            valueInfo->AsArrayValueInfo()->LengthSym() &&
            !DoArrayLengthHoist()
        ));
}
#endif
  13852. void
  13853. GlobOpt::TrackNewValueForKills(Value *const value)
  13854. {
  13855. Assert(value);
  13856. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  13857. {
  13858. return;
  13859. }
  13860. DoTrackNewValueForKills(value);
  13861. }
// Begins call-kill tracking for a freshly created optimized-array value.
// The value info is brand new (no sym store), so it may be downgraded
// in-place when the relevant array optimizations are disabled. Only JS
// arrays (not typed arrays) are added to valuesToKillOnCalls here.
void
GlobOpt::DoTrackNewValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(!valueInfo->IsArrayValueInfo());

    // The value and value info here are new, so it's okay to modify the value info in-place
    Assert(!valueInfo->GetSymStore());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    Loop *implicitCallsLoop;
    if(currentBlock->next && !currentBlock->next->isDeleted && currentBlock->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = currentBlock->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == currentBlock);
    }
    else
    {
        implicitCallsLoop = currentBlock->loop;
    }

    if(isJsArray)
    {
        if(!DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop))
        {
            // Array opts are disabled for this value type, so treat it as an indefinite value type going forward
            valueInfo->Type() = valueInfo->Type().ToLikely();
            return;
        }

        if(valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
        {
            // Missing-value check hoist is off: drop the no-missing-values bit.
            valueInfo->Type() = valueInfo->Type().SetHasNoMissingValues(false);
        }
    }

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    if(!isJsArray)
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    blockData.valuesToKillOnCalls->Add(value);
}
  13912. void
  13913. GlobOpt::TrackCopiedValueForKills(Value *const value)
  13914. {
  13915. Assert(value);
  13916. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  13917. {
  13918. return;
  13919. }
  13920. DoTrackCopiedValueForKills(value);
  13921. }
// Resumes call-kill tracking for an optimized-array value copied into this
// block's data (e.g. cloned block data). JS arrays are always tracked; typed
// arrays are tracked only when a hoisted head-segment-length sym must be
// invalidated on calls.
void
GlobOpt::DoTrackCopiedValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    if(!isJsArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    blockData.valuesToKillOnCalls->Add(value);
}
  13943. void
  13944. GlobOpt::TrackMergedValueForKills(
  13945. Value *const value,
  13946. GlobOptBlockData *const blockData,
  13947. BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
  13948. {
  13949. Assert(value);
  13950. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  13951. {
  13952. return;
  13953. }
  13954. DoTrackMergedValueForKills(value, blockData, mergedValueTypesTrackedForKills);
  13955. }
// Resumes call-kill tracking for an optimized-array value produced by a
// block-data merge. Same tracking criteria as the copied-value path, but
// optionally dedupes by value number via mergedValueTypesTrackedForKills so
// a value merged from multiple predecessors is added only once.
void
GlobOpt::DoTrackMergedValueForKills(
    Value *const value,
    GlobOptBlockData *const blockData,
    BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
{
    Assert(value);
    Assert(blockData);

    ValueInfo *valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock, true);
#endif

    if(!isJsArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    if(!mergedValueTypesTrackedForKills || !mergedValueTypesTrackedForKills->TestAndSet(value->GetValueNumber()))
    {
        blockData->valuesToKillOnCalls->Add(value);
    }
}
// Keeps the block's valuesToKillOnCalls set consistent when a value's info is
// about to change from oldValueInfo to newValueInfo: adds the value if the
// new info now requires kill tracking, removes it if it no longer does.
// Tracking is required for JS arrays, and for typed arrays that carry a
// hoisted head-segment-length sym.
void
GlobOpt::TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const
{
    Assert(block);
    Assert(value);
    Assert(newValueInfo);

    ValueInfo *const oldValueInfo = value->GetValueInfo();

#if DBG
    if(oldValueInfo->IsAnyOptimizedArray())
    {
        VerifyArrayValueInfoForTracking(oldValueInfo, oldValueInfo->IsArrayOrObjectWithArray(), block, compensated);
    }
#endif

    // Whether the old info should currently be in the kill-tracking set;
    // the assert below validates the set is in sync.
    const bool trackOldValueInfo =
        oldValueInfo->IsArrayOrObjectWithArray() ||
        (
            oldValueInfo->IsOptimizedTypedArray() &&
            oldValueInfo->IsArrayValueInfo() &&
            oldValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
        );
    Assert(trackOldValueInfo == block->globOptData.valuesToKillOnCalls->ContainsKey(value));

#if DBG
    if(newValueInfo->IsAnyOptimizedArray())
    {
        VerifyArrayValueInfoForTracking(newValueInfo, newValueInfo->IsArrayOrObjectWithArray(), block, compensated);
    }
#endif

    // Same criteria applied to the new info.
    const bool trackNewValueInfo =
        newValueInfo->IsArrayOrObjectWithArray() ||
        (
            newValueInfo->IsOptimizedTypedArray() &&
            newValueInfo->IsArrayValueInfo() &&
            newValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
        );

    if(trackOldValueInfo == trackNewValueInfo)
    {
        return;
    }

    if(trackNewValueInfo)
    {
        block->globOptData.valuesToKillOnCalls->Add(value);
    }
    else
    {
        block->globOptData.valuesToKillOnCalls->Remove(value);
    }
}
// Applies the array-related kills implied by an instruction to all tracked
// array values in the current block: a full kill (calls) downgrades JS-array
// values to likely types and drops hoisted typed-array head segment lengths;
// partial kills (no-missing-values, native arrays, head segment / segment
// length / length) selectively downgrade or strip the affected info.
// During a loop prepass, kills are additionally accumulated on the root loop.
void
GlobOpt::ProcessValueKills(IR::Instr *const instr)
{
    Assert(instr);

    ValueSet *const valuesToKillOnCalls = blockData.valuesToKillOnCalls;
    // In a prepass we must still record the kills on the loop even when no
    // values are currently tracked; otherwise nothing to do if empty.
    if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    const JsArrayKills kills = CheckJsArrayKills(instr);
    Assert(!kills.KillsArrayHeadSegments() || kills.KillsArrayHeadSegmentLengths());

    if(IsLoopPrePass())
    {
        // Accumulate kills on the root prepass loop; a nested loop's kills
        // must be a subset of its parent's.
        rootLoopPrePass->jsArrayKills = rootLoopPrePass->jsArrayKills.Merge(kills);
        Assert(
            !rootLoopPrePass->parent ||
            rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));
        if(kills.KillsAllArrays())
        {
            rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = false;
        }

        if(valuesToKillOnCalls->Count() == 0)
        {
            return;
        }
    }

    if(kills.KillsAllArrays())
    {
        Assert(kills.KillsTypedArrayHeadSegmentLengths());

        // - Calls need to kill the value types of values in the following list. For instance, calls can transform a JS array
        //   into an ES5 array, so any definitely-array value types need to be killed. Update the value types.
        // - Calls also need to kill typed array head segment lengths. A typed array's array buffer may be transferred to a web
        //   worker, in which case the typed array's length is set to zero.
        for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
            if(valueInfo->IsArrayOrObjectWithArray())
            {
                ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
                continue;
            }
            // Typed array: drop only the hoisted head segment length sym.
            ChangeValueInfo(
                nullptr,
                value,
                valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
        }
        // Nothing definite remains to be killed by later calls.
        valuesToKillOnCalls->Clear();
        return;
    }

    if(kills.KillsArraysWithNoMissingValues())
    {
        // Some operations may kill arrays with no missing values in unlikely circumstances. Convert their value types to likely
        // versions so that the checks have to be redone.
        for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
            if(!valueInfo->IsArrayOrObjectWithArray() || !valueInfo->HasNoMissingValues())
            {
                continue;
            }
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            it.RemoveCurrent();
        }
    }

    if(kills.KillsNativeArrays())
    {
        // Some operations may kill native arrays in (what should be) unlikely circumstances. Convert their value types to
        // likely versions so that the checks have to be redone.
        for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
            if(!valueInfo->IsArrayOrObjectWithArray() || valueInfo->HasVarElements())
            {
                continue;
            }
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            it.RemoveCurrent();
        }
    }

    const bool likelyKillsJsArraysWithNoMissingValues = IsOperationThatLikelyKillsJsArraysWithNoMissingValues(instr);
    if(!kills.KillsArrayHeadSegmentLengths())
    {
        Assert(!kills.KillsArrayHeadSegments());
        if(!likelyKillsJsArraysWithNoMissingValues && !kills.KillsArrayLengths())
        {
            // No remaining kill kinds apply; done.
            return;
        }
    }

    for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *valueInfo = value->GetValueInfo();
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
        if(!valueInfo->IsArrayOrObjectWithArray())
        {
            continue;
        }

        if(likelyKillsJsArraysWithNoMissingValues && valueInfo->HasNoMissingValues())
        {
            // Possibly creates a missing value; keep subclass info (true).
            ChangeValueType(nullptr, value, valueInfo->Type().SetHasNoMissingValues(false), true);
            valueInfo = value->GetValueInfo();
        }

        if(!valueInfo->IsArrayValueInfo())
        {
            continue;
        }

        // Strip whichever hoisted syms this instruction invalidates.
        ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
        const bool removeHeadSegment = kills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
        const bool removeHeadSegmentLength = kills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
        const bool removeLength = kills.KillsArrayLengths() && arrayValueInfo->LengthSym();
        if(removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            ChangeValueInfo(
                nullptr,
                value,
                arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
            valueInfo = value->GetValueInfo();
        }
    }
}
  14165. void
  14166. GlobOpt::ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData)
  14167. {
  14168. Assert(block);
  14169. Assert(blockData);
  14170. ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
  14171. if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
  14172. {
  14173. return;
  14174. }
  14175. // If the current block or loop has implicit calls, kill all definitely-array value types, as using that info will cause
  14176. // implicit calls to be disabled, resulting in unnecessary bailouts
  14177. const bool killValuesOnImplicitCalls =
  14178. (block->loop ? !this->ImplicitCallFlagsAllowOpts(block->loop) : !this->ImplicitCallFlagsAllowOpts(func));
  14179. if (!killValuesOnImplicitCalls)
  14180. {
  14181. return;
  14182. }
  14183. if(IsLoopPrePass() && block->loop == rootLoopPrePass)
  14184. {
  14185. AnalysisAssert(rootLoopPrePass);
  14186. for (Loop * loop = rootLoopPrePass; loop != nullptr; loop = loop->parent)
  14187. {
  14188. loop->jsArrayKills.SetKillsAllArrays();
  14189. }
  14190. Assert(!rootLoopPrePass->parent || rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));
  14191. if(valuesToKillOnCalls->Count() == 0)
  14192. {
  14193. return;
  14194. }
  14195. }
  14196. for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
  14197. {
  14198. Value *const value = it.CurrentValue();
  14199. ValueInfo *const valueInfo = value->GetValueInfo();
  14200. Assert(
  14201. valueInfo->IsArrayOrObjectWithArray() ||
  14202. valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
  14203. if(valueInfo->IsArrayOrObjectWithArray())
  14204. {
  14205. ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
  14206. continue;
  14207. }
  14208. ChangeValueInfo(
  14209. nullptr,
  14210. value,
  14211. valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
  14212. }
  14213. valuesToKillOnCalls->Clear();
  14214. }
// Called on a loop header after merging values coming in from the loop's back-edges.
// Values that the loop's aggregated kills would invalidate must not stay definite
// across the merge, because no compensation code is emitted on back-edges.
void
GlobOpt::ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(block->isLoopHeader);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    if(valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    // Aggregated kill information for the entire loop body.
    const JsArrayKills loopKills(block->loop->jsArrayKills);
    for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *valueInfo = value->GetValueInfo();

        // Tracked values are either JS arrays (or objects with internal arrays), or
        // optimized typed arrays with a head segment length sym available.
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());
        const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
        Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

        if(isJsArray ? loopKills.KillsValueType(valueInfo->Type()) : loopKills.KillsTypedArrayHeadSegmentLengths())
        {
            // Hoisting array checks and other related things for this type is disabled for the loop due to the kill, as
            // compensation code is currently not added on back-edges. When merging values from a back-edge, the array value
            // type cannot be definite, as that may require adding compensation code on the back-edge if the optimization pass
            // chooses to not optimize the array.
            if(isJsArray)
            {
                // JS arrays: demote the type to its likely version so checks are redone.
                ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            }
            else
            {
                // Typed arrays: drop only the head segment length sym; the type stays definite.
                ChangeValueInfo(
                    nullptr,
                    value,
                    valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
            }
            // The value no longer needs to be killed on calls; safe with this iterator.
            it.RemoveCurrent();
            continue;
        }

        if(!isJsArray || !valueInfo->IsArrayValueInfo())
        {
            continue;
        }

        // Similarly, if the loop contains an operation that kills JS array segments, don't make the segment or other related
        // syms available initially inside the loop
        ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
        const bool removeHeadSegment = loopKills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
        const bool removeHeadSegmentLength = loopKills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
        const bool removeLength = loopKills.KillsArrayLengths() && arrayValueInfo->LengthSym();
        if(removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            // Copy the array value info without the killed syms (Copy's flags say which to keep).
            ChangeValueInfo(
                nullptr,
                value,
                arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
            valueInfo = value->GetValueInfo();
        }
    }
}
  14276. bool
  14277. GlobOpt::NeedBailOnImplicitCallForLiveValues(BasicBlock *const block, const bool isForwardPass) const
  14278. {
  14279. if(isForwardPass)
  14280. {
  14281. return block->globOptData.valuesToKillOnCalls->Count() != 0;
  14282. }
  14283. if(block->noImplicitCallUses->IsEmpty())
  14284. {
  14285. Assert(block->noImplicitCallNoMissingValuesUses->IsEmpty());
  14286. Assert(block->noImplicitCallNativeArrayUses->IsEmpty());
  14287. Assert(block->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty());
  14288. Assert(block->noImplicitCallArrayLengthSymUses->IsEmpty());
  14289. return false;
  14290. }
  14291. return true;
  14292. }
  14293. IR::Instr*
  14294. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func)
  14295. {
  14296. IR::Instr* instr = IR::Instr::New(Js::OpCode::BoundCheck, func);
  14297. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  14298. }
  14299. IR::Instr*
  14300. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func * func)
  14301. {
  14302. IR::Instr* instr = IR::BailOutInstr::New(Js::OpCode::BoundCheck, bailoutkind, bailoutInfo, func);
  14303. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  14304. }
  14305. IR::Instr*
  14306. GlobOpt::AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset)
  14307. {
  14308. instr->SetSrc1(lowerBound);
  14309. instr->SetSrc2(upperBound);
  14310. if (offset != 0)
  14311. {
  14312. instr->SetDst(IR::IntConstOpnd::New(offset, TyInt32, instr->m_func));
  14313. }
  14314. return instr;
  14315. }
  14316. void
  14317. GlobOpt::OptArraySrc(IR::Instr * *const instrRef)
  14318. {
  14319. Assert(instrRef);
  14320. IR::Instr *&instr = *instrRef;
  14321. Assert(instr);
  14322. IR::Instr *baseOwnerInstr;
  14323. IR::IndirOpnd *baseOwnerIndir;
  14324. IR::RegOpnd *baseOpnd;
  14325. bool isProfilableLdElem, isProfilableStElem;
  14326. bool isLoad, isStore;
  14327. bool needsHeadSegment, needsHeadSegmentLength, needsLength, needsBoundChecks;
  14328. switch(instr->m_opcode)
  14329. {
  14330. // SIMD_JS
  14331. case Js::OpCode::Simd128_LdArr_F4:
  14332. case Js::OpCode::Simd128_LdArr_I4:
  14333. // no type-spec for Asm.js
  14334. if (this->GetIsAsmJSFunc())
  14335. {
  14336. return;
  14337. }
  14338. // fall through
  14339. case Js::OpCode::LdElemI_A:
  14340. case Js::OpCode::LdMethodElem:
  14341. if(!instr->GetSrc1()->IsIndirOpnd())
  14342. {
  14343. return;
  14344. }
  14345. baseOwnerInstr = nullptr;
  14346. baseOwnerIndir = instr->GetSrc1()->AsIndirOpnd();
  14347. baseOpnd = baseOwnerIndir->GetBaseOpnd();
  14348. isProfilableLdElem = instr->m_opcode == Js::OpCode::LdElemI_A; // LdMethodElem is currently not profiled
  14349. isProfilableLdElem |= Js::IsSimd128Load(instr->m_opcode);
  14350. needsBoundChecks = needsHeadSegmentLength = needsHeadSegment = isLoad = true;
  14351. needsLength = isStore = isProfilableStElem = false;
  14352. break;
  14353. // SIMD_JS
  14354. case Js::OpCode::Simd128_StArr_F4:
  14355. case Js::OpCode::Simd128_StArr_I4:
  14356. if (this->GetIsAsmJSFunc())
  14357. {
  14358. return;
  14359. }
  14360. // fall through
  14361. case Js::OpCode::StElemI_A:
  14362. case Js::OpCode::StElemI_A_Strict:
  14363. case Js::OpCode::StElemC:
  14364. if(!instr->GetDst()->IsIndirOpnd())
  14365. {
  14366. return;
  14367. }
  14368. baseOwnerInstr = nullptr;
  14369. baseOwnerIndir = instr->GetDst()->AsIndirOpnd();
  14370. baseOpnd = baseOwnerIndir->GetBaseOpnd();
  14371. needsBoundChecks = isProfilableStElem = instr->m_opcode != Js::OpCode::StElemC;
  14372. isProfilableStElem |= Js::IsSimd128Store(instr->m_opcode);
  14373. needsHeadSegmentLength = needsHeadSegment = isStore = true;
  14374. needsLength = isLoad = isProfilableLdElem = false;
  14375. break;
  14376. case Js::OpCode::InlineArrayPush:
  14377. case Js::OpCode::InlineArrayPop:
  14378. {
  14379. baseOwnerInstr = instr;
  14380. baseOwnerIndir = nullptr;
  14381. IR::Opnd * thisOpnd = instr->GetSrc1();
  14382. // Return if it not a LikelyArray or Object with Array - No point in doing array check elimination.
  14383. if(!thisOpnd->IsRegOpnd() || !thisOpnd->GetValueType().IsLikelyArrayOrObjectWithArray())
  14384. {
  14385. return;
  14386. }
  14387. baseOpnd = thisOpnd->AsRegOpnd();
  14388. isLoad = instr->m_opcode == Js::OpCode::InlineArrayPop;
  14389. isStore = instr->m_opcode == Js::OpCode::InlineArrayPush;
  14390. needsLength = needsHeadSegmentLength = needsHeadSegment = true;
  14391. needsBoundChecks = isProfilableLdElem = isProfilableStElem = false;
  14392. break;
  14393. }
  14394. case Js::OpCode::LdLen_A:
  14395. if(!instr->GetSrc1()->IsRegOpnd())
  14396. {
  14397. return;
  14398. }
  14399. baseOwnerInstr = instr;
  14400. baseOwnerIndir = nullptr;
  14401. baseOpnd = instr->GetSrc1()->AsRegOpnd();
  14402. if(baseOpnd->GetValueType().IsLikelyObject() &&
  14403. baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray)
  14404. {
  14405. return;
  14406. }
  14407. needsLength = true;
  14408. needsBoundChecks =
  14409. needsHeadSegmentLength =
  14410. needsHeadSegment =
  14411. isStore =
  14412. isLoad =
  14413. isProfilableStElem =
  14414. isProfilableLdElem = false;
  14415. break;
  14416. default:
  14417. return;
  14418. }
  14419. Assert(!(baseOwnerInstr && baseOwnerIndir));
  14420. Assert(!needsHeadSegmentLength || needsHeadSegment);
  14421. if(baseOwnerIndir && !IsLoopPrePass())
  14422. {
  14423. // Since this happens before type specialization, make sure that any necessary conversions are done, and that the index
  14424. // is int-specialized if possible such that the const flags are correct.
  14425. ToVarUses(instr, baseOwnerIndir, baseOwnerIndir == instr->GetDst(), nullptr);
  14426. }
  14427. if(isProfilableStElem && !IsLoopPrePass())
  14428. {
  14429. // If the dead-store pass decides to add the bailout kind IR::BailOutInvalidatedArrayHeadSegment, and the fast path is
  14430. // generated, it may bail out before the operation is done, so this would need to be a pre-op bailout.
  14431. if(instr->HasBailOutInfo())
  14432. {
  14433. Assert(
  14434. instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
  14435. instr->GetBailOutInfo()->bailOutOffset <= instr->GetByteCodeOffset());
  14436. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  14437. Assert(
  14438. !(bailOutKind & ~IR::BailOutKindBits) ||
  14439. (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
  14440. if(!(bailOutKind & ~IR::BailOutKindBits))
  14441. {
  14442. instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
  14443. }
  14444. }
  14445. else
  14446. {
  14447. GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
  14448. }
  14449. }
  14450. Value *const baseValue = FindValue(baseOpnd->m_sym);
  14451. if(!baseValue)
  14452. {
  14453. return;
  14454. }
  14455. ValueInfo *baseValueInfo = baseValue->GetValueInfo();
  14456. ValueType baseValueType(baseValueInfo->Type());
  14457. baseOpnd->SetValueType(baseValueType);
  14458. if(!baseValueType.IsLikelyAnyOptimizedArray() ||
  14459. !DoArrayCheckHoist(baseValueType, currentBlock->loop, instr) ||
  14460. (baseOwnerIndir && !ShouldExpectConventionalArrayIndexValue(baseOwnerIndir)))
  14461. {
  14462. return;
  14463. }
  14464. const bool isLikelyJsArray = !baseValueType.IsLikelyTypedArray();
  14465. Assert(isLikelyJsArray == baseValueType.IsLikelyArrayOrObjectWithArray());
  14466. Assert(!isLikelyJsArray == baseValueType.IsLikelyOptimizedTypedArray());
  14467. if(!isLikelyJsArray && instr->m_opcode == Js::OpCode::LdMethodElem)
  14468. {
  14469. // Fast path is not generated in this case since the subsequent call will throw
  14470. return;
  14471. }
  14472. ValueType newBaseValueType(baseValueType.ToDefiniteObject());
  14473. if(isLikelyJsArray && newBaseValueType.HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
  14474. {
  14475. newBaseValueType = newBaseValueType.SetHasNoMissingValues(false);
  14476. }
  14477. Assert((newBaseValueType == baseValueType) == baseValueType.IsObject());
  14478. ArrayValueInfo *baseArrayValueInfo = nullptr;
  14479. const auto UpdateValue = [&](StackSym *newHeadSegmentSym, StackSym *newHeadSegmentLengthSym, StackSym *newLengthSym)
  14480. {
  14481. Assert(baseValueType.GetObjectType() == newBaseValueType.GetObjectType());
  14482. Assert(newBaseValueType.IsObject());
  14483. Assert(baseValueType.IsLikelyArray() || !newLengthSym);
  14484. if(!(newHeadSegmentSym || newHeadSegmentLengthSym || newLengthSym))
  14485. {
  14486. // We're not adding new information to the value other than changing the value type. Preserve any existing
  14487. // information and just change the value type.
  14488. ChangeValueType(currentBlock, baseValue, newBaseValueType, true);
  14489. return;
  14490. }
  14491. // Merge the new syms into the value while preserving any existing information, and change the value type
  14492. if(baseArrayValueInfo)
  14493. {
  14494. if(!newHeadSegmentSym)
  14495. {
  14496. newHeadSegmentSym = baseArrayValueInfo->HeadSegmentSym();
  14497. }
  14498. if(!newHeadSegmentLengthSym)
  14499. {
  14500. newHeadSegmentLengthSym = baseArrayValueInfo->HeadSegmentLengthSym();
  14501. }
  14502. if(!newLengthSym)
  14503. {
  14504. newLengthSym = baseArrayValueInfo->LengthSym();
  14505. }
  14506. Assert(
  14507. !baseArrayValueInfo->HeadSegmentSym() ||
  14508. newHeadSegmentSym == baseArrayValueInfo->HeadSegmentSym());
  14509. Assert(
  14510. !baseArrayValueInfo->HeadSegmentLengthSym() ||
  14511. newHeadSegmentLengthSym == baseArrayValueInfo->HeadSegmentLengthSym());
  14512. Assert(!baseArrayValueInfo->LengthSym() || newLengthSym == baseArrayValueInfo->LengthSym());
  14513. }
  14514. ArrayValueInfo *const newBaseArrayValueInfo =
  14515. ArrayValueInfo::New(
  14516. alloc,
  14517. newBaseValueType,
  14518. newHeadSegmentSym,
  14519. newHeadSegmentLengthSym,
  14520. newLengthSym,
  14521. baseValueInfo->GetSymStore());
  14522. ChangeValueInfo(currentBlock, baseValue, newBaseArrayValueInfo);
  14523. };
  14524. if(IsLoopPrePass())
  14525. {
  14526. if(newBaseValueType != baseValueType)
  14527. {
  14528. UpdateValue(nullptr, nullptr, nullptr);
  14529. }
  14530. // For javascript arrays and objects with javascript arrays:
  14531. // - Implicit calls need to be disabled and calls cannot be allowed in the loop since the array vtable may be changed
  14532. // into an ES5 array.
  14533. // For typed arrays:
  14534. // - A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the
  14535. // typed array's length is set to zero. Implicit calls need to be disabled if the typed array's head segment length
  14536. // is going to be loaded and used later.
  14537. // Since we don't know if the loop has kills after this instruction, the kill information may not be complete. If a kill
  14538. // is found later, this information will be updated to not require disabling implicit calls.
  14539. if(!(
  14540. isLikelyJsArray
  14541. ? rootLoopPrePass->jsArrayKills.KillsValueType(newBaseValueType)
  14542. : rootLoopPrePass->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
  14543. ))
  14544. {
  14545. rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
  14546. }
  14547. return;
  14548. }
  14549. if(baseValueInfo->IsArrayValueInfo())
  14550. {
  14551. baseArrayValueInfo = baseValueInfo->AsArrayValueInfo();
  14552. }
  14553. const bool doArrayChecks = !baseValueType.IsObject();
  14554. const bool doArraySegmentHoist = DoArraySegmentHoist(baseValueType) && instr->m_opcode != Js::OpCode::StElemC;
  14555. const bool headSegmentIsAvailable = baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym();
  14556. const bool doHeadSegmentLoad = doArraySegmentHoist && needsHeadSegment && !headSegmentIsAvailable;
  14557. const bool doArraySegmentLengthHoist =
  14558. doArraySegmentHoist && (isLikelyJsArray || DoTypedArraySegmentLengthHoist(currentBlock->loop));
  14559. const bool headSegmentLengthIsAvailable = baseArrayValueInfo && baseArrayValueInfo->HeadSegmentLengthSym();
  14560. const bool doHeadSegmentLengthLoad =
  14561. doArraySegmentLengthHoist &&
  14562. (needsHeadSegmentLength || (!isLikelyJsArray && needsLength)) &&
  14563. !headSegmentLengthIsAvailable;
  14564. const bool lengthIsAvailable = baseArrayValueInfo && baseArrayValueInfo->LengthSym();
  14565. const bool doLengthLoad =
  14566. DoArrayLengthHoist() &&
  14567. needsLength &&
  14568. !lengthIsAvailable &&
  14569. baseValueType.IsLikelyArray() &&
  14570. DoLdLenIntSpec(instr->m_opcode == Js::OpCode::LdLen_A ? instr : nullptr, baseValueType);
  14571. StackSym *const newHeadSegmentSym = doHeadSegmentLoad ? StackSym::New(TyMachPtr, instr->m_func) : nullptr;
  14572. StackSym *const newHeadSegmentLengthSym = doHeadSegmentLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
  14573. StackSym *const newLengthSym = doLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
  14574. bool canBailOutOnArrayAccessHelperCall;
  14575. if (Js::IsSimd128LoadStore(instr->m_opcode))
  14576. {
  14577. // SIMD_JS
  14578. // simd load/store never call helper
  14579. canBailOutOnArrayAccessHelperCall = true;
  14580. }
  14581. else
  14582. {
  14583. canBailOutOnArrayAccessHelperCall = (isProfilableLdElem || isProfilableStElem) &&
  14584. DoEliminateArrayAccessHelperCall() &&
  14585. !(
  14586. instr->IsProfiledInstr() &&
  14587. (
  14588. isProfilableLdElem
  14589. ? instr->AsProfiledInstr()->u.ldElemInfo->LikelyNeedsHelperCall()
  14590. : instr->AsProfiledInstr()->u.stElemInfo->LikelyNeedsHelperCall()
  14591. )
  14592. );
  14593. }
  14594. bool doExtractBoundChecks = false, eliminatedLowerBoundCheck = false, eliminatedUpperBoundCheck = false;
  14595. StackSym *indexVarSym = nullptr;
  14596. Value *indexValue = nullptr;
  14597. IntConstantBounds indexConstantBounds;
  14598. Value *headSegmentLengthValue = nullptr;
  14599. IntConstantBounds headSegmentLengthConstantBounds;
  14600. if (baseValueType.IsLikelyOptimizedVirtualTypedArray() && !Js::IsSimd128LoadStore(instr->m_opcode) /*Always extract bounds for SIMD */)
  14601. {
  14602. if (isProfilableStElem ||
  14603. !instr->IsDstNotAlwaysConvertedToInt32() ||
  14604. ( (baseValueType.GetObjectType() == ObjectType::Float32VirtualArray ||
  14605. baseValueType.GetObjectType() == ObjectType::Float64VirtualArray) &&
  14606. !instr->IsDstNotAlwaysConvertedToNumber()
  14607. )
  14608. )
  14609. {
  14610. // Unless we're in asm.js (where it is guaranteed that virtual typed array accesses cannot read/write beyond 4GB),
  14611. // check the range of the index to make sure we won't access beyond the reserved memory beforing eliminating bounds
  14612. // checks in jitted code.
  14613. if (!GetIsAsmJSFunc())
  14614. {
  14615. IR::RegOpnd * idxOpnd = baseOwnerIndir->GetIndexOpnd();
  14616. if (idxOpnd)
  14617. {
  14618. StackSym * idxSym = idxOpnd->m_sym->IsTypeSpec() ? idxOpnd->m_sym->GetVarEquivSym(nullptr) : idxOpnd->m_sym;
  14619. Value * idxValue = FindValue(idxSym);
  14620. IntConstantBounds idxConstantBounds;
  14621. if (idxValue && idxValue->GetValueInfo()->TryGetIntConstantBounds(&idxConstantBounds))
  14622. {
  14623. BYTE indirScale = Lowerer::GetArrayIndirScale(baseValueType);
  14624. int32 upperBound = idxConstantBounds.UpperBound();
  14625. int32 lowerBound = idxConstantBounds.LowerBound();
  14626. if (lowerBound >= 0 && ((static_cast<uint64>(upperBound) << indirScale) < MAX_ASMJS_ARRAYBUFFER_LENGTH))
  14627. {
  14628. eliminatedLowerBoundCheck = true;
  14629. eliminatedUpperBoundCheck = true;
  14630. canBailOutOnArrayAccessHelperCall = false;
  14631. }
  14632. }
  14633. }
  14634. }
  14635. else
  14636. {
  14637. eliminatedLowerBoundCheck = true;
  14638. eliminatedUpperBoundCheck = true;
  14639. canBailOutOnArrayAccessHelperCall = false;
  14640. }
  14641. }
  14642. }
  14643. if(needsBoundChecks && DoBoundCheckElimination())
  14644. {
  14645. AnalysisAssert(baseOwnerIndir);
  14646. Assert(needsHeadSegmentLength);
  14647. // Bound checks can be separated from the instruction only if it can bail out instead of making a helper call when a
  14648. // bound check fails. And only if it would bail out, can we use a bound check to eliminate redundant bound checks later
  14649. // on that path.
  14650. doExtractBoundChecks = (headSegmentLengthIsAvailable || doHeadSegmentLengthLoad) && canBailOutOnArrayAccessHelperCall;
  14651. do
  14652. {
  14653. // Get the index value
  14654. IR::RegOpnd *const indexOpnd = baseOwnerIndir->GetIndexOpnd();
  14655. if(indexOpnd)
  14656. {
  14657. StackSym *const indexSym = indexOpnd->m_sym;
  14658. if(indexSym->IsTypeSpec())
  14659. {
  14660. Assert(indexSym->IsInt32());
  14661. indexVarSym = indexSym->GetVarEquivSym(nullptr);
  14662. Assert(indexVarSym);
  14663. indexValue = FindValue(indexVarSym);
  14664. Assert(indexValue);
  14665. AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
  14666. Assert(indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
  14667. Assert(
  14668. (indexOpnd->GetType() == TyUint32) ==
  14669. ValueInfo::IsGreaterThanOrEqualTo(
  14670. indexValue,
  14671. indexConstantBounds.LowerBound(),
  14672. indexConstantBounds.UpperBound(),
  14673. nullptr,
  14674. 0,
  14675. 0));
  14676. if(indexOpnd->GetType() == TyUint32)
  14677. {
  14678. eliminatedLowerBoundCheck = true;
  14679. }
  14680. }
  14681. else
  14682. {
  14683. doExtractBoundChecks = false; // Bound check instruction operates only on int-specialized operands
  14684. indexValue = FindValue(indexSym);
  14685. if(!indexValue || !indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds))
  14686. {
  14687. break;
  14688. }
  14689. if(ValueInfo::IsGreaterThanOrEqualTo(
  14690. indexValue,
  14691. indexConstantBounds.LowerBound(),
  14692. indexConstantBounds.UpperBound(),
  14693. nullptr,
  14694. 0,
  14695. 0))
  14696. {
  14697. eliminatedLowerBoundCheck = true;
  14698. }
  14699. }
  14700. if(!eliminatedLowerBoundCheck &&
  14701. ValueInfo::IsLessThan(
  14702. indexValue,
  14703. indexConstantBounds.LowerBound(),
  14704. indexConstantBounds.UpperBound(),
  14705. nullptr,
  14706. 0,
  14707. 0))
  14708. {
  14709. eliminatedUpperBoundCheck = true;
  14710. doExtractBoundChecks = false;
  14711. break;
  14712. }
  14713. }
  14714. else
  14715. {
  14716. const int32 indexConstantValue = baseOwnerIndir->GetOffset();
  14717. if(indexConstantValue < 0)
  14718. {
  14719. eliminatedUpperBoundCheck = true;
  14720. doExtractBoundChecks = false;
  14721. break;
  14722. }
  14723. if(indexConstantValue == INT32_MAX)
  14724. {
  14725. eliminatedLowerBoundCheck = true;
  14726. doExtractBoundChecks = false;
  14727. break;
  14728. }
  14729. indexConstantBounds = IntConstantBounds(indexConstantValue, indexConstantValue);
  14730. eliminatedLowerBoundCheck = true;
  14731. }
  14732. if(!headSegmentLengthIsAvailable)
  14733. {
  14734. break;
  14735. }
  14736. headSegmentLengthValue = FindValue(baseArrayValueInfo->HeadSegmentLengthSym());
  14737. if(!headSegmentLengthValue)
  14738. {
  14739. if(doExtractBoundChecks)
  14740. {
  14741. headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
  14742. }
  14743. break;
  14744. }
  14745. AssertVerify(headSegmentLengthValue->GetValueInfo()->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
  14746. if (ValueInfo::IsLessThanOrEqualTo(
  14747. indexValue,
  14748. indexConstantBounds.LowerBound(),
  14749. indexConstantBounds.UpperBound(),
  14750. headSegmentLengthValue,
  14751. headSegmentLengthConstantBounds.LowerBound(),
  14752. headSegmentLengthConstantBounds.UpperBound(),
  14753. GetBoundCheckOffsetForSimd(newBaseValueType, instr, -1)
  14754. ))
  14755. {
  14756. eliminatedUpperBoundCheck = true;
  14757. if(eliminatedLowerBoundCheck)
  14758. {
  14759. doExtractBoundChecks = false;
  14760. }
  14761. }
  14762. } while(false);
  14763. }
  14764. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad || doExtractBoundChecks)
  14765. {
  14766. // Find the loops out of which array checks and head segment loads need to be hoisted
  14767. Loop *hoistChecksOutOfLoop = nullptr;
  14768. Loop *hoistHeadSegmentLoadOutOfLoop = nullptr;
  14769. Loop *hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
  14770. Loop *hoistLengthLoadOutOfLoop = nullptr;
  14771. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
  14772. {
  14773. for(Loop *loop = currentBlock->loop; loop; loop = loop->parent)
  14774. {
  14775. const JsArrayKills loopKills(loop->jsArrayKills);
  14776. Value *baseValueInLoopLandingPad;
  14777. if((isLikelyJsArray && loopKills.KillsValueType(newBaseValueType)) ||
  14778. !OptIsInvariant(baseOpnd->m_sym, currentBlock, loop, baseValue, true, true, &baseValueInLoopLandingPad) ||
  14779. !(doArrayChecks || baseValueInLoopLandingPad->GetValueInfo()->IsObject()))
  14780. {
  14781. break;
  14782. }
  14783. // The value types should be the same, except:
  14784. // - The value type in the landing pad is a type that can merge to a specific object type. Typically, these
  14785. // cases will use BailOnNoProfile, but that can be disabled due to excessive bailouts. Those value types
  14786. // merge aggressively to the other side's object type, so the value type may have started off as
  14787. // Uninitialized, [Likely]Undefined|Null, [Likely]UninitializedObject, etc., and changed in the loop to an
  14788. // array type during a prepass.
  14789. // - StElems in the loop can kill the no-missing-values info.
  14790. // - The native array type may be made more conservative based on profile data by an instruction in the loop.
  14791. Assert(
  14792. baseValueInLoopLandingPad->GetValueInfo()->CanMergeToSpecificObjectType() ||
  14793. baseValueInLoopLandingPad->GetValueInfo()->Type().SetCanBeTaggedValue(false) ==
  14794. baseValueType.SetCanBeTaggedValue(false) ||
  14795. baseValueInLoopLandingPad->GetValueInfo()->Type().SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ==
  14796. baseValueType.SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ||
  14797. baseValueInLoopLandingPad->GetValueInfo()->Type().SetHasNoMissingValues(false).ToLikely().SetCanBeTaggedValue(false) ==
  14798. baseValueType.SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ||
  14799. (
  14800. baseValueInLoopLandingPad->GetValueInfo()->Type().IsLikelyNativeArray() &&
  14801. baseValueInLoopLandingPad->GetValueInfo()->Type().Merge(baseValueType).SetHasNoMissingValues(false).SetCanBeTaggedValue(false) ==
  14802. baseValueType.SetHasNoMissingValues(false).SetCanBeTaggedValue(false)
  14803. ));
  14804. if(doArrayChecks)
  14805. {
  14806. hoistChecksOutOfLoop = loop;
  14807. }
  14808. if(isLikelyJsArray && loopKills.KillsArrayHeadSegments())
  14809. {
  14810. Assert(loopKills.KillsArrayHeadSegmentLengths());
  14811. if(!(doArrayChecks || doLengthLoad))
  14812. {
  14813. break;
  14814. }
  14815. }
  14816. else
  14817. {
  14818. if(doHeadSegmentLoad || headSegmentIsAvailable)
  14819. {
  14820. // If the head segment is already available, we may need to rehoist the value including other
  14821. // information. So, need to track the loop out of which the head segment length can be hoisted even if
  14822. // the head segment length is not being loaded here.
  14823. hoistHeadSegmentLoadOutOfLoop = loop;
  14824. }
  14825. if(isLikelyJsArray
  14826. ? loopKills.KillsArrayHeadSegmentLengths()
  14827. : loopKills.KillsTypedArrayHeadSegmentLengths())
  14828. {
  14829. if(!(doArrayChecks || doHeadSegmentLoad || doLengthLoad))
  14830. {
  14831. break;
  14832. }
  14833. }
  14834. else if(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable)
  14835. {
  14836. // If the head segment length is already available, we may need to rehoist the value including other
  14837. // information. So, need to track the loop out of which the head segment length can be hoisted even if
  14838. // the head segment length is not being loaded here.
  14839. hoistHeadSegmentLengthLoadOutOfLoop = loop;
  14840. }
  14841. }
  14842. if(isLikelyJsArray && loopKills.KillsArrayLengths())
  14843. {
  14844. if(!(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad))
  14845. {
  14846. break;
  14847. }
  14848. }
  14849. else if(doLengthLoad || lengthIsAvailable)
  14850. {
  14851. // If the length is already available, we may need to rehoist the value including other information. So,
  14852. // need to track the loop out of which the head segment length can be hoisted even if the length is not
  14853. // being loaded here.
  14854. hoistLengthLoadOutOfLoop = loop;
  14855. }
  14856. }
  14857. }
  14858. IR::Instr *insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();
// Inserts 'instr' into the landing pad of 'hoistOutOfLoop'.
// If the loop's landing pad already contains a bail-out instruction (a shared bail target for
// instructions hoisted out of this loop), the hoisted instruction is placed immediately before it
// so that it is covered by that bail-out; otherwise the instruction is inserted at the end of the
// landing pad block. In both cases the instruction inherits the byte-code offset of the
// instruction/block position it is inserted at.
const auto InsertInstrInLandingPad = [&](IR::Instr *const instr, Loop *const hoistOutOfLoop)
{
    if(hoistOutOfLoop->bailOutInfo->bailOutInstr)
    {
        // Place before the loop's shared bail-out instruction, taking its byte-code offset.
        instr->SetByteCodeOffset(hoistOutOfLoop->bailOutInfo->bailOutInstr);
        hoistOutOfLoop->bailOutInfo->bailOutInstr->InsertBefore(instr);
    }
    else
    {
        // No bail-out instruction in the landing pad; insert at the end of the landing pad block,
        // using the block's last instruction for the byte-code offset.
        instr->SetByteCodeOffset(hoistOutOfLoop->landingPad->GetLastInstr());
        hoistOutOfLoop->landingPad->InsertAfter(instr);
    }
};
// Bail-out info that may be shared by the helper instructions generated below (array checks,
// length loads, bound checks), plus the instruction that originally owned that info. The owner is
// tracked so we can tell whether the info has already been split into a shared bail-out instruction.
BailOutInfo *shareableBailOutInfo = nullptr;
IR::Instr *shareableBailOutInfoOriginalOwner = nullptr;
// Splits the shareable bail-out info off its original owner into a standalone shared bail-out
// instruction (via ShareBailOut()), moves that instruction to just before 'insertBeforeInstr', and
// redirects 'insertBeforeInstr' to it so subsequent helper instructions are inserted before the
// shared bail-out and covered by it. No-op if the info has already been shared (owner mismatch).
const auto ShareBailOut = [&]()
{
    Assert(shareableBailOutInfo);
    if(shareableBailOutInfo->bailOutInstr != shareableBailOutInfoOriginalOwner)
    {
        // Already shared: the bail-out info's owning instruction is no longer the original owner.
        return;
    }
    Assert(shareableBailOutInfoOriginalOwner->GetBailOutInfo() == shareableBailOutInfo);
    IR::Instr *const sharedBailOut = shareableBailOutInfoOriginalOwner->ShareBailOut();
    Assert(sharedBailOut->GetBailOutInfo() == shareableBailOutInfo);
    shareableBailOutInfoOriginalOwner = nullptr;
    // Relocate the shared bail-out to the insertion point, and make it the new insertion point so
    // later insertions land before it.
    sharedBailOut->Unlink();
    insertBeforeInstr->InsertBefore(sharedBailOut);
    insertBeforeInstr = sharedBailOut;
};
  14889. if(doArrayChecks)
  14890. {
  14891. TRACE_TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Separating array checks with bailout\n"));
  14892. IR::Instr *bailOnNotArray = IR::Instr::New(Js::OpCode::BailOnNotArray, instr->m_func);
  14893. bailOnNotArray->SetSrc1(baseOpnd);
  14894. bailOnNotArray->GetSrc1()->SetIsJITOptimizedReg(true);
  14895. const IR::BailOutKind bailOutKind =
  14896. newBaseValueType.IsLikelyNativeArray() ? IR::BailOutOnNotNativeArray : IR::BailOutOnNotArray;
  14897. if(hoistChecksOutOfLoop)
  14898. {
  14899. Assert(!(isLikelyJsArray && hoistChecksOutOfLoop->jsArrayKills.KillsValueType(newBaseValueType)));
  14900. TRACE_PHASE_INSTR(
  14901. Js::ArrayCheckHoistPhase,
  14902. instr,
  14903. _u("Hoisting array checks with bailout out of loop %u to landing pad block %u\n"),
  14904. hoistChecksOutOfLoop->GetLoopNumber(),
  14905. hoistChecksOutOfLoop->landingPad->GetBlockNum());
  14906. TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Hoisting array checks with bailout out of loop\n"));
  14907. Assert(hoistChecksOutOfLoop->bailOutInfo);
  14908. EnsureBailTarget(hoistChecksOutOfLoop);
  14909. InsertInstrInLandingPad(bailOnNotArray, hoistChecksOutOfLoop);
  14910. bailOnNotArray = bailOnNotArray->ConvertToBailOutInstr(hoistChecksOutOfLoop->bailOutInfo, bailOutKind);
  14911. }
  14912. else
  14913. {
  14914. bailOnNotArray->SetByteCodeOffset(instr);
  14915. insertBeforeInstr->InsertBefore(bailOnNotArray);
  14916. GenerateBailAtOperation(&bailOnNotArray, bailOutKind);
  14917. shareableBailOutInfo = bailOnNotArray->GetBailOutInfo();
  14918. shareableBailOutInfoOriginalOwner = bailOnNotArray;
  14919. }
  14920. baseValueType = newBaseValueType;
  14921. baseOpnd->SetValueType(newBaseValueType);
  14922. }
  14923. if(doLengthLoad)
  14924. {
  14925. Assert(baseValueType.IsArray());
  14926. Assert(newLengthSym);
  14927. TRACE_TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Separating array length load\n"));
  14928. // Create an initial value for the length
  14929. blockData.liveVarSyms->Set(newLengthSym->m_id);
  14930. Value *const lengthValue = NewIntRangeValue(0, INT32_MAX, false);
  14931. SetValue(&blockData, lengthValue, newLengthSym);
  14932. // SetValue above would have set the sym store to newLengthSym. This sym won't be used for copy-prop though, so
  14933. // remove it as the sym store.
  14934. this->SetSymStoreDirect(lengthValue->GetValueInfo(), nullptr);
  14935. // length = [array + offsetOf(length)]
  14936. IR::Instr *const loadLength =
  14937. IR::Instr::New(
  14938. Js::OpCode::LdIndir,
  14939. IR::RegOpnd::New(newLengthSym, newLengthSym->GetType(), instr->m_func),
  14940. IR::IndirOpnd::New(
  14941. baseOpnd,
  14942. Js::JavascriptArray::GetOffsetOfLength(),
  14943. newLengthSym->GetType(),
  14944. instr->m_func),
  14945. instr->m_func);
  14946. loadLength->GetDst()->SetIsJITOptimizedReg(true);
  14947. loadLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  14948. // BailOnNegative length (BailOutOnIrregularLength)
  14949. IR::Instr *bailOnIrregularLength = IR::Instr::New(Js::OpCode::BailOnNegative, instr->m_func);
  14950. bailOnIrregularLength->SetSrc1(loadLength->GetDst());
  14951. const IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
  14952. if(hoistLengthLoadOutOfLoop)
  14953. {
  14954. Assert(!hoistLengthLoadOutOfLoop->jsArrayKills.KillsArrayLengths());
  14955. TRACE_PHASE_INSTR(
  14956. Js::Phase::ArrayLengthHoistPhase,
  14957. instr,
  14958. _u("Hoisting array length load out of loop %u to landing pad block %u\n"),
  14959. hoistLengthLoadOutOfLoop->GetLoopNumber(),
  14960. hoistLengthLoadOutOfLoop->landingPad->GetBlockNum());
  14961. TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Hoisting array length load out of loop\n"));
  14962. Assert(hoistLengthLoadOutOfLoop->bailOutInfo);
  14963. EnsureBailTarget(hoistLengthLoadOutOfLoop);
  14964. InsertInstrInLandingPad(loadLength, hoistLengthLoadOutOfLoop);
  14965. InsertInstrInLandingPad(bailOnIrregularLength, hoistLengthLoadOutOfLoop);
  14966. bailOnIrregularLength =
  14967. bailOnIrregularLength->ConvertToBailOutInstr(hoistLengthLoadOutOfLoop->bailOutInfo, bailOutKind);
  14968. // Hoist the length value
  14969. for(InvariantBlockBackwardIterator it(
  14970. this,
  14971. currentBlock,
  14972. hoistLengthLoadOutOfLoop->landingPad,
  14973. baseOpnd->m_sym,
  14974. baseValue->GetValueNumber());
  14975. it.IsValid();
  14976. it.MoveNext())
  14977. {
  14978. BasicBlock *const block = it.Block();
  14979. block->globOptData.liveVarSyms->Set(newLengthSym->m_id);
  14980. Assert(!FindValue(block->globOptData.symToValueMap, newLengthSym));
  14981. Value *const lengthValueCopy = CopyValue(lengthValue, lengthValue->GetValueNumber());
  14982. SetValue(&block->globOptData, lengthValueCopy, newLengthSym);
  14983. this->SetSymStoreDirect(lengthValueCopy->GetValueInfo(), nullptr);
  14984. }
  14985. }
  14986. else
  14987. {
  14988. loadLength->SetByteCodeOffset(instr);
  14989. insertBeforeInstr->InsertBefore(loadLength);
  14990. bailOnIrregularLength->SetByteCodeOffset(instr);
  14991. insertBeforeInstr->InsertBefore(bailOnIrregularLength);
  14992. if(shareableBailOutInfo)
  14993. {
  14994. ShareBailOut();
  14995. bailOnIrregularLength = bailOnIrregularLength->ConvertToBailOutInstr(shareableBailOutInfo, bailOutKind);
  14996. }
  14997. else
  14998. {
  14999. GenerateBailAtOperation(&bailOnIrregularLength, bailOutKind);
  15000. shareableBailOutInfo = bailOnIrregularLength->GetBailOutInfo();
  15001. shareableBailOutInfoOriginalOwner = bailOnIrregularLength;
  15002. }
  15003. }
  15004. }
// Emits the instruction(s) that load the array's head segment into 'newHeadSegmentSym'.
// For ObjectType::ObjectWithArray, the object's internal array is loaded first and the head
// segment is then loaded from it; otherwise the head segment is loaded directly off the base
// array. The load(s) are either hoisted into the landing pad of
// 'hoistHeadSegmentLoadOutOfLoop' (when set) or inserted before the current instruction.
const auto InsertHeadSegmentLoad = [&]()
{
    TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment load\n"));
    Assert(newHeadSegmentSym);
    IR::RegOpnd *const headSegmentOpnd =
        IR::RegOpnd::New(newHeadSegmentSym, newHeadSegmentSym->GetType(), instr->m_func);
    headSegmentOpnd->SetIsJITOptimizedReg(true);
    IR::RegOpnd *const jitOptimizedBaseOpnd = baseOpnd->Copy(instr->m_func)->AsRegOpnd();
    jitOptimizedBaseOpnd->SetIsJITOptimizedReg(true);
    IR::Instr *loadObjectArray;
    if(baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
    {
        // objectArray = [base + offsetOf(objectArray)]
        // Note: headSegmentOpnd is used here as a temporary dst; the subsequent head segment load
        // reads from it and overwrites it with the final head segment value.
        loadObjectArray =
            IR::Instr::New(
                Js::OpCode::LdIndir,
                headSegmentOpnd,
                IR::IndirOpnd::New(
                    jitOptimizedBaseOpnd,
                    Js::DynamicObject::GetOffsetOfObjectArray(),
                    jitOptimizedBaseOpnd->GetType(),
                    instr->m_func),
                instr->m_func);
    }
    else
    {
        loadObjectArray = nullptr;
    }
    // headSegment = [array + offsetOf(headSegment)], where 'array' is either the object array
    // loaded above or the base operand itself.
    IR::Instr *const loadHeadSegment =
        IR::Instr::New(
            Js::OpCode::LdIndir,
            headSegmentOpnd,
            IR::IndirOpnd::New(
                loadObjectArray ? headSegmentOpnd : jitOptimizedBaseOpnd,
                Lowerer::GetArrayOffsetOfHeadSegment(baseValueType),
                headSegmentOpnd->GetType(),
                instr->m_func),
            instr->m_func);
    if(hoistHeadSegmentLoadOutOfLoop)
    {
        // Hoisting is only legal if the loop does not kill array head segments for JS arrays.
        Assert(!(isLikelyJsArray && hoistHeadSegmentLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegments()));
        TRACE_PHASE_INSTR(
            Js::ArraySegmentHoistPhase,
            instr,
            _u("Hoisting array segment load out of loop %u to landing pad block %u\n"),
            hoistHeadSegmentLoadOutOfLoop->GetLoopNumber(),
            hoistHeadSegmentLoadOutOfLoop->landingPad->GetBlockNum());
        TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment load out of loop\n"));
        if(loadObjectArray)
        {
            InsertInstrInLandingPad(loadObjectArray, hoistHeadSegmentLoadOutOfLoop);
        }
        InsertInstrInLandingPad(loadHeadSegment, hoistHeadSegmentLoadOutOfLoop);
    }
    else
    {
        if(loadObjectArray)
        {
            loadObjectArray->SetByteCodeOffset(instr);
            insertBeforeInstr->InsertBefore(loadObjectArray);
        }
        loadHeadSegment->SetByteCodeOffset(instr);
        insertBeforeInstr->InsertBefore(loadHeadSegment);
        // Record on the instruction that its head segment load was separated out here
        // (presumably consumed by later bail-out/lowering logic — not visible in this chunk).
        instr->loadedArrayHeadSegment = true;
    }
};
  15070. if(doHeadSegmentLoad && isLikelyJsArray)
  15071. {
  15072. // For javascript arrays, the head segment is required to load the head segment length
  15073. InsertHeadSegmentLoad();
  15074. }
  15075. if(doHeadSegmentLengthLoad)
  15076. {
  15077. Assert(!isLikelyJsArray || newHeadSegmentSym || baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym());
  15078. Assert(newHeadSegmentLengthSym);
  15079. Assert(!headSegmentLengthValue);
  15080. TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment length load\n"));
  15081. // Create an initial value for the head segment length
  15082. blockData.liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
  15083. headSegmentLengthValue = NewIntRangeValue(0, Js::SparseArraySegmentBase::MaxLength, false);
  15084. headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
  15085. SetValue(&blockData, headSegmentLengthValue, newHeadSegmentLengthSym);
  15086. // SetValue above would have set the sym store to newHeadSegmentLengthSym. This sym won't be used for copy-prop
  15087. // though, so remove it as the sym store.
  15088. this->SetSymStoreDirect(headSegmentLengthValue->GetValueInfo(), nullptr);
  15089. StackSym *const headSegmentSym =
  15090. isLikelyJsArray
  15091. ? newHeadSegmentSym ? newHeadSegmentSym : baseArrayValueInfo->HeadSegmentSym()
  15092. : nullptr;
  15093. IR::Instr *const loadHeadSegmentLength =
  15094. IR::Instr::New(
  15095. Js::OpCode::LdIndir,
  15096. IR::RegOpnd::New(newHeadSegmentLengthSym, newHeadSegmentLengthSym->GetType(), instr->m_func),
  15097. IR::IndirOpnd::New(
  15098. isLikelyJsArray ? IR::RegOpnd::New(headSegmentSym, headSegmentSym->GetType(), instr->m_func) : baseOpnd,
  15099. isLikelyJsArray
  15100. ? Js::SparseArraySegmentBase::GetOffsetOfLength()
  15101. : Lowerer::GetArrayOffsetOfLength(baseValueType),
  15102. newHeadSegmentLengthSym->GetType(),
  15103. instr->m_func),
  15104. instr->m_func);
  15105. loadHeadSegmentLength->GetDst()->SetIsJITOptimizedReg(true);
  15106. loadHeadSegmentLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  15107. // We don't check the head segment length for negative (very large uint32) values. For JS arrays, the bound checks
  15108. // cover that. For typed arrays, we currently don't allocate array buffers with more than 1 GB elements.
  15109. if(hoistHeadSegmentLengthLoadOutOfLoop)
  15110. {
  15111. Assert(
  15112. !(
  15113. isLikelyJsArray
  15114. ? hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegmentLengths()
  15115. : hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
  15116. ));
  15117. TRACE_PHASE_INSTR(
  15118. Js::ArraySegmentHoistPhase,
  15119. instr,
  15120. _u("Hoisting array segment length load out of loop %u to landing pad block %u\n"),
  15121. hoistHeadSegmentLengthLoadOutOfLoop->GetLoopNumber(),
  15122. hoistHeadSegmentLengthLoadOutOfLoop->landingPad->GetBlockNum());
  15123. TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment length load out of loop\n"));
  15124. InsertInstrInLandingPad(loadHeadSegmentLength, hoistHeadSegmentLengthLoadOutOfLoop);
  15125. // Hoist the head segment length value
  15126. for(InvariantBlockBackwardIterator it(
  15127. this,
  15128. currentBlock,
  15129. hoistHeadSegmentLengthLoadOutOfLoop->landingPad,
  15130. baseOpnd->m_sym,
  15131. baseValue->GetValueNumber());
  15132. it.IsValid();
  15133. it.MoveNext())
  15134. {
  15135. BasicBlock *const block = it.Block();
  15136. block->globOptData.liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
  15137. Assert(!FindValue(block->globOptData.symToValueMap, newHeadSegmentLengthSym));
  15138. Value *const headSegmentLengthValueCopy =
  15139. CopyValue(headSegmentLengthValue, headSegmentLengthValue->GetValueNumber());
  15140. SetValue(&block->globOptData, headSegmentLengthValueCopy, newHeadSegmentLengthSym);
  15141. this->SetSymStoreDirect(headSegmentLengthValueCopy->GetValueInfo(), nullptr);
  15142. }
  15143. }
  15144. else
  15145. {
  15146. loadHeadSegmentLength->SetByteCodeOffset(instr);
  15147. insertBeforeInstr->InsertBefore(loadHeadSegmentLength);
  15148. instr->loadedArrayHeadSegmentLength = true;
  15149. }
  15150. }
  15151. if(doExtractBoundChecks)
  15152. {
  15153. Assert(!(eliminatedLowerBoundCheck && eliminatedUpperBoundCheck));
  15154. Assert(baseOwnerIndir);
  15155. Assert(!baseOwnerIndir->GetIndexOpnd() || baseOwnerIndir->GetIndexOpnd()->m_sym->IsTypeSpec());
  15156. Assert(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable);
  15157. Assert(canBailOutOnArrayAccessHelperCall);
  15158. Assert(!isStore || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || Js::IsSimd128LoadStore(instr->m_opcode));
  15159. StackSym *const headSegmentLengthSym =
  15160. headSegmentLengthIsAvailable ? baseArrayValueInfo->HeadSegmentLengthSym() : newHeadSegmentLengthSym;
  15161. Assert(headSegmentLengthSym);
  15162. Assert(headSegmentLengthValue);
  15163. ArrayLowerBoundCheckHoistInfo lowerBoundCheckHoistInfo;
  15164. ArrayUpperBoundCheckHoistInfo upperBoundCheckHoistInfo;
  15165. bool failedToUpdateCompatibleLowerBoundCheck = false, failedToUpdateCompatibleUpperBoundCheck = false;
  15166. if(DoBoundCheckHoist())
  15167. {
  15168. if(indexVarSym)
  15169. {
  15170. TRACE_PHASE_INSTR_VERBOSE(
  15171. Js::Phase::BoundCheckHoistPhase,
  15172. instr,
  15173. _u("Determining array bound check hoistability for index s%u\n"),
  15174. indexVarSym->m_id);
  15175. }
  15176. else
  15177. {
  15178. TRACE_PHASE_INSTR_VERBOSE(
  15179. Js::Phase::BoundCheckHoistPhase,
  15180. instr,
  15181. _u("Determining array bound check hoistability for index %d\n"),
  15182. indexConstantBounds.LowerBound());
  15183. }
  15184. DetermineArrayBoundCheckHoistability(
  15185. !eliminatedLowerBoundCheck,
  15186. !eliminatedUpperBoundCheck,
  15187. lowerBoundCheckHoistInfo,
  15188. upperBoundCheckHoistInfo,
  15189. isLikelyJsArray,
  15190. indexVarSym,
  15191. indexValue,
  15192. indexConstantBounds,
  15193. headSegmentLengthSym,
  15194. headSegmentLengthValue,
  15195. headSegmentLengthConstantBounds,
  15196. hoistHeadSegmentLengthLoadOutOfLoop,
  15197. failedToUpdateCompatibleLowerBoundCheck,
  15198. failedToUpdateCompatibleUpperBoundCheck);
  15199. #ifdef ENABLE_SIMDJS
  15200. // SIMD_JS
  15201. UpdateBoundCheckHoistInfoForSimd(upperBoundCheckHoistInfo, newBaseValueType, instr);
  15202. #endif
  15203. }
  15204. if(!eliminatedLowerBoundCheck)
  15205. {
  15206. eliminatedLowerBoundCheck = true;
  15207. Assert(indexVarSym);
  15208. Assert(baseOwnerIndir->GetIndexOpnd());
  15209. Assert(indexValue);
  15210. ArrayLowerBoundCheckHoistInfo &hoistInfo = lowerBoundCheckHoistInfo;
  15211. if(hoistInfo.HasAnyInfo())
  15212. {
  15213. BasicBlock *hoistBlock;
  15214. if(hoistInfo.CompatibleBoundCheckBlock())
  15215. {
  15216. hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
  15217. TRACE_PHASE_INSTR(
  15218. Js::Phase::BoundCheckHoistPhase,
  15219. instr,
  15220. _u("Hoisting array lower bound check into existing bound check instruction in block %u\n"),
  15221. hoistBlock->GetBlockNum());
  15222. TESTTRACE_PHASE_INSTR(
  15223. Js::Phase::BoundCheckHoistPhase,
  15224. instr,
  15225. _u("Hoisting array lower bound check into existing bound check instruction\n"));
  15226. }
  15227. else
  15228. {
  15229. Assert(hoistInfo.Loop());
  15230. BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
  15231. hoistBlock = landingPad;
  15232. StackSym *indexIntSym;
  15233. if(hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
  15234. {
  15235. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad))
  15236. {
  15237. // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize
  15238. // it in this block if it is invariant, as the conversion will be hoisted along with value
  15239. // updates.
  15240. BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
  15241. IR::Instr *specializeBeforeInstr = nullptr;
  15242. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), &blockData) &&
  15243. OptIsInvariant(
  15244. hoistInfo.IndexSym(),
  15245. currentBlock,
  15246. hoistInfo.Loop(),
  15247. FindValue(hoistInfo.IndexSym()),
  15248. false,
  15249. true))
  15250. {
  15251. specializationBlock = currentBlock;
  15252. specializeBeforeInstr = insertBeforeInstr;
  15253. }
  15254. Assert(tempBv->IsEmpty());
  15255. tempBv->Set(hoistInfo.IndexSym()->m_id);
  15256. ToInt32(tempBv, specializationBlock, false, specializeBeforeInstr);
  15257. tempBv->ClearAll();
  15258. Assert(IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad));
  15259. }
  15260. indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
  15261. Assert(indexIntSym);
  15262. }
  15263. else
  15264. {
  15265. indexIntSym = hoistInfo.IndexSym();
  15266. Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
  15267. }
  15268. // The info in the landing pad may be better than the info in the current block due to changes made to
  15269. // the index sym inside the loop. Check if the bound check we intend to hoist is unnecessary in the
  15270. // landing pad.
  15271. if(!ValueInfo::IsLessThanOrEqualTo(
  15272. nullptr,
  15273. 0,
  15274. 0,
  15275. hoistInfo.IndexValue(),
  15276. hoistInfo.IndexConstantBounds().LowerBound(),
  15277. hoistInfo.IndexConstantBounds().UpperBound(),
  15278. hoistInfo.Offset()))
  15279. {
  15280. Assert(hoistInfo.IndexSym());
  15281. Assert(hoistInfo.Loop()->bailOutInfo);
  15282. EnsureBailTarget(hoistInfo.Loop());
  15283. if(hoistInfo.LoopCount())
  15284. {
  15285. // Generate the loop count and loop count based bound that will be used for the bound check
  15286. if(!hoistInfo.LoopCount()->HasBeenGenerated())
  15287. {
  15288. GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
  15289. }
  15290. GenerateSecondaryInductionVariableBound(
  15291. hoistInfo.Loop(),
  15292. indexVarSym->GetInt32EquivSym(nullptr),
  15293. hoistInfo.LoopCount(),
  15294. hoistInfo.MaxMagnitudeChange(),
  15295. hoistInfo.IndexSym());
  15296. }
  15297. IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
  15298. IR::Opnd* upperBound = IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func);
  15299. upperBound->SetIsJITOptimizedReg(true);
  15300. // 0 <= indexSym + offset (src1 <= src2 + dst)
  15301. IR::Instr *const boundCheck = CreateBoundsCheckInstr(
  15302. lowerBound,
  15303. upperBound,
  15304. hoistInfo.Offset(),
  15305. hoistInfo.IsLoopCountBasedBound()
  15306. ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
  15307. : IR::BailOutOnFailedHoistedBoundCheck,
  15308. hoistInfo.Loop()->bailOutInfo,
  15309. hoistInfo.Loop()->bailOutInfo->bailOutFunc);
  15310. InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
  15311. TRACE_PHASE_INSTR(
  15312. Js::Phase::BoundCheckHoistPhase,
  15313. instr,
  15314. _u("Hoisting array lower bound check out of loop %u to landing pad block %u, as (0 <= s%u + %d)\n"),
  15315. hoistInfo.Loop()->GetLoopNumber(),
  15316. landingPad->GetBlockNum(),
  15317. hoistInfo.IndexSym()->m_id,
  15318. hoistInfo.Offset());
  15319. TESTTRACE_PHASE_INSTR(
  15320. Js::Phase::BoundCheckHoistPhase,
  15321. instr,
  15322. _u("Hoisting array lower bound check out of loop\n"));
  15323. // Record the bound check instruction as available
  15324. const IntBoundCheck boundCheckInfo(
  15325. ZeroValueNumber,
  15326. hoistInfo.IndexValueNumber(),
  15327. boundCheck,
  15328. landingPad);
  15329. {
  15330. const bool added = blockData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15331. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  15332. }
  15333. for(InvariantBlockBackwardIterator it(this, currentBlock, landingPad, nullptr);
  15334. it.IsValid();
  15335. it.MoveNext())
  15336. {
  15337. const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15338. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  15339. }
  15340. }
  15341. }
  15342. // Update values of the syms involved in the bound check to reflect the bound check
  15343. if(hoistBlock != currentBlock && hoistInfo.IndexSym() && hoistInfo.Offset() != INT32_MIN)
  15344. {
  15345. for(InvariantBlockBackwardIterator it(
  15346. this,
  15347. currentBlock->next,
  15348. hoistBlock,
  15349. hoistInfo.IndexSym(),
  15350. hoistInfo.IndexValueNumber());
  15351. it.IsValid();
  15352. it.MoveNext())
  15353. {
  15354. Value *const value = it.InvariantSymValue();
  15355. IntConstantBounds constantBounds;
  15356. AssertVerify(value->GetValueInfo()->TryGetIntConstantBounds(&constantBounds, true));
  15357. ValueInfo *const newValueInfo =
  15358. UpdateIntBoundsForGreaterThanOrEqual(
  15359. value,
  15360. constantBounds,
  15361. nullptr,
  15362. IntConstantBounds(-hoistInfo.Offset(), -hoistInfo.Offset()),
  15363. false);
  15364. if(newValueInfo)
  15365. {
  15366. ChangeValueInfo(nullptr, value, newValueInfo);
  15367. if(it.Block() == currentBlock && value == indexValue)
  15368. {
  15369. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  15370. }
  15371. }
  15372. }
  15373. }
  15374. }
  15375. else
  15376. {
  15377. IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
  15378. IR::Opnd* upperBound = baseOwnerIndir->GetIndexOpnd();
  15379. upperBound->SetIsJITOptimizedReg(true);
  15380. const int offset = 0;
  15381. IR::Instr *boundCheck;
  15382. if(shareableBailOutInfo)
  15383. {
  15384. ShareBailOut();
  15385. boundCheck = CreateBoundsCheckInstr(
  15386. lowerBound,
  15387. upperBound,
  15388. offset,
  15389. IR::BailOutOnArrayAccessHelperCall,
  15390. shareableBailOutInfo,
  15391. shareableBailOutInfo->bailOutFunc);
  15392. }
  15393. else
  15394. {
  15395. boundCheck = CreateBoundsCheckInstr(
  15396. lowerBound,
  15397. upperBound,
  15398. offset,
  15399. instr->m_func);
  15400. }
  15401. boundCheck->SetByteCodeOffset(instr);
  15402. insertBeforeInstr->InsertBefore(boundCheck);
  15403. if(!shareableBailOutInfo)
  15404. {
  15405. GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
  15406. shareableBailOutInfo = boundCheck->GetBailOutInfo();
  15407. shareableBailOutInfoOriginalOwner = boundCheck;
  15408. }
  15409. TRACE_PHASE_INSTR(
  15410. Js::Phase::BoundCheckEliminationPhase,
  15411. instr,
  15412. _u("Separating array lower bound check, as (0 <= s%u)\n"),
  15413. indexVarSym->m_id);
  15414. TESTTRACE_PHASE_INSTR(
  15415. Js::Phase::BoundCheckEliminationPhase,
  15416. instr,
  15417. _u("Separating array lower bound check\n"));
  15418. if(DoBoundCheckHoist())
  15419. {
  15420. // Record the bound check instruction as available
  15421. const bool added =
  15422. blockData.availableIntBoundChecks->AddNew(
  15423. IntBoundCheck(ZeroValueNumber, indexValue->GetValueNumber(), boundCheck, currentBlock)) >= 0;
  15424. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  15425. }
  15426. }
  15427. // Update the index value to reflect the bound check
  15428. ValueInfo *const newValueInfo =
  15429. UpdateIntBoundsForGreaterThanOrEqual(
  15430. indexValue,
  15431. indexConstantBounds,
  15432. nullptr,
  15433. IntConstantBounds(0, 0),
  15434. false);
  15435. if(newValueInfo)
  15436. {
  15437. ChangeValueInfo(nullptr, indexValue, newValueInfo);
  15438. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  15439. }
  15440. }
  15441. if(!eliminatedUpperBoundCheck)
  15442. {
  15443. eliminatedUpperBoundCheck = true;
  15444. ArrayUpperBoundCheckHoistInfo &hoistInfo = upperBoundCheckHoistInfo;
  15445. if(hoistInfo.HasAnyInfo())
  15446. {
  15447. BasicBlock *hoistBlock;
  15448. if(hoistInfo.CompatibleBoundCheckBlock())
  15449. {
  15450. hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
  15451. TRACE_PHASE_INSTR(
  15452. Js::Phase::BoundCheckHoistPhase,
  15453. instr,
  15454. _u("Hoisting array upper bound check into existing bound check instruction in block %u\n"),
  15455. hoistBlock->GetBlockNum());
  15456. TESTTRACE_PHASE_INSTR(
  15457. Js::Phase::BoundCheckHoistPhase,
  15458. instr,
  15459. _u("Hoisting array upper bound check into existing bound check instruction\n"));
  15460. }
  15461. else
  15462. {
  15463. Assert(hoistInfo.Loop());
  15464. BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
  15465. hoistBlock = landingPad;
  15466. StackSym *indexIntSym;
  15467. if(hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
  15468. {
  15469. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad))
  15470. {
  15471. // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize it
  15472. // in this block if it is invariant, as the conversion will be hoisted along with value updates.
  15473. BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
  15474. IR::Instr *specializeBeforeInstr = nullptr;
  15475. if(!IsInt32TypeSpecialized(hoistInfo.IndexSym(), &blockData) &&
  15476. OptIsInvariant(
  15477. hoistInfo.IndexSym(),
  15478. currentBlock,
  15479. hoistInfo.Loop(),
  15480. FindValue(hoistInfo.IndexSym()),
  15481. false,
  15482. true))
  15483. {
  15484. specializationBlock = currentBlock;
  15485. specializeBeforeInstr = insertBeforeInstr;
  15486. }
  15487. Assert(tempBv->IsEmpty());
  15488. tempBv->Set(hoistInfo.IndexSym()->m_id);
  15489. ToInt32(tempBv, specializationBlock, false, specializeBeforeInstr);
  15490. tempBv->ClearAll();
  15491. Assert(IsInt32TypeSpecialized(hoistInfo.IndexSym(), landingPad));
  15492. }
  15493. indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
  15494. Assert(indexIntSym);
  15495. }
  15496. else
  15497. {
  15498. indexIntSym = hoistInfo.IndexSym();
  15499. Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
  15500. }
  15501. // The info in the landing pad may be better than the info in the current block due to changes made to the
  15502. // index sym inside the loop. Check if the bound check we intend to hoist is unnecessary in the landing pad.
  15503. if(!ValueInfo::IsLessThanOrEqualTo(
  15504. hoistInfo.IndexValue(),
  15505. hoistInfo.IndexConstantBounds().LowerBound(),
  15506. hoistInfo.IndexConstantBounds().UpperBound(),
  15507. hoistInfo.HeadSegmentLengthValue(),
  15508. hoistInfo.HeadSegmentLengthConstantBounds().LowerBound(),
  15509. hoistInfo.HeadSegmentLengthConstantBounds().UpperBound(),
  15510. hoistInfo.Offset()))
  15511. {
  15512. Assert(hoistInfo.Loop()->bailOutInfo);
  15513. EnsureBailTarget(hoistInfo.Loop());
  15514. if(hoistInfo.LoopCount())
  15515. {
  15516. // Generate the loop count and loop count based bound that will be used for the bound check
  15517. if(!hoistInfo.LoopCount()->HasBeenGenerated())
  15518. {
  15519. GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
  15520. }
  15521. GenerateSecondaryInductionVariableBound(
  15522. hoistInfo.Loop(),
  15523. indexVarSym->GetInt32EquivSym(nullptr),
  15524. hoistInfo.LoopCount(),
  15525. hoistInfo.MaxMagnitudeChange(),
  15526. hoistInfo.IndexSym());
  15527. }
  15528. IR::Opnd* lowerBound = indexIntSym
  15529. ? static_cast<IR::Opnd *>(IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func))
  15530. : IR::IntConstOpnd::New(
  15531. hoistInfo.IndexConstantBounds().LowerBound(),
  15532. TyInt32,
  15533. instr->m_func);
  15534. lowerBound->SetIsJITOptimizedReg(true);
  15535. IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
  15536. upperBound->SetIsJITOptimizedReg(true);
  15537. // indexSym <= headSegmentLength + offset (src1 <= src2 + dst)
  15538. IR::Instr *const boundCheck = CreateBoundsCheckInstr(
  15539. lowerBound,
  15540. upperBound,
  15541. hoistInfo.Offset(),
  15542. hoistInfo.IsLoopCountBasedBound()
  15543. ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
  15544. : IR::BailOutOnFailedHoistedBoundCheck,
  15545. hoistInfo.Loop()->bailOutInfo,
  15546. hoistInfo.Loop()->bailOutInfo->bailOutFunc);
  15547. InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
  15548. if(indexIntSym)
  15549. {
  15550. TRACE_PHASE_INSTR(
  15551. Js::Phase::BoundCheckHoistPhase,
  15552. instr,
  15553. _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (s%u <= s%u + %d)\n"),
  15554. hoistInfo.Loop()->GetLoopNumber(),
  15555. landingPad->GetBlockNum(),
  15556. hoistInfo.IndexSym()->m_id,
  15557. headSegmentLengthSym->m_id,
  15558. hoistInfo.Offset());
  15559. }
  15560. else
  15561. {
  15562. TRACE_PHASE_INSTR(
  15563. Js::Phase::BoundCheckHoistPhase,
  15564. instr,
  15565. _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (%d <= s%u + %d)\n"),
  15566. hoistInfo.Loop()->GetLoopNumber(),
  15567. landingPad->GetBlockNum(),
  15568. hoistInfo.IndexConstantBounds().LowerBound(),
  15569. headSegmentLengthSym->m_id,
  15570. hoistInfo.Offset());
  15571. }
  15572. TESTTRACE_PHASE_INSTR(
  15573. Js::Phase::BoundCheckHoistPhase,
  15574. instr,
  15575. _u("Hoisting array upper bound check out of loop\n"));
  15576. // Record the bound check instruction as available
  15577. const IntBoundCheck boundCheckInfo(
  15578. hoistInfo.IndexValue() ? hoistInfo.IndexValueNumber() : ZeroValueNumber,
  15579. hoistInfo.HeadSegmentLengthValue()->GetValueNumber(),
  15580. boundCheck,
  15581. landingPad);
  15582. {
  15583. const bool added = blockData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15584. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  15585. }
  15586. for(InvariantBlockBackwardIterator it(this, currentBlock, landingPad, nullptr);
  15587. it.IsValid();
  15588. it.MoveNext())
  15589. {
  15590. const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  15591. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  15592. }
  15593. }
  15594. }
  15595. // Update values of the syms involved in the bound check to reflect the bound check
  15596. Assert(!hoistInfo.Loop() || hoistBlock != currentBlock);
  15597. if(hoistBlock != currentBlock)
  15598. {
  15599. for(InvariantBlockBackwardIterator it(this, currentBlock->next, hoistBlock, nullptr);
  15600. it.IsValid();
  15601. it.MoveNext())
  15602. {
  15603. BasicBlock *const block = it.Block();
  15604. Value *leftValue;
  15605. IntConstantBounds leftConstantBounds;
  15606. if(hoistInfo.IndexSym())
  15607. {
  15608. leftValue = FindValue(block->globOptData.symToValueMap, hoistInfo.IndexSym());
  15609. if(!leftValue || leftValue->GetValueNumber() != hoistInfo.IndexValueNumber())
  15610. {
  15611. continue;
  15612. }
  15613. AssertVerify(leftValue->GetValueInfo()->TryGetIntConstantBounds(&leftConstantBounds, true));
  15614. }
  15615. else
  15616. {
  15617. leftValue = nullptr;
  15618. leftConstantBounds = hoistInfo.IndexConstantBounds();
  15619. }
  15620. Value *const rightValue = FindValue(block->globOptData.symToValueMap, headSegmentLengthSym);
  15621. if(!rightValue)
  15622. {
  15623. continue;
  15624. }
  15625. Assert(rightValue->GetValueNumber() == headSegmentLengthValue->GetValueNumber());
  15626. IntConstantBounds rightConstantBounds;
  15627. AssertVerify(rightValue->GetValueInfo()->TryGetIntConstantBounds(&rightConstantBounds));
  15628. ValueInfo *const newValueInfoForLessThanOrEqual =
  15629. UpdateIntBoundsForLessThanOrEqual(
  15630. leftValue,
  15631. leftConstantBounds,
  15632. rightValue,
  15633. rightConstantBounds,
  15634. hoistInfo.Offset(),
  15635. false);
  15636. if (newValueInfoForLessThanOrEqual)
  15637. {
  15638. ChangeValueInfo(nullptr, leftValue, newValueInfoForLessThanOrEqual);
  15639. AssertVerify(newValueInfoForLessThanOrEqual->TryGetIntConstantBounds(&leftConstantBounds, true));
  15640. if(block == currentBlock && leftValue == indexValue)
  15641. {
  15642. Assert(newValueInfoForLessThanOrEqual->IsInt());
  15643. indexConstantBounds = leftConstantBounds;
  15644. }
  15645. }
  15646. if(hoistInfo.Offset() != INT32_MIN)
  15647. {
  15648. ValueInfo *const newValueInfoForGreaterThanOrEqual =
  15649. UpdateIntBoundsForGreaterThanOrEqual(
  15650. rightValue,
  15651. rightConstantBounds,
  15652. leftValue,
  15653. leftConstantBounds,
  15654. -hoistInfo.Offset(),
  15655. false);
  15656. if (newValueInfoForGreaterThanOrEqual)
  15657. {
  15658. ChangeValueInfo(nullptr, rightValue, newValueInfoForGreaterThanOrEqual);
  15659. if(block == currentBlock)
  15660. {
  15661. Assert(rightValue == headSegmentLengthValue);
  15662. AssertVerify(newValueInfoForGreaterThanOrEqual->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
  15663. }
  15664. }
  15665. }
  15666. }
  15667. }
  15668. }
  15669. else
  15670. {
  15671. IR::Opnd* lowerBound = baseOwnerIndir->GetIndexOpnd()
  15672. ? static_cast<IR::Opnd *>(baseOwnerIndir->GetIndexOpnd())
  15673. : IR::IntConstOpnd::New(baseOwnerIndir->GetOffset(), TyInt32, instr->m_func);
  15674. lowerBound->SetIsJITOptimizedReg(true);
  15675. IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
  15676. upperBound->SetIsJITOptimizedReg(true);
  15677. const int offset = GetBoundCheckOffsetForSimd(newBaseValueType, instr, -1);
  15678. IR::Instr *boundCheck;
  15679. // index <= headSegmentLength - 1 (src1 <= src2 + dst)
  15680. if (shareableBailOutInfo)
  15681. {
  15682. ShareBailOut();
  15683. boundCheck = CreateBoundsCheckInstr(
  15684. lowerBound,
  15685. upperBound,
  15686. offset,
  15687. IR::BailOutOnArrayAccessHelperCall,
  15688. shareableBailOutInfo,
  15689. shareableBailOutInfo->bailOutFunc);
  15690. }
  15691. else
  15692. {
  15693. boundCheck = CreateBoundsCheckInstr(
  15694. lowerBound,
  15695. upperBound,
  15696. offset,
  15697. instr->m_func);
  15698. }
  15699. boundCheck->SetByteCodeOffset(instr);
  15700. insertBeforeInstr->InsertBefore(boundCheck);
  15701. if(!shareableBailOutInfo)
  15702. {
  15703. GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
  15704. shareableBailOutInfo = boundCheck->GetBailOutInfo();
  15705. shareableBailOutInfoOriginalOwner = boundCheck;
  15706. }
  15707. instr->extractedUpperBoundCheckWithoutHoisting = true;
  15708. if(baseOwnerIndir->GetIndexOpnd())
  15709. {
  15710. TRACE_PHASE_INSTR(
  15711. Js::Phase::BoundCheckEliminationPhase,
  15712. instr,
  15713. _u("Separating array upper bound check, as (s%u < s%u)\n"),
  15714. indexVarSym->m_id,
  15715. headSegmentLengthSym->m_id);
  15716. }
  15717. else
  15718. {
  15719. TRACE_PHASE_INSTR(
  15720. Js::Phase::BoundCheckEliminationPhase,
  15721. instr,
  15722. _u("Separating array upper bound check, as (%d < s%u)\n"),
  15723. baseOwnerIndir->GetOffset(),
  15724. headSegmentLengthSym->m_id);
  15725. }
  15726. TESTTRACE_PHASE_INSTR(
  15727. Js::Phase::BoundCheckEliminationPhase,
  15728. instr,
  15729. _u("Separating array upper bound check\n"));
  15730. if(DoBoundCheckHoist())
  15731. {
  15732. // Record the bound check instruction as available
  15733. const bool added =
  15734. blockData.availableIntBoundChecks->AddNew(
  15735. IntBoundCheck(
  15736. indexValue ? indexValue->GetValueNumber() : ZeroValueNumber,
  15737. headSegmentLengthValue->GetValueNumber(),
  15738. boundCheck,
  15739. currentBlock)) >= 0;
  15740. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  15741. }
  15742. }
  15743. // Update the index and head segment length values to reflect the bound check
  15744. ValueInfo *newValueInfo =
  15745. UpdateIntBoundsForLessThan(
  15746. indexValue,
  15747. indexConstantBounds,
  15748. headSegmentLengthValue,
  15749. headSegmentLengthConstantBounds,
  15750. false);
  15751. if(newValueInfo)
  15752. {
  15753. ChangeValueInfo(nullptr, indexValue, newValueInfo);
  15754. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  15755. }
  15756. newValueInfo =
  15757. UpdateIntBoundsForGreaterThan(
  15758. headSegmentLengthValue,
  15759. headSegmentLengthConstantBounds,
  15760. indexValue,
  15761. indexConstantBounds,
  15762. false);
  15763. if(newValueInfo)
  15764. {
  15765. ChangeValueInfo(nullptr, headSegmentLengthValue, newValueInfo);
  15766. }
  15767. }
  15768. }
  15769. if(doHeadSegmentLoad && !isLikelyJsArray)
  15770. {
  15771. // For typed arrays, load the length first, followed by the bound checks, and then load the head segment. This
  15772. // allows the length sym to become dead by the time of the head segment load, freeing up the register for use by the
  15773. // head segment sym.
  15774. InsertHeadSegmentLoad();
  15775. }
  15776. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
  15777. {
  15778. UpdateValue(newHeadSegmentSym, newHeadSegmentLengthSym, newLengthSym);
  15779. baseValueInfo = baseValue->GetValueInfo();
  15780. baseArrayValueInfo = baseValueInfo->IsArrayValueInfo() ? baseValueInfo->AsArrayValueInfo() : nullptr;
  15781. // Iterate up to the root loop's landing pad until all necessary value info is updated
  15782. uint hoistItemCount =
  15783. static_cast<uint>(!!hoistChecksOutOfLoop) +
  15784. !!hoistHeadSegmentLoadOutOfLoop +
  15785. !!hoistHeadSegmentLengthLoadOutOfLoop +
  15786. !!hoistLengthLoadOutOfLoop;
  15787. if(hoistItemCount != 0)
  15788. {
  15789. Loop *rootLoop = nullptr;
  15790. for(Loop *loop = currentBlock->loop; loop; loop = loop->parent)
  15791. {
  15792. rootLoop = loop;
  15793. }
  15794. Assert(rootLoop);
  15795. ValueInfo *valueInfoToHoist = baseValueInfo;
  15796. bool removeHeadSegment, removeHeadSegmentLength, removeLength;
  15797. if(baseArrayValueInfo)
  15798. {
  15799. removeHeadSegment = baseArrayValueInfo->HeadSegmentSym() && !hoistHeadSegmentLoadOutOfLoop;
  15800. removeHeadSegmentLength =
  15801. baseArrayValueInfo->HeadSegmentLengthSym() && !hoistHeadSegmentLengthLoadOutOfLoop;
  15802. removeLength = baseArrayValueInfo->LengthSym() && !hoistLengthLoadOutOfLoop;
  15803. }
  15804. else
  15805. {
  15806. removeLength = removeHeadSegmentLength = removeHeadSegment = false;
  15807. }
  15808. for(InvariantBlockBackwardIterator it(
  15809. this,
  15810. currentBlock,
  15811. rootLoop->landingPad,
  15812. baseOpnd->m_sym,
  15813. baseValue->GetValueNumber());
  15814. it.IsValid();
  15815. it.MoveNext())
  15816. {
  15817. if(removeHeadSegment || removeHeadSegmentLength || removeLength)
  15818. {
  15819. // Remove information that shouldn't be there anymore, from the value info
  15820. valueInfoToHoist =
  15821. valueInfoToHoist->AsArrayValueInfo()->Copy(
  15822. alloc,
  15823. !removeHeadSegment,
  15824. !removeHeadSegmentLength,
  15825. !removeLength);
  15826. removeLength = removeHeadSegmentLength = removeHeadSegment = false;
  15827. }
  15828. BasicBlock *const block = it.Block();
  15829. Value *const blockBaseValue = it.InvariantSymValue();
  15830. HoistInvariantValueInfo(valueInfoToHoist, blockBaseValue, block);
  15831. // See if we have completed hoisting value info for one of the items
  15832. if(hoistChecksOutOfLoop && block == hoistChecksOutOfLoop->landingPad)
  15833. {
  15834. // All other items depend on array checks, so we can just stop here
  15835. hoistChecksOutOfLoop = nullptr;
  15836. break;
  15837. }
  15838. if(hoistHeadSegmentLoadOutOfLoop && block == hoistHeadSegmentLoadOutOfLoop->landingPad)
  15839. {
  15840. hoistHeadSegmentLoadOutOfLoop = nullptr;
  15841. if(--hoistItemCount == 0)
  15842. break;
  15843. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentSym())
  15844. removeHeadSegment = true;
  15845. }
  15846. if(hoistHeadSegmentLengthLoadOutOfLoop && block == hoistHeadSegmentLengthLoadOutOfLoop->landingPad)
  15847. {
  15848. hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
  15849. if(--hoistItemCount == 0)
  15850. break;
  15851. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentLengthSym())
  15852. removeHeadSegmentLength = true;
  15853. }
  15854. if(hoistLengthLoadOutOfLoop && block == hoistLengthLoadOutOfLoop->landingPad)
  15855. {
  15856. hoistLengthLoadOutOfLoop = nullptr;
  15857. if(--hoistItemCount == 0)
  15858. break;
  15859. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->LengthSym())
  15860. removeLength = true;
  15861. }
  15862. }
  15863. }
  15864. }
  15865. }
  15866. IR::ArrayRegOpnd *baseArrayOpnd;
  15867. if(baseArrayValueInfo)
  15868. {
  15869. // Update the opnd to include the associated syms
  15870. baseArrayOpnd =
  15871. baseArrayValueInfo->CreateOpnd(
  15872. baseOpnd,
  15873. needsHeadSegment,
  15874. needsHeadSegmentLength || (!isLikelyJsArray && needsLength),
  15875. needsLength,
  15876. eliminatedLowerBoundCheck,
  15877. eliminatedUpperBoundCheck,
  15878. instr->m_func);
  15879. if(baseOwnerInstr)
  15880. {
  15881. Assert(baseOwnerInstr->GetSrc1() == baseOpnd);
  15882. baseOwnerInstr->ReplaceSrc1(baseArrayOpnd);
  15883. }
  15884. else
  15885. {
  15886. Assert(baseOwnerIndir);
  15887. Assert(baseOwnerIndir->GetBaseOpnd() == baseOpnd);
  15888. baseOwnerIndir->ReplaceBaseOpnd(baseArrayOpnd);
  15889. }
  15890. baseOpnd = baseArrayOpnd;
  15891. }
  15892. else
  15893. {
  15894. baseArrayOpnd = nullptr;
  15895. }
  15896. if(isLikelyJsArray)
  15897. {
  15898. // Insert an instruction to indicate to the dead-store pass that implicit calls need to be kept disabled until this
  15899. // instruction. Operations other than LdElem and StElem don't benefit much from arrays having no missing values, so
  15900. // no need to ensure that the array still has no missing values. For a particular array, if none of the accesses
  15901. // benefit much from the no-missing-values information, it may be beneficial to avoid checking for no missing
  15902. // values, especially in the case for a single array access, where the cost of the check could be relatively
  15903. // significant. An StElem has to do additional checks in the common path if the array may have missing values, and
  15904. // a StElem that operates on an array that has no missing values is more likely to keep the no-missing-values info
  15905. // on the array more precise, so it still benefits a little from the no-missing-values info.
  15906. CaptureNoImplicitCallUses(baseOpnd, isLoad || isStore);
  15907. }
  15908. else if(baseArrayOpnd && baseArrayOpnd->HeadSegmentLengthSym())
  15909. {
  15910. // A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the typed
  15911. // array's length is set to zero. Insert an instruction to indicate to the dead-store pass that implicit calls need to
  15912. // be disabled until this instruction.
  15913. IR::RegOpnd *const headSegmentLengthOpnd =
  15914. IR::RegOpnd::New(
  15915. baseArrayOpnd->HeadSegmentLengthSym(),
  15916. baseArrayOpnd->HeadSegmentLengthSym()->GetType(),
  15917. instr->m_func);
  15918. const IR::AutoReuseOpnd autoReuseHeadSegmentLengthOpnd(headSegmentLengthOpnd, instr->m_func);
  15919. CaptureNoImplicitCallUses(headSegmentLengthOpnd, false);
  15920. }
  15921. const auto OnEliminated = [&](const Js::Phase phase, const char *const eliminatedLoad)
  15922. {
  15923. TRACE_TESTTRACE_PHASE_INSTR(phase, instr, _u("Eliminating array %S\n"), eliminatedLoad);
  15924. };
  15925. OnEliminated(Js::Phase::ArrayCheckHoistPhase, "checks");
  15926. if(baseArrayOpnd)
  15927. {
  15928. if(baseArrayOpnd->HeadSegmentSym())
  15929. {
  15930. OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment load");
  15931. }
  15932. if(baseArrayOpnd->HeadSegmentLengthSym())
  15933. {
  15934. OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment length load");
  15935. }
  15936. if(baseArrayOpnd->LengthSym())
  15937. {
  15938. OnEliminated(Js::Phase::ArrayLengthHoistPhase, "length load");
  15939. }
  15940. if(baseArrayOpnd->EliminatedLowerBoundCheck())
  15941. {
  15942. OnEliminated(Js::Phase::BoundCheckEliminationPhase, "lower bound check");
  15943. }
  15944. if(baseArrayOpnd->EliminatedUpperBoundCheck())
  15945. {
  15946. OnEliminated(Js::Phase::BoundCheckEliminationPhase, "upper bound check");
  15947. }
  15948. }
  15949. if(!canBailOutOnArrayAccessHelperCall)
  15950. {
  15951. return;
  15952. }
  15953. // Bail out instead of generating a helper call. This helps to remove the array reference when the head segment and head
  15954. // segment length are available, reduces code size, and allows bound checks to be separated.
  15955. if(instr->HasBailOutInfo())
  15956. {
  15957. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  15958. Assert(
  15959. !(bailOutKind & ~IR::BailOutKindBits) ||
  15960. (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
  15961. instr->SetBailOutKind(bailOutKind & IR::BailOutKindBits | IR::BailOutOnArrayAccessHelperCall);
  15962. }
  15963. else
  15964. {
  15965. GenerateBailAtOperation(&instr, IR::BailOutOnArrayAccessHelperCall);
  15966. }
  15967. }
  15968. void
  15969. GlobOpt::CaptureNoImplicitCallUses(
  15970. IR::Opnd *opnd,
  15971. const bool usesNoMissingValuesInfo,
  15972. IR::Instr *const includeCurrentInstr)
  15973. {
  15974. Assert(!IsLoopPrePass());
  15975. Assert(noImplicitCallUsesToInsert);
  15976. Assert(opnd);
  15977. // The opnd may be deleted later, so make a copy to ensure it is alive for inserting NoImplicitCallUses later
  15978. opnd = opnd->Copy(func);
  15979. if(!usesNoMissingValuesInfo)
  15980. {
  15981. const ValueType valueType(opnd->GetValueType());
  15982. if(valueType.IsArrayOrObjectWithArray() && valueType.HasNoMissingValues())
  15983. {
  15984. // Inserting NoImplicitCallUses for an opnd with a definitely-array-with-no-missing-values value type means that the
  15985. // instruction following it uses the information that the array has no missing values in some way, for instance, it
  15986. // may omit missing value checks. Based on that, the dead-store phase in turn ensures that the necessary bailouts
  15987. // are inserted to ensure that the array still has no missing values until the following instruction. Since
  15988. // 'usesNoMissingValuesInfo' is false, change the value type to indicate to the dead-store phase that the following
  15989. // instruction does not use the no-missing-values information.
  15990. opnd->SetValueType(valueType.SetHasNoMissingValues(false));
  15991. }
  15992. }
  15993. if(includeCurrentInstr)
  15994. {
  15995. IR::Instr *const noImplicitCallUses =
  15996. IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, includeCurrentInstr->m_func);
  15997. noImplicitCallUses->SetSrc1(opnd);
  15998. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  15999. includeCurrentInstr->InsertAfter(noImplicitCallUses);
  16000. return;
  16001. }
  16002. noImplicitCallUsesToInsert->Add(opnd);
  16003. }
  16004. void
  16005. GlobOpt::InsertNoImplicitCallUses(IR::Instr *const instr)
  16006. {
  16007. Assert(noImplicitCallUsesToInsert);
  16008. const int n = noImplicitCallUsesToInsert->Count();
  16009. if(n == 0)
  16010. {
  16011. return;
  16012. }
  16013. IR::Instr *const insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();
  16014. for(int i = 0; i < n;)
  16015. {
  16016. IR::Instr *const noImplicitCallUses = IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, instr->m_func);
  16017. noImplicitCallUses->SetSrc1(noImplicitCallUsesToInsert->Item(i));
  16018. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  16019. ++i;
  16020. if(i < n)
  16021. {
  16022. noImplicitCallUses->SetSrc2(noImplicitCallUsesToInsert->Item(i));
  16023. noImplicitCallUses->GetSrc2()->SetIsJITOptimizedReg(true);
  16024. ++i;
  16025. }
  16026. noImplicitCallUses->SetByteCodeOffset(instr);
  16027. insertBeforeInstr->InsertBefore(noImplicitCallUses);
  16028. }
  16029. noImplicitCallUsesToInsert->Clear();
  16030. }
  16031. void
  16032. GlobOpt::PrepareLoopArrayCheckHoist()
  16033. {
  16034. if(IsLoopPrePass() || !currentBlock->loop || !currentBlock->isLoopHeader || !currentBlock->loop->parent)
  16035. {
  16036. return;
  16037. }
  16038. if(currentBlock->loop->parent->needImplicitCallBailoutChecksForJsArrayCheckHoist)
  16039. {
  16040. // If the parent loop is an array check elimination candidate, so is the current loop. Even though the current loop may
  16041. // not have array accesses, if the parent loop hoists array checks, the current loop also needs implicit call checks.
  16042. currentBlock->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
  16043. }
  16044. }
// Determines which pieces of hoisted/tracked JS-array information (no-missing-values, head segment,
// head segment length, array length, native-array-ness) the given instruction may invalidate ("kill").
// Returns a JsArrayKills bit set; an empty set means the instruction is harmless to tracked array info.
JsArrayKills
GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
{
    Assert(instr);

    JsArrayKills kills;
    if(instr->UsesAllFields())
    {
        // Calls can (but are unlikely to) change a javascript array into an ES5 array, which may have different behavior for
        // index properties.
        kills.SetKillsAllArrays();
        return kills;
    }

    // Snapshot which array-related hoisting optimizations are enabled; if none are, there is
    // nothing tracked that could be killed.
    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    const bool doArraySegmentHoist = DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array));
    Assert(doArraySegmentHoist == DoArraySegmentHoist(ValueType::GetObject(ObjectType::ObjectWithArray)));
    const bool doArrayLengthHoist = DoArrayLengthHoist();
    if(!doArrayMissingValueCheckHoist && !doNativeArrayTypeSpec && !doArraySegmentHoist && !doArrayLengthHoist)
    {
        return kills;
    }

    // The following operations may create missing values in an array in an unlikely circumstance. Even though they don't kill
    // the fact that the 'this' parameter is an array (when implicit calls are disabled), we don't have a way to say the value
    // type is definitely array but it likely has no missing values. So, these will kill the definite value type as well, making
    // it likely array, such that the array checks will have to be redone.

    const bool useValueTypes = !IsLoopPrePass(); // Source value types are not guaranteed to be correct in a loop prepass

    switch(instr->m_opcode)
    {
        // Element stores: profiled info tells us whether the store is likely to land outside the
        // head segment or outside the array bounds, each of which kills different tracked state.
        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        {
            Assert(instr->GetDst());
            if(!instr->GetDst()->IsIndirOpnd())
            {
                break;
            }
            const ValueType baseValueType =
                useValueTypes ? instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType() : ValueType::Uninitialized;
            if(useValueTypes && baseValueType.IsNotArrayOrObjectWithArray())
            {
                // The base is known not to be an array (or object with internal array); nothing tracked is affected.
                break;
            }
            if(instr->IsProfiledInstr())
            {
                const Js::StElemInfo *const stElemInfo = instr->AsProfiledInstr()->u.stElemInfo;
                if(doArraySegmentHoist && stElemInfo->LikelyStoresOutsideHeadSegmentBounds())
                {
                    kills.SetKillsArrayHeadSegments();
                    kills.SetKillsArrayHeadSegmentLengths();
                }
                if(doArrayLengthHoist &&
                    !(useValueTypes && baseValueType.IsNotArray()) &&
                    stElemInfo->LikelyStoresOutsideArrayBounds())
                {
                    kills.SetKillsArrayLengths();
                }
            }
            break;
        }

        // Deleting an element creates a hole (missing value) and can shrink the used head segment length.
        case Js::OpCode::DeleteElemI_A:
        case Js::OpCode::DeleteElemIStrict_A:
            Assert(instr->GetSrc1());
            if(!instr->GetSrc1()->IsIndirOpnd() ||
                (useValueTypes && instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsNotArrayOrObjectWithArray()))
            {
                break;
            }
            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }
            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            break;

        // A store to a 'length' property may truncate or grow an array, killing head segment lengths
        // and tracked lengths.
        case Js::OpCode::StFld:
        case Js::OpCode::StFldStrict:
        {
            Assert(instr->GetDst());

            if(!doArraySegmentHoist && !doArrayLengthHoist)
            {
                break;
            }

            IR::SymOpnd *const symDst = instr->GetDst()->AsSymOpnd();
            if(!symDst->IsPropertySymOpnd())
            {
                break;
            }

            IR::PropertySymOpnd *const dst = symDst->AsPropertySymOpnd();

            if(dst->m_sym->AsPropertySym()->m_propertyId != Js::PropertyIds::length)
            {
                break;
            }

            if(useValueTypes && dst->GetPropertyOwnerValueType().IsNotArray())
            {
                // Setting the 'length' property of an object that is not an array, even if it has an internal array, does
                // not kill the head segment or head segment length of any arrays.
                break;
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }
            if(doArrayLengthHoist)
            {
                kills.SetKillsArrayLengths();
            }
            break;
        }

        // Inlined Array.prototype.push: appends can create missing values, reallocate/extend the head
        // segment, change the length, and (on type mismatch) convert a native array.
        case Js::OpCode::InlineArrayPush:
        {
            Assert(instr->GetSrc2());
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());

            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doArrayMissingValueCheckHoist)
            {
                kills.SetKillsArraysWithNoMissingValues();
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegments();
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }

            // Don't kill NativeArray, if there is no mismatch between array's type and element's type.
            if(doNativeArrayTypeSpec &&
               !(useValueTypes && arrayValueType.IsNativeArray() &&
                    ((arrayValueType.IsLikelyNativeIntArray() && instr->GetSrc2()->IsInt32()) ||
                     (arrayValueType.IsLikelyNativeFloatArray() && instr->GetSrc2()->IsFloat()))
                ) &&
               !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                kills.SetKillsNativeArrays();
            }

            break;
        }

        // Inlined Array.prototype.pop: removing the last element shrinks the head segment length and length.
        case Js::OpCode::InlineArrayPop:
        {
            IR::Opnd *const arrayOpnd = instr->GetSrc1();
            Assert(arrayOpnd);

            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            if(doArraySegmentHoist)
            {
                kills.SetKillsArrayHeadSegmentLengths();
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                kills.SetKillsArrayLengths();
            }
            break;
        }

        // Direct helper calls to known Array builtins: each helper kills the specific pieces of
        // tracked state it can mutate on its 'this' array.
        case Js::OpCode::CallDirect:
        {
            Assert(instr->GetSrc1());

            // Find the 'this' parameter and check if it's possible for it to be an array
            IR::Opnd *const arrayOpnd = instr->FindCallArgumentOpnd(1);
            Assert(arrayOpnd);
            const ValueType arrayValueType(arrayOpnd->GetValueType());
            if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
            {
                break;
            }

            const IR::JnHelperMethod helperMethod = instr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
            if(doArrayMissingValueCheckHoist)
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArraysWithNoMissingValues();
                        break;
                }
            }

            if(doArraySegmentHoist)
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArrayHeadSegments();
                        kills.SetKillsArrayHeadSegmentLengths();
                        break;
                }
            }

            if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsArrayLengths();
                        break;
                }
            }

            if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
            {
                switch(helperMethod)
                {
                    case IR::HelperArray_Reverse:
                    case IR::HelperArray_Shift:
                    case IR::HelperArray_Slice:
                    // Currently not inlined.
                    //case IR::HelperArray_Sort:
                    case IR::HelperArray_Splice:
                    case IR::HelperArray_Unshift:
                        kills.SetKillsNativeArrays();
                        break;
                }
            }
            break;
        }
    }

    return kills;
}
  16277. bool
  16278. GlobOpt::IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr)
  16279. {
  16280. // StElem is profiled with information indicating whether it will likely create a missing value in the array. In that case,
  16281. // we prefer to kill the no-missing-values information in the value so that we don't bail out in a likely circumstance.
  16282. return
  16283. (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  16284. DoArrayMissingValueCheckHoist() &&
  16285. instr->IsProfiledInstr() &&
  16286. instr->AsProfiledInstr()->u.stElemInfo->LikelyCreatesMissingValue();
  16287. }
  16288. bool
  16289. GlobOpt::NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock *const block, const bool isForwardPass) const
  16290. {
  16291. Assert(block);
  16292. return isForwardPass && block->loop && block->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist;
  16293. }
  16294. bool
  16295. GlobOpt::PrepareForIgnoringIntOverflow(IR::Instr *const instr)
  16296. {
  16297. Assert(instr);
  16298. const bool isBoundary = instr->m_opcode == Js::OpCode::NoIntOverflowBoundary;
  16299. // Update the instruction's "int overflow matters" flag based on whether we are currently allowing ignoring int overflows.
  16300. // Some operations convert their srcs to int32s, those can still ignore int overflow.
  16301. if(instr->ignoreIntOverflowInRange)
  16302. {
  16303. instr->ignoreIntOverflowInRange = !intOverflowCurrentlyMattersInRange || OpCodeAttr::IsInt32(instr->m_opcode);
  16304. }
  16305. if(!intOverflowDoesNotMatterRange)
  16306. {
  16307. Assert(intOverflowCurrentlyMattersInRange);
  16308. // There are no more ranges of instructions where int overflow does not matter, in this block.
  16309. return isBoundary;
  16310. }
  16311. if(instr == intOverflowDoesNotMatterRange->LastInstr())
  16312. {
  16313. Assert(isBoundary);
  16314. // Reached the last instruction in the range
  16315. intOverflowCurrentlyMattersInRange = true;
  16316. intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
  16317. return isBoundary;
  16318. }
  16319. if(!intOverflowCurrentlyMattersInRange)
  16320. {
  16321. return isBoundary;
  16322. }
  16323. if(instr != intOverflowDoesNotMatterRange->FirstInstr())
  16324. {
  16325. // Have not reached the next range
  16326. return isBoundary;
  16327. }
  16328. Assert(isBoundary);
  16329. // This is the first instruction in a range of instructions where int overflow does not matter. There can be many inputs to
  16330. // instructions in the range, some of which are inputs to the range itself (that is, the values are not defined in the
  16331. // range). Ignoring int overflow is only valid for int operations, so we need to ensure that all inputs to the range are
  16332. // int (not "likely int") before ignoring any overflows in the range. Ensuring that a sym with a "likely int" value is an
  16333. // int requires a bail-out. These bail-out check need to happen before any overflows are ignored, otherwise it's too late.
  16334. // The backward pass tracked all inputs into the range. Iterate over them and verify the values, and insert lossless
  16335. // conversions to int as necessary, before the first instruction in the range. If for any reason all values cannot be
  16336. // guaranteed to be ints, the optimization will be disabled for this range.
  16337. intOverflowCurrentlyMattersInRange = false;
  16338. {
  16339. BVSparse<JitArenaAllocator> tempBv1(tempAlloc);
  16340. BVSparse<JitArenaAllocator> tempBv2(tempAlloc);
  16341. {
  16342. // Just renaming the temp BVs for this section to indicate how they're used so that it makes sense
  16343. BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
  16344. BVSparse<JitArenaAllocator> &symsToInclude = tempBv2;
  16345. #if DBG_DUMP
  16346. SymID couldNotConvertSymId = 0;
  16347. #endif
  16348. FOREACH_BITSET_IN_SPARSEBV(id, intOverflowDoesNotMatterRange->SymsRequiredToBeInt())
  16349. {
  16350. Sym *const sym = func->m_symTable->Find(id);
  16351. Assert(sym);
  16352. // Some instructions with property syms are also tracked by the backward pass, and may be included in the range
  16353. // (LdSlot for instance). These property syms don't get their values until either copy-prop resolves a value for
  16354. // them, or a new value is created once the use of the property sym is reached. In either case, we're not that
  16355. // far yet, so we need to find the future value of the property sym by evaluating copy-prop in reverse.
  16356. Value *const value = sym->IsStackSym() ? FindValue(sym) : FindFuturePropertyValue(sym->AsPropertySym());
  16357. if(!value)
  16358. {
  16359. #if DBG_DUMP
  16360. couldNotConvertSymId = id;
  16361. #endif
  16362. intOverflowCurrentlyMattersInRange = true;
  16363. BREAK_BITSET_IN_SPARSEBV;
  16364. }
  16365. const bool isInt32OrUInt32Float =
  16366. value->GetValueInfo()->IsFloatConstant() &&
  16367. Js::JavascriptNumber::IsInt32OrUInt32(value->GetValueInfo()->AsFloatConstant()->FloatValue());
  16368. if(value->GetValueInfo()->IsInt() || isInt32OrUInt32Float)
  16369. {
  16370. if(!IsLoopPrePass())
  16371. {
  16372. // Input values that are already int can be excluded from int-specialization. We can treat unsigned
  16373. // int32 values as int32 values (ignoring the overflow), since the values will only be used inside the
  16374. // range where overflow does not matter.
  16375. symsToExclude.Set(sym->m_id);
  16376. }
  16377. continue;
  16378. }
  16379. if(!DoAggressiveIntTypeSpec() || !value->GetValueInfo()->IsLikelyInt())
  16380. {
  16381. // When aggressive int specialization is off, syms with "likely int" values cannot be forced to int since
  16382. // int bail-out checks are not allowed in that mode. Similarly, with aggressive int specialization on, it
  16383. // wouldn't make sense to force non-"likely int" values to int since it would almost guarantee a bail-out at
  16384. // runtime. In both cases, just disable ignoring overflow for this range.
  16385. #if DBG_DUMP
  16386. couldNotConvertSymId = id;
  16387. #endif
  16388. intOverflowCurrentlyMattersInRange = true;
  16389. BREAK_BITSET_IN_SPARSEBV;
  16390. }
  16391. if(IsLoopPrePass())
  16392. {
  16393. // The loop prepass does not modify bit-vectors. Since it doesn't add bail-out checks, it also does not need
  16394. // to specialize anything up-front. It only needs to be consistent in how it determines whether to allow
  16395. // ignoring overflow for a range, based on the values of inputs into the range.
  16396. continue;
  16397. }
  16398. // Since input syms are tracked in the backward pass, where there is no value tracking, it will not be aware of
  16399. // copy-prop. If a copy-prop sym is available, it will be used instead, so exclude the original sym and include
  16400. // the copy-prop sym for specialization.
  16401. StackSym *const copyPropSym = GetCopyPropSym(sym, value);
  16402. if(copyPropSym)
  16403. {
  16404. symsToExclude.Set(sym->m_id);
  16405. Assert(!symsToExclude.Test(copyPropSym->m_id));
  16406. const bool needsToBeLossless =
  16407. !intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(sym->m_id);
  16408. if(intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(copyPropSym->m_id) ||
  16409. symsToInclude.TestAndSet(copyPropSym->m_id))
  16410. {
  16411. // The copy-prop sym is already included
  16412. if(needsToBeLossless)
  16413. {
  16414. // The original sym needs to be lossless, so make the copy-prop sym lossless as well.
  16415. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(copyPropSym->m_id);
  16416. }
  16417. }
  16418. else if(!needsToBeLossless)
  16419. {
  16420. // The copy-prop sym was not included before, and the original sym can be lossy, so make it lossy.
  16421. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(copyPropSym->m_id);
  16422. }
  16423. }
  16424. else if(!sym->IsStackSym())
  16425. {
  16426. // Only stack syms can be converted to int, and copy-prop syms are stack syms. If a copy-prop sym was not
  16427. // found for the property sym, we can't ignore overflows in this range.
  16428. #if DBG_DUMP
  16429. couldNotConvertSymId = id;
  16430. #endif
  16431. intOverflowCurrentlyMattersInRange = true;
  16432. BREAK_BITSET_IN_SPARSEBV;
  16433. }
  16434. } NEXT_BITSET_IN_SPARSEBV;
  16435. if(intOverflowCurrentlyMattersInRange)
  16436. {
  16437. #if DBG_DUMP
  16438. if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func) && !IsLoopPrePass())
  16439. {
  16440. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  16441. Output::Print(
  16442. _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u, Disabled ignoring overflows\n"),
  16443. func->GetJITFunctionBody()->GetDisplayName(),
  16444. func->GetDebugNumberSet(debugStringBuffer),
  16445. Js::PhaseNames[Js::ForwardPhase],
  16446. currentBlock->GetBlockNum());
  16447. Output::Print(_u(" Input sym could not be turned into an int: %u\n"), couldNotConvertSymId);
  16448. Output::Print(_u(" First instr: "));
  16449. instr->m_next->Dump();
  16450. Output::Flush();
  16451. }
  16452. #endif
  16453. intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
  16454. return isBoundary;
  16455. }
  16456. if(IsLoopPrePass())
  16457. {
  16458. return isBoundary;
  16459. }
  16460. // Update the syms to specialize after enumeration
  16461. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
  16462. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
  16463. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Or(&symsToInclude);
  16464. }
  16465. {
  16466. // Exclude syms that are already live as lossless int32, and exclude lossy conversions of syms that are already live
  16467. // as lossy int32.
  16468. // symsToExclude = liveInt32Syms - liveLossyInt32Syms // syms live as lossless int
  16469. // lossySymsToExclude = symsRequiredToBeLossyInt & liveLossyInt32Syms; // syms we want as lossy int that are already live as lossy int
  16470. // symsToExclude |= lossySymsToExclude
  16471. // symsRequiredToBeInt -= symsToExclude
  16472. // symsRequiredToBeLossyInt -= symsToExclude
  16473. BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
  16474. BVSparse<JitArenaAllocator> &lossySymsToExclude = tempBv2;
  16475. symsToExclude.Minus(currentBlock->globOptData.liveInt32Syms, currentBlock->globOptData.liveLossyInt32Syms);
  16476. lossySymsToExclude.And(
  16477. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(),
  16478. currentBlock->globOptData.liveLossyInt32Syms);
  16479. symsToExclude.Or(&lossySymsToExclude);
  16480. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
  16481. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
  16482. }
  16483. #if DBG
  16484. {
  16485. // Verify that the syms to be converted are live
  16486. // liveSyms = liveInt32Syms | liveFloat64Syms | liveVarSyms
  16487. // deadSymsRequiredToBeInt = symsRequiredToBeInt - liveSyms
  16488. BVSparse<JitArenaAllocator> &liveSyms = tempBv1;
  16489. BVSparse<JitArenaAllocator> &deadSymsRequiredToBeInt = tempBv2;
  16490. liveSyms.Or(currentBlock->globOptData.liveInt32Syms, currentBlock->globOptData.liveFloat64Syms);
  16491. liveSyms.Or(currentBlock->globOptData.liveVarSyms);
  16492. deadSymsRequiredToBeInt.Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), &liveSyms);
  16493. Assert(deadSymsRequiredToBeInt.IsEmpty());
  16494. }
  16495. #endif
  16496. }
  16497. // Int-specialize the syms before the first instruction of the range (the current instruction)
  16498. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt());
  16499. #if DBG_DUMP
  16500. if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func))
  16501. {
  16502. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  16503. Output::Print(
  16504. _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n"),
  16505. func->GetJITFunctionBody()->GetDisplayName(),
  16506. func->GetDebugNumberSet(debugStringBuffer),
  16507. Js::PhaseNames[Js::ForwardPhase],
  16508. currentBlock->GetBlockNum());
  16509. Output::Print(_u(" Input syms to be int-specialized (lossless): "));
  16510. intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Dump();
  16511. Output::Print(_u(" Input syms to be converted to int (lossy): "));
  16512. intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
  16513. Output::Print(_u(" First instr: "));
  16514. instr->m_next->Dump();
  16515. Output::Flush();
  16516. }
  16517. #endif
  16518. ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), currentBlock, false /* lossy */, instr);
  16519. ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(), currentBlock, true /* lossy */, instr);
  16520. return isBoundary;
  16521. }
// Verifies that an instruction inside an "int overflow does not matter" range actually
// meets the requirements for ignoring overflow: it must either be int-specialized
// (int-typed dst/srcs, no bailout) or otherwise harmless. If the requirement cannot be
// met, it is too late to undo the optimization (overflows may already be ignored), so
// this bails out of the JIT at compile time: rejit with int-overflow tracking disabled,
// or abort jitting entirely if tracking was already disabled.
void
GlobOpt::VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr)
{
    // Nothing to verify when overflow currently matters in this range; the loop prepass
    // does not specialize, so there is nothing to verify there either.
    if(intOverflowCurrentlyMattersInRange || IsLoopPrePass())
    {
        return;
    }

    // A Mul_I4 in such a range must not be checking for 32-bit overflow, but must still
    // be checking for non-32-bit overflow.
    Assert(instr->m_opcode != Js::OpCode::Mul_I4 ||
        (instr->m_opcode == Js::OpCode::Mul_I4 && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow() ));

    // Instructions that are marked as "overflow doesn't matter" in the range must guarantee that they operate on int values and
    // result in int values, for ignoring overflow to be valid. So, int-specialization is required for such instructions in the
    // range. Ld_A is an exception because it only specializes if the src sym is available as a required specialized sym, and it
    // doesn't generate bailouts or cause ignoring int overflow to be invalid.
    // MULs are allowed to start a region and have BailOutInfo since they will bailout on non-32 bit overflow.
    if(instr->m_opcode == Js::OpCode::Ld_A ||
       ((!instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::Mul_I4) &&
        (!instr->GetDst() || instr->GetDst()->IsInt32()) &&
        (!instr->GetSrc1() || instr->GetSrc1()->IsInt32()) &&
        (!instr->GetSrc2() || instr->GetSrc2()->IsInt32())))
    {
        return;
    }

    // An instruction that neither bails out nor has side effects cannot invalidate the
    // range, so it is acceptable even without int-typed operands.
    if (!instr->HasBailOutInfo() && !instr->HasAnySideEffects())
    {
        return;
    }

    // This can happen for Neg_A if it needs to bail out on negative zero, and perhaps other cases as well. It's too late to fix
    // the problem (overflows may already be ignored), so handle it by bailing out at compile-time and disabling tracking int
    // overflow.
    Assert(!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled());

    if(PHASE_TRACE(Js::BailOutPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(
            _u("BailOut (compile-time): function: %s (%s) instr: "),
            func->GetJITFunctionBody()->GetDisplayName(),
            func->GetDebugNumberSet(debugStringBuffer));
#if DBG_DUMP
        instr->Dump();
#else
        Output::Print(_u("%s "), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
#endif
        Output::Print(_u("(overflow does not matter but could not int-spec or needed bailout)\n"));
        Output::Flush();
    }

    if(func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsTrackCompoundedIntOverflowDisabled())
    {
        // Tracking int overflows is already off for some reason. Prevent trying to rejit again because it won't help and the
        // same thing will happen again and cause an infinite loop. Just abort jitting this function.
        if(PHASE_TRACE(Js::BailOutPhase, this->func))
        {
            Output::Print(_u("    Aborting JIT because TrackIntOverflow is already off\n"));
            Output::Flush();
        }
        throw Js::OperationAbortedException();
    }

    // Rejit with compounded int-overflow tracking disabled so this situation cannot recur.
    throw Js::RejitException(RejitReason::TrackIntOverflowDisabled);
}
// It makes lowering easier if it can assume that the first src is never a constant,
// at least for commutative operators. For non-commutative, just hoist the constant.
//
// Canonicalizes an instruction's operands before lowering: for commutative operators
// (and branches/compares, whose opcode is mirrored accordingly) the srcs may be
// swapped so that an immediate ends up as src2, or so that a dead/dst-equal src2
// becomes src1. If src1 is an immediate that cannot be swapped away, it is hoisted
// into a separate load instruction. *pSrc1Val/*pSrc2Val are kept in sync with any swap.
void
GlobOpt::PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val)
{
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    if (src1->IsImmediateOpnd())
    {
        // Swap for dst, src
    }
    else if (src2 && dst && src2->IsRegOpnd())
    {
        if (src2->GetIsDead() && !src1->GetIsDead() && !src1->IsEqual(dst))
        {
            // Swap if src2 is dead, as the reg can be reuse for the dst for opEqs like on x86 (ADD r1, r2)
        }
        else if (src2->IsEqual(dst))
        {
            // Helps lowering of opEqs
        }
        else
        {
            return;
        }
        // Make sure we don't swap 2 srcs with valueOf calls.
        if (OpCodeAttr::OpndHasImplicitCall(instr->m_opcode))
        {
            if (instr->IsBranchInstr())
            {
                // For branches, both srcs must be primitive for the swap to be safe.
                if (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive())
                {
                    return;
                }
            }
            else if (!src1->GetValueType().IsPrimitive() && !src2->GetValueType().IsPrimitive())
            {
                return;
            }
        }
    }
    else
    {
        return;
    }

    // 'opcode' may be rewritten below to the mirrored form (e.g. BrGe -> BrLe) before
    // jumping to swap_srcs, which installs it and swaps the operands/values.
    Js::OpCode opcode = instr->m_opcode;
    switch (opcode)
    {
    // Fully commutative operators: swap directly.
    case Js::OpCode::And_A:
    case Js::OpCode::Mul_A:
    case Js::OpCode::Or_A:
    case Js::OpCode::Xor_A:
    case Js::OpCode::And_I4:
    case Js::OpCode::Mul_I4:
    case Js::OpCode::Or_I4:
    case Js::OpCode::Xor_I4:
    case Js::OpCode::Add_I4:
    case Js::OpCode::Add_Ptr:
    swap_srcs:
        if (!instr->GetSrc2()->IsImmediateOpnd())
        {
            instr->m_opcode = opcode;
            src1 = instr->UnlinkSrc1();
            src2 = instr->UnlinkSrc2();
            instr->SetSrc1(src2);
            instr->SetSrc2(src1);

            // Keep the tracked values consistent with the operand swap.
            Value *tempVal = *pSrc1Val;
            *pSrc1Val = *pSrc2Val;
            *pSrc2Val = tempVal;
            return;
        }
        break;

    // Symmetric (in)equality branches: swap without changing the opcode.
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrEq_I4:
        goto swap_srcs;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNeq_I4:
        goto swap_srcs;

    // Relational branches: mirror the opcode, then swap.
    case Js::OpCode::BrGe_A:
        opcode = Js::OpCode::BrLe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGe_A:
        opcode = Js::OpCode::BrNotLe_A;
        goto swap_srcs;

    case Js::OpCode::BrGe_I4:
        opcode = Js::OpCode::BrLe_I4;
        goto swap_srcs;

    case Js::OpCode::BrGt_A:
        opcode = Js::OpCode::BrLt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGt_A:
        opcode = Js::OpCode::BrNotLt_A;
        goto swap_srcs;

    case Js::OpCode::BrGt_I4:
        opcode = Js::OpCode::BrLt_I4;
        goto swap_srcs;

    case Js::OpCode::BrLe_A:
        opcode = Js::OpCode::BrGe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLe_A:
        opcode = Js::OpCode::BrNotGe_A;
        goto swap_srcs;

    case Js::OpCode::BrLe_I4:
        opcode = Js::OpCode::BrGe_I4;
        goto swap_srcs;

    case Js::OpCode::BrLt_A:
        opcode = Js::OpCode::BrGt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLt_A:
        opcode = Js::OpCode::BrNotGt_A;
        goto swap_srcs;

    case Js::OpCode::BrLt_I4:
        opcode = Js::OpCode::BrGt_I4;
        goto swap_srcs;

    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
        // this == "" not the same as "" == this...
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        goto swap_srcs;

    case Js::OpCode::CmGe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLe_A;
        goto swap_srcs;

    case Js::OpCode::CmGt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLt_A;
        goto swap_srcs;

    case Js::OpCode::CmLe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGe_A;
        goto swap_srcs;

    case Js::OpCode::CmLt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGt_A;
        goto swap_srcs;

    case Js::OpCode::CallI:
    case Js::OpCode::CallIFixed:
    case Js::OpCode::NewScObject:
    case Js::OpCode::NewScObjectSpread:
    case Js::OpCode::NewScObjArray:
    case Js::OpCode::NewScObjArraySpread:
    case Js::OpCode::NewScObjectNoCtor:
        // Don't insert load to register if the function operand is a fixed function.
        if (instr->HasFixedFunctionAddressTarget())
        {
            return;
        }
        break;

        // Can't do add because <32 + "Hello"> isn't equal to <"Hello" + 32>
        // Lower can do the swap. Other op-codes listed below don't need immediate source hoisting, as the fast paths handle it,
        // or the lowering handles the hoisting.
    case Js::OpCode::Add_A:
        if (src1->IsFloat())
        {
            goto swap_srcs;
        }
        return;

    case Js::OpCode::Sub_I4:
    case Js::OpCode::Neg_I4:
    case Js::OpCode::Not_I4:
    case Js::OpCode::NewScFunc:
    case Js::OpCode::NewScGenFunc:
    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScIntArray:
    case Js::OpCode::NewScFltArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::NewRegEx:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::FromVar:
    case Js::OpCode::Conv_Prim:
    case Js::OpCode::LdC_A_I4:
    case Js::OpCode::LdStr:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StartCall:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_InlineSpecialized:
    case Js::OpCode::ArgOut_A_SpreadArg:
    case Js::OpCode::InlineeEnd:
    case Js::OpCode::EndCallForPolymorphicInlinee:
    case Js::OpCode::InlineeMetaArg:
    case Js::OpCode::InlineBuiltInEnd:
    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::CallHelper:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
    case Js::OpCode::RuntimeTypeError:
    case Js::OpCode::RuntimeReferenceError:
    case Js::OpCode::Ret:
    case Js::OpCode::NewScObjectSimple:
    case Js::OpCode::NewScObjectLiteral:
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::CallDirect:
    case Js::OpCode::BrNotHasSideEffects:
    case Js::OpCode::NewConcatStrMulti:
    case Js::OpCode::NewConcatStrMultiBE:
    case Js::OpCode::ExtendArg_A:
#ifdef ENABLE_DOM_FAST_PATH
    case Js::OpCode::DOMFastPathGetter:
    case Js::OpCode::DOMFastPathSetter:
#endif
    case Js::OpCode::NewScopeSlots:
    case Js::OpCode::NewScopeSlotsWithoutPropIds:
    case Js::OpCode::NewStackScopeSlots:
    case Js::OpCode::IsInst:
    case Js::OpCode::BailOnEqual:
    case Js::OpCode::BailOnNotEqual:
    case Js::OpCode::StArrViewElem:
        return;
    }

    if (!src1->IsImmediateOpnd())
    {
        return;
    }

    // The fast paths or lowering of the remaining instructions may not support handling immediate opnds for the first src. The
    // immediate src1 is hoisted here into a separate instruction.
    if (src1->IsIntConstOpnd())
    {
        IR::Instr *newInstr = instr->HoistSrc1(Js::OpCode::Ld_I4);
        ToInt32Dst(newInstr, newInstr->GetDst()->AsRegOpnd(), this->currentBlock);
    }
    else
    {
        instr->HoistSrc1(Js::OpCode::Ld_A);
    }
    src1 = instr->GetSrc1();
    src1->AsRegOpnd()->m_sym->SetIsConst();
}
// Clear the ValueMap of the values invalidated by this instr.
  16843. void
  16844. GlobOpt::ProcessKills(IR::Instr *instr)
  16845. {
  16846. this->ProcessFieldKills(instr);
  16847. this->ProcessValueKills(instr);
  16848. this->ProcessArrayValueKills(instr);
  16849. }
  16850. bool
  16851. GlobOpt::OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives)
  16852. {
  16853. if(!loop->CanHoistInvariants())
  16854. {
  16855. return false;
  16856. }
  16857. Sym *sym;
  16858. switch(src->GetKind())
  16859. {
  16860. case IR::OpndKindAddr:
  16861. case IR::OpndKindFloatConst:
  16862. case IR::OpndKindIntConst:
  16863. return true;
  16864. case IR::OpndKindReg:
  16865. sym = src->AsRegOpnd()->m_sym;
  16866. break;
  16867. case IR::OpndKindSym:
  16868. sym = src->AsSymOpnd()->m_sym;
  16869. if (src->AsSymOpnd()->IsPropertySymOpnd())
  16870. {
  16871. if (src->AsSymOpnd()->AsPropertySymOpnd()->IsTypeChecked())
  16872. {
  16873. // We do not handle hoisting these yet. We might be hoisting this across the instr with the type check protecting this one.
  16874. // And somehow, the dead-store pass now removes the type check on that instr later on...
  16875. // For CheckFixedFld, there is no benefit hoisting these if they don't have a type check as they won't generate code.
  16876. return false;
  16877. }
  16878. }
  16879. break;
  16880. case IR::OpndKindHelperCall:
  16881. // Helper calls, like the private slot getter, can be invariant.
  16882. // Consider moving more math builtin to invariant?
  16883. return HelperMethodAttributes::IsInVariant(src->AsHelperCallOpnd()->m_fnHelper);
  16884. default:
  16885. return false;
  16886. }
  16887. return OptIsInvariant(sym, block, loop, srcVal, isNotTypeSpecConv, allowNonPrimitives);
  16888. }
// Determines whether the given sym is invariant with respect to the given loop: its
// value in the current block must be live and have the same value number (and, for
// int values, the same constant bounds) as in the loop's landing pad. On success, if
// loopHeadValRef is provided, it receives the sym's value in the landing pad.
bool
GlobOpt::OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef)
{
    // If the caller doesn't need the landing-pad value, use a local to hold it.
    Value *localLoopHeadVal;
    if(!loopHeadValRef)
    {
        loopHeadValRef = &localLoopHeadVal;
    }
    Value *&loopHeadVal = *loopHeadValRef;
    loopHeadVal = nullptr;

    if(!loop->CanHoistInvariants())
    {
        return false;
    }

    if (sym->IsStackSym())
    {
        if (sym->AsStackSym()->IsTypeSpec())
        {
            StackSym *varSym = sym->AsStackSym()->GetVarEquivSym(this->func);
            // Make sure the int32/float64 version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (sym->AsStackSym()->IsInt32())
            {
                Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
                if (!loop->landingPad->globOptData.liveInt32Syms->Test(varSym->m_id) ||
                    (loop->landingPad->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
                     !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)))
                {
                    // Either the int32 sym is not live in the landing pad, or it's lossy in the landing pad and the
                    // instruction's block is using the lossless version. In either case, the instruction cannot be hoisted
                    // without doing a conversion of this operand.
                    return false;
                }
            }
            else if (sym->AsStackSym()->IsFloat64())
            {
                if (!loop->landingPad->globOptData.liveFloat64Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }
            else
            {
                Assert(sym->AsStackSym()->IsSimd128());
                if (!loop->landingPad->globOptData.liveSimd128F4Syms->Test(varSym->m_id) && !loop->landingPad->globOptData.liveSimd128I4Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }

            // From here on, reason about the var-equivalent sym.
            sym = sym->AsStackSym()->GetVarEquivSym(this->func);
        }
        else
        {
            // Make sure the var version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (!loop->landingPad->globOptData.liveVarSyms->Test(sym->m_id))
            {
                return false;
            }
        }
    }
    else if (sym->IsPropertySym())
    {
        // A property sym must be a live field in the landing pad.
        if (!loop->landingPad->globOptData.liveFields->Test(sym->m_id))
        {
            return false;
        }
    }
    else
    {
        return false;
    }

    // We rely on having a value.
    if (srcVal == NULL)
    {
        return false;
    }

    // Can't hoist non-primitives, unless we have safeguards against valueof/tostring.
    if (!allowNonPrimitives && !srcVal->GetValueInfo()->IsPrimitive() && !this->IsTypeSpecialized(sym, loop->landingPad))
    {
        return false;
    }

    if(!isNotTypeSpecConv && loop->symsDefInLoop->Test(sym->m_id))
    {
        // Typically, a sym is considered invariant if it has the same value in the current block and in the loop landing pad.
        // The sym may have had a different value earlier in the loop or on the back-edge, but as long as it's reassigned to its
        // value outside the loop, it would be considered invariant in this block. Consider that case:
        //     s1 = s2[invariant]
        //     <loop start>
        //         s1 = s2[invariant]
        //                              // s1 now has the same value as in the landing pad, and is considered invariant
        //         s1 += s3
        //                              // s1 is not invariant here, or on the back-edge
        //         ++s3                 // s3 is not invariant, so the add above cannot be hoisted
        //     <loop end>
        //
        // A problem occurs at the point of (s1 += s3) when:
        //     - At (s1 = s2) inside the loop, s1 was made to be the sym store of that value. This by itself is legal, because
        //       after that transfer, s1 and s2 have the same value.
        //     - (s1 += s3) is type-specialized but s1 is not specialized in the loop header. This happens when s1 is not
        //       specialized entering the loop, and since s1 is not used before it's defined in the loop, it's not specialized
        //       on back-edges.
        //
        // With that, at (s1 += s3), the conversion of s1 to the type-specialized version would be hoisted because s1 is
        // invariant just before that instruction. Since this add is specialized, the specialized version of the sym is modified
        // in the loop without a reassignment at (s1 = s2) inside the loop, and (s1 += s3) would then use an incorrect value of
        // s1 (it would use the value of s1 from the previous loop iteration, instead of using the value of s2).
        //
        // The problem here, is that we cannot hoist the conversion of s1 into its specialized version across the assignment
        // (s1 = s2) inside the loop. So for the purposes of type specialization, don't consider a sym invariant if it has a def
        // inside the loop.
        return false;
    }

    // A symbol is invariant if its current value is the same as it was upon entering the loop.
    loopHeadVal = this->FindValue(loop->landingPad->globOptData.symToValueMap, sym);
    if (loopHeadVal == NULL || loopHeadVal->GetValueNumber() != srcVal->GetValueNumber())
    {
        return false;
    }

    // For values with an int range, require additionally that the range is the same as in the landing pad, as the range may
    // have been changed on this path based on branches, and int specialization and invariant hoisting may rely on the range
    // being the same. For type spec conversions, only require that if the value is an int constant in the current block, that
    // it is also an int constant with the same value in the landing pad. Other range differences don't matter for type spec.
    IntConstantBounds srcIntConstantBounds, loopHeadIntConstantBounds;
    if(srcVal->GetValueInfo()->TryGetIntConstantBounds(&srcIntConstantBounds) &&
        (isNotTypeSpecConv || srcIntConstantBounds.IsConstant()) &&
        (
            !loopHeadVal->GetValueInfo()->TryGetIntConstantBounds(&loopHeadIntConstantBounds) ||
            loopHeadIntConstantBounds.LowerBound() != srcIntConstantBounds.LowerBound() ||
            loopHeadIntConstantBounds.UpperBound() != srcIntConstantBounds.UpperBound()
        ))
    {
        return false;
    }

    return true;
}
// Returns true if 'instr' is invariant with respect to 'loop' and is legal (and, unless
// forced, profitable) to hoist into the loop's landing pad.
//
// An instruction qualifies only when:
// - the loop allows invariant hoisting at all, and the opcode is CSE-able;
// - the opcode is not one of the explicitly excluded ones (see the switch below);
// - the dst, if any, is a reg opnd;
// - every source operand is itself invariant in the loop (checked via the operand
//   overload of OptIsInvariant).
//
// 'forceInvariantHoisting' overrides the "usually not worth it" cases (plain loads).
// 'allowNonPrimitives' starts out true only for opcodes that cannot trigger an implicit
// call on their operands; a FromVar that already carries bailout info is additionally
// allowed non-primitive sources, since the bailout guards the conversion.
bool
GlobOpt::OptIsInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *src1Val,
    Value *src2Val,
    bool isNotTypeSpecConv,
    const bool forceInvariantHoisting)
{
    if (!loop->CanHoistInvariants())
    {
        return false;
    }
    if (!OpCodeAttr::CanCSE(instr->m_opcode))
    {
        return false;
    }

    bool allowNonPrimitives = !OpCodeAttr::OpndHasImplicitCall(instr->m_opcode);

    switch(instr->m_opcode)
    {
        // Can't legally hoist these
    case Js::OpCode::LdLen_A:
        return false;

        //Can't Hoist BailOnNotStackArgs, as it is necessary as InlineArgsOptimization relies on this opcode
        //to decide whether to throw rejit exception or not.
    case Js::OpCode::BailOnNotStackArgs:
        return false;

        // Usually not worth hoisting these
    case Js::OpCode::LdStr:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::LdC_A_I4:
        if(!forceInvariantHoisting)
        {
            return false;
        }
        break;

        // Can't hoist these outside the function it's for. The LdArgumentsFromFrame for an inlinee depends on the inlinee meta arg
        // that holds the arguments object, which is only initialized at the start of the inlinee. So, can't hoist this outside the
        // inlinee.
    case Js::OpCode::LdArgumentsFromFrame:
        if(instr->m_func != loop->GetFunc())
        {
            return false;
        }
        break;

    case Js::OpCode::FromVar:
        if (instr->HasBailOutInfo())
        {
            // The bailout guards the conversion, so a non-primitive source is acceptable.
            allowNonPrimitives = true;
        }
        break;
    }

    IR::Opnd *dst = instr->GetDst();

    if (dst && !dst->IsRegOpnd())
    {
        return false;
    }

    IR::Opnd *src1 = instr->GetSrc1();

    if (src1)
    {
        if (!this->OptIsInvariant(src1, block, loop, src1Val, isNotTypeSpecConv, allowNonPrimitives))
        {
            return false;
        }

        IR::Opnd *src2 = instr->GetSrc2();

        if (src2)
        {
            if (!this->OptIsInvariant(src2, block, loop, src2Val, isNotTypeSpecConv, allowNonPrimitives))
            {
                return false;
            }
        }
    }

    return true;
}
  17106. bool
  17107. GlobOpt::OptDstIsInvariant(IR::RegOpnd *dst)
  17108. {
  17109. StackSym *dstSym = dst->m_sym;
  17110. if (dstSym->IsTypeSpec())
  17111. {
  17112. // The type-specialized sym may be single def, but not the original...
  17113. dstSym = dstSym->GetVarEquivSym(this->func);
  17114. }
  17115. return (dstSym->m_isSingleDef);
  17116. }
  17117. void
  17118. GlobOpt::OptHoistToLandingPadUpdateValueType(
  17119. BasicBlock* landingPad,
  17120. IR::Instr* instr,
  17121. IR::Opnd* opnd,
  17122. Value* opndVal)
  17123. {
  17124. if (instr->m_opcode == Js::OpCode::FromVar)
  17125. {
  17126. return;
  17127. }
  17128. Sym* opndSym = opnd->GetSym();;
  17129. if (opndSym)
  17130. {
  17131. if (opndVal == nullptr)
  17132. {
  17133. opndVal = FindValue(opndSym);
  17134. }
  17135. Value* opndValueInLandingPad = FindValue(landingPad->globOptData.symToValueMap, opndSym);
  17136. Assert(opndVal->GetValueNumber() == opndValueInLandingPad->GetValueNumber());
  17137. opnd->SetValueType(opndValueInLandingPad->GetValueInfo()->Type());
  17138. if (opndSym->IsPropertySym())
  17139. {
  17140. // Also fix valueInfo on objPtr
  17141. StackSym* opndObjPtrSym = opndSym->AsPropertySym()->m_stackSym;
  17142. Value* opndObjPtrSymValInLandingPad = FindValue(landingPad->globOptData.symToValueMap, opndObjPtrSym);
  17143. ValueInfo* opndObjPtrSymValueInfoInLandingPad = opndObjPtrSymValInLandingPad->GetValueInfo();
  17144. opnd->AsSymOpnd()->SetPropertyOwnerValueType(opndObjPtrSymValueInfoInLandingPad->Type());
  17145. }
  17146. }
  17147. }
// Hoists an invariant 'instr' from 'block' into the landing pad of 'loop', after
// TryHoistInvariant has established invariance via OptIsInvariant.
//
// Responsibilities:
// - downgrade source operand value types to the landing-pad values (and clear temp
//   last-use bits, since the instruction moves past other uses);
// - for a hoisted FromVar, add a bailout if the source's value type is definite inside
//   the loop but not in the landing pad;
// - if the dst is not single-def (or changes in the loop), hoist into a fresh copy sym
//   and leave a Ld_A from the copy to the original dst in place;
// - physically move the instruction into the landing pad (before the loop's shared
//   bailout instruction, if one exists) and rewire its bailout info to the loop's;
// - replicate the dst's liveness and value info in every block between the landing pad
//   and 'block' so downstream merges see consistent state.
void
GlobOpt::OptHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *dstVal,
    Value *const src1Val,
    bool isNotTypeSpecConv,
    bool lossy,
    IR::BailOutKind bailoutKind)
{
    BasicBlock *landingPad = loop->landingPad;

    IR::Opnd* src1 = instr->GetSrc1();
    if (src1)
    {
        // We are hoisting this instruction possibly past other uses, which might invalidate the last use info. Clear it.
        OptHoistToLandingPadUpdateValueType(landingPad, instr, src1, src1Val);
        if (src1->IsRegOpnd())
        {
            src1->AsRegOpnd()->m_isTempLastUse = false;
        }

        IR::Opnd* src2 = instr->GetSrc2();
        if (src2)
        {
            OptHoistToLandingPadUpdateValueType(landingPad, instr, src2, nullptr);
            if (src2->IsRegOpnd())
            {
                src2->AsRegOpnd()->m_isTempLastUse = false;
            }
        }
    }

    IR::RegOpnd *dst = instr->GetDst() ? instr->GetDst()->AsRegOpnd() : nullptr;
    if(dst)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnGe_I4:
            // These operations are a special case. They generate a lossy int value, and the var sym is initialized using
            // Conv_Bool. A sym cannot be live only as a lossy int sym, the var needs to be live as well since the lossy int
            // sym cannot be used to convert to var. We don't know however, whether the Conv_Bool will be hoisted. The idea
            // currently is that the sym is only used on the path in which it is initialized inside the loop. So, don't
            // hoist any liveness info for the dst.
            if (!this->GetIsAsmJSFunc())
            {
                lossy = true;
            }
            break;

        case Js::OpCode::FromVar:
        {
            StackSym* src1StackSym = IR::RegOpnd::TryGetStackSym(instr->GetSrc1());

            if (instr->HasBailOutInfo())
            {
                // An existing bailout must be one of the conversion kinds FromVar can carry.
                IR::BailOutKind instrBailoutKind = instr->GetBailOutKind();
                Assert(instrBailoutKind == IR::BailOutIntOnly ||
                    instrBailoutKind == IR::BailOutExpectingInteger ||
                    instrBailoutKind == IR::BailOutOnNotPrimitive ||
                    instrBailoutKind == IR::BailOutNumberOnly ||
                    instrBailoutKind == IR::BailOutPrimitiveButString ||
                    instrBailoutKind == IR::BailOutSimd128F4Only ||
                    instrBailoutKind == IR::BailOutSimd128I4Only);
            }
            else if (src1StackSym && bailoutKind != IR::BailOutInvalid)
            {
                // We may be hoisting FromVar from a region where it didn't need a bailout (src1 had a definite value type) to a region
                // where it would. In such cases, the FromVar needs a bailout based on the value type of src1 in its new position.
                Assert(!src1StackSym->IsTypeSpec());
                Value* landingPadSrc1val = FindValue(landingPad->globOptData.symToValueMap, src1StackSym);
                Assert(src1Val->GetValueNumber() == landingPadSrc1val->GetValueNumber());

                ValueInfo *src1ValueInfo = src1Val->GetValueInfo();
                ValueInfo *landingPadSrc1ValueInfo = landingPadSrc1val->GetValueInfo();
                IRType dstType = dst->GetType();

                const auto AddBailOutToFromVar = [&]()
                {
                    instr->GetSrc1()->SetValueType(landingPadSrc1val->GetValueInfo()->Type());
                    EnsureBailTarget(loop);
                    if (block->IsLandingPad())
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind, loop->bailOutInfo->bailOutOffset);
                    }
                    else
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind);
                    }
                };

                // A definite type in the source position and not a definite type in the destination (landing pad)
                // and no bailout on the instruction; we should put a bailout on the hoisted instruction.
                if (dstType == TyInt32)
                {
                    if (lossy)
                    {
                        if ((src1ValueInfo->IsPrimitive() || IsTypeSpecialized(src1StackSym, block)) &&                // didn't need a lossy type spec bailout in the source block
                            (!landingPadSrc1ValueInfo->IsPrimitive() && !IsTypeSpecialized(src1StackSym, landingPad))) // needs a lossy type spec bailout in the landing pad
                        {
                            bailoutKind = IR::BailOutOnNotPrimitive;
                            AddBailOutToFromVar();
                        }
                    }
                    else if (src1ValueInfo->IsInt() && !landingPadSrc1ValueInfo->IsInt())
                    {
                        AddBailOutToFromVar();
                    }
                }
                else if ((dstType == TyFloat64 && src1ValueInfo->IsNumber() && !landingPadSrc1ValueInfo->IsNumber()) ||
                    (IRType_IsSimd128(dstType) && src1ValueInfo->IsSimd128() && !landingPadSrc1ValueInfo->IsSimd128()))
                {
                    AddBailOutToFromVar();
                }
            }

            break;
        }
        }

        if (dstVal == NULL)
        {
            dstVal = this->NewGenericValue(ValueType::Uninitialized, dst);
        }

        // ToVar/FromVar don't need a new dst because it has to be invariant if their src is invariant.
        bool dstDoesntNeedLoad = (!isNotTypeSpecConv && instr->m_opcode != Js::OpCode::LdC_A_I4);

        StackSym *varSym = dst->m_sym;

        if (varSym->IsTypeSpec())
        {
            varSym = varSym->GetVarEquivSym(this->func);
        }

        Value *const landingPadDstVal = FindValue(loop->landingPad->globOptData.symToValueMap, varSym);
        if(landingPadDstVal
            ? dstVal->GetValueNumber() != landingPadDstVal->GetValueNumber()
            : loop->symsDefInLoop->Test(varSym->m_id))
        {
            // We need a temp for FromVar/ToVar if dst changes in the loop.
            dstDoesntNeedLoad = false;
        }

        if (!dstDoesntNeedLoad && this->OptDstIsInvariant(dst) == false)
        {
            // Keep dst in place, hoist instr using a new dst.
            instr->UnlinkDst();

            // Set type specialization info correctly for this new sym
            StackSym *copyVarSym;
            IR::RegOpnd *copyReg;
            if (dst->m_sym->IsTypeSpec())
            {
                copyVarSym = StackSym::New(TyVar, instr->m_func);
                StackSym *copySym = copyVarSym;
                if (dst->m_sym->IsInt32())
                {
                    if(lossy)
                    {
                        // The new sym would only be live as a lossy int since we're only hoisting the store to the int version
                        // of the sym, and cannot be converted to var. It is not legal to have a sym only live as a lossy int,
                        // so don't update liveness info for this sym.
                    }
                    else
                    {
                        block->globOptData.liveInt32Syms->Set(copyVarSym->m_id);
                    }
                    copySym = copySym->GetInt32EquivSym(instr->m_func);
                }
                else if (dst->m_sym->IsFloat64())
                {
                    block->globOptData.liveFloat64Syms->Set(copyVarSym->m_id);
                    copySym = copySym->GetFloat64EquivSym(instr->m_func);
                }
                else if (dst->IsSimd128())
                {
                    // SIMD_JS
                    if (dst->IsSimd128F4())
                    {
                        block->globOptData.liveSimd128F4Syms->Set(copyVarSym->m_id);
                        copySym = copySym->GetSimd128F4EquivSym(instr->m_func);
                    }
                    else
                    {
                        Assert(dst->IsSimd128I4());
                        block->globOptData.liveSimd128I4Syms->Set(copyVarSym->m_id);
                        copySym = copySym->GetSimd128I4EquivSym(instr->m_func);
                    }
                }
                copyReg = IR::RegOpnd::New(copySym, copySym->GetType(), instr->m_func);
            }
            else
            {
                copyReg = IR::RegOpnd::New(dst->GetType(), instr->m_func);
                copyVarSym = copyReg->m_sym;
                block->globOptData.liveVarSyms->Set(copyVarSym->m_id);
            }

            // Hoist into the copy, and assign the copy back to the original dst in place.
            copyReg->SetValueType(dst->GetValueType());
            IR::Instr *copyInstr = IR::Instr::New(Js::OpCode::Ld_A, dst, copyReg, instr->m_func);
            copyInstr->SetByteCodeOffset(instr);
            instr->SetDst(copyReg);
            instr->InsertBefore(copyInstr);

            dst->m_sym->m_mayNotBeTempLastUse = true;

            if (instr->GetSrc1() && instr->GetSrc1()->IsImmediateOpnd())
            {
                // Propagate IsIntConst if appropriate
                switch(instr->m_opcode)
                {
                case Js::OpCode::Ld_A:
                case Js::OpCode::Ld_I4:
                case Js::OpCode::LdC_A_I4:
                    copyReg->m_sym->SetIsConst();
                    break;
                }
            }

            ValueInfo *dstValueInfo = dstVal->GetValueInfo();
            if((!dstValueInfo->GetSymStore() || dstValueInfo->GetSymStore() == varSym) && !lossy)
            {
                // The destination's value may have been transferred from one of the invariant sources, in which case we should
                // keep the sym store intact, as that sym will likely have a better lifetime than this new copy sym. For
                // instance, if we're inside a conditioned block, because we don't make the copy sym live and set its value in
                // all preceding blocks, this sym would not be live after exiting this block, causing this value to not
                // participate in copy-prop after this block.
                this->SetSymStoreDirect(dstValueInfo, copyVarSym);
            }

            this->InsertNewValue(&block->globOptData, dstVal, copyReg);
            dst = copyReg;
        }
    }

    // Move to landing pad
    block->UnlinkInstr(instr);

    if (loop->bailOutInfo->bailOutInstr)
    {
        loop->bailOutInfo->bailOutInstr->InsertBefore(instr);
    }
    else
    {
        landingPad->InsertAfter(instr);
    }

    GlobOpt::MarkNonByteCodeUsed(instr);

    if (instr->HasBailOutInfo() || instr->HasAuxBailOut())
    {
        Assert(loop->bailOutInfo);
        EnsureBailTarget(loop);

        // Copy bailout info of loop top.
        if (instr->ReplaceBailOutInfo(loop->bailOutInfo))
        {
            // if the old bailout is deleted, reset capturedvalues cached in block
            block->globOptData.capturedValues = nullptr;
            block->globOptData.capturedValuesCandidate = nullptr;
        }
    }

    if(!dst)
    {
        return;
    }

    // The bailout info's liveness for the dst sym is not updated in loop landing pads because bailout instructions previously
    // hoisted into the loop's landing pad may bail out before the current type of the dst sym became live (perhaps due to this
    // instruction). Since the landing pad will have a shared bailout point, the bailout info cannot assume that the current
    // type of the dst sym was live during every bailout hoisted into the landing pad.

    StackSym *const dstSym = dst->m_sym;
    StackSym *const dstVarSym = dstSym->IsTypeSpec() ? dstSym->GetVarEquivSym(nullptr) : dstSym;
    Assert(dstVarSym);
    if(isNotTypeSpecConv || !IsLive(dstVarSym, loop->landingPad))
    {
        // A new dst is being hoisted, or the same single-def dst that would not be live before this block. So, make it live and
        // update the value info with the same value info in this block.

        if(lossy)
        {
            // This is a lossy conversion to int. The instruction was given a new dst specifically for hoisting, so this new dst
            // will not be live as a var before this block. A sym cannot be live only as a lossy int sym, the var needs to be
            // live as well since the lossy int sym cannot be used to convert to var. Since the var version of the sym is not
            // going to be initialized, don't hoist any liveness info for the dst. The sym is only going to be used on the path
            // in which it is initialized inside the loop.
            Assert(dstSym->IsTypeSpec());
            Assert(dstSym->IsInt32());
            return;
        }

        // Check if the dst value was transferred from the src. If so, the value transfer needs to be replicated.
        bool isTransfer = dstVal == src1Val;

        StackSym *transferValueOfSym = nullptr;
        if(isTransfer)
        {
            Assert(instr->GetSrc1());
            if(instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *src1Sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if(src1Sym->IsTypeSpec())
                {
                    src1Sym = src1Sym->GetVarEquivSym(nullptr);
                    Assert(src1Sym);
                }
                if(dstVal == FindValue(block->globOptData.symToValueMap, src1Sym))
                {
                    transferValueOfSym = src1Sym;
                }
            }
        }

        // SIMD_JS
        if (instr->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // Check if we should have CSE'ed this EA
            Assert(instr->GetSrc1());

            // If the dstVal symstore is not the dst itself, then we copied the Value from another expression.
            if (dstVal->GetValueInfo()->GetSymStore() != instr->GetDst()->GetStackSym())
            {
                isTransfer = true;
                transferValueOfSym = dstVal->GetValueInfo()->GetSymStore()->AsStackSym();
            }
        }

        // Replicate the dst's liveness and value in every block from just below 'block'
        // down to the landing pad, so merges after hoisting see a consistent picture.
        const ValueNumber dstValueNumber = dstVal->GetValueNumber();
        ValueNumber dstNewValueNumber = InvalidValueNumber;
        for(InvariantBlockBackwardIterator it(this, block, loop->landingPad, nullptr); it.IsValid(); it.MoveNext())
        {
            BasicBlock *const hoistBlock = it.Block();
            GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

            Assert(!IsLive(dstVarSym, &hoistBlockData));
            MakeLive(dstSym, &hoistBlockData, lossy);

            Value *newDstValue;
            do
            {
                if(isTransfer)
                {
                    if(transferValueOfSym)
                    {
                        newDstValue = FindValue(hoistBlockData.symToValueMap, transferValueOfSym);
                        if(newDstValue && newDstValue->GetValueNumber() == dstValueNumber)
                        {
                            break;
                        }
                    }

                    // It's a transfer, but we don't have a sym whose value number matches in the target block. Use a new value
                    // number since we don't know if there is already a value with the current number for the target block.
                    if(dstNewValueNumber == InvalidValueNumber)
                    {
                        dstNewValueNumber = NewValueNumber();
                    }
                    newDstValue = CopyValue(dstVal, dstNewValueNumber);
                    break;
                }

                newDstValue = CopyValue(dstVal, dstValueNumber);
            } while(false);

            SetValue(&hoistBlockData, newDstValue, dstVarSym);
        }
        return;
    }

#if DBG
    if(instr->GetSrc1()->IsRegOpnd()) // Type spec conversion may load a constant into a dst sym
    {
        StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        Assert(srcSym != dstSym); // Type spec conversion must be changing the type, so the syms must be different
        StackSym *const srcVarSym = srcSym->IsTypeSpec() ? srcSym->GetVarEquivSym(nullptr) : srcSym;
        Assert(srcVarSym == dstVarSym); // Type spec conversion must be between variants of the same var sym
    }
#endif

    // Hoisting a type-spec conversion of an already-live var sym: decide whether the
    // hoisted conversion also proves a more specific value type (int for lossless int
    // conversions, number for BailOutNumberOnly float conversions, etc.).
    bool changeValueType = false, changeValueTypeToInt = false;
    if(dstSym->IsTypeSpec())
    {
        if(dst->IsInt32())
        {
            if(!lossy)
            {
                Assert(
                    !instr->HasBailOutInfo() ||
                    instr->GetBailOutKind() == IR::BailOutIntOnly ||
                    instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                changeValueType = changeValueTypeToInt = true;
            }
        }
        else if (dst->IsFloat64())
        {
            if(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutNumberOnly)
            {
                changeValueType = true;
            }
        }
        else
        {
            // SIMD_JS
            Assert(dst->IsSimd128());
            if (instr->HasBailOutInfo() &&
                (instr->GetBailOutKind() == IR::BailOutSimd128F4Only || instr->GetBailOutKind() == IR::BailOutSimd128I4Only))
            {
                changeValueType = true;
            }
        }
    }

    ValueInfo *previousValueInfoBeforeUpdate = nullptr, *previousValueInfoAfterUpdate = nullptr;
    for(InvariantBlockBackwardIterator it(
            this,
            block,
            loop->landingPad,
            dstVarSym,
            dstVal->GetValueNumber());
        it.IsValid();
        it.MoveNext())
    {
        BasicBlock *const hoistBlock = it.Block();
        GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

#if DBG
        // TODO: There are some odd cases with field hoisting where the sym is invariant in only part of the loop and the info
        // does not flow through all blocks. Un-comment the verification below after PRE replaces field hoisting.

        //// Verify that the src sym is live as the required type, and that the conversion is valid
        //Assert(IsLive(dstVarSym, &hoistBlockData));
        //if(instr->GetSrc1()->IsRegOpnd())
        //{
        //    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
        //    StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        //    if(srcSym->IsTypeSpec())
        //    {
        //        if(src->IsInt32())
        //        {
        //            Assert(hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //            Assert(!hoistBlockData.liveLossyInt32Syms->Test(dstVarSym->m_id)); // shouldn't try to convert a lossy int32 to anything
        //        }
        //        else
        //        {
        //            Assert(src->IsFloat64());
        //            Assert(hoistBlockData.liveFloat64Syms->Test(dstVarSym->m_id));
        //            if(dstSym->IsTypeSpec() && dst->IsInt32())
        //            {
        //                Assert(lossy); // shouldn't try to do a lossless conversion from float64 to int32
        //            }
        //        }
        //    }
        //    else
        //    {
        //        Assert(hoistBlockData.liveVarSyms->Test(dstVarSym->m_id));
        //    }
        //}

        //if(dstSym->IsTypeSpec() && dst->IsInt32())
        //{
        //    // If the sym is already specialized as required in the block to which we are attempting to hoist the conversion,
        //    // that info should have flowed into this block
        //    if(lossy)
        //    {
        //        Assert(!hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //    }
        //    else
        //    {
        //        Assert(!IsInt32TypeSpecialized(dstVarSym, hoistBlock));
        //    }
        //}
#endif

        MakeLive(dstSym, &hoistBlockData, lossy);

        if(!changeValueType)
        {
            continue;
        }

        // Specialize the value info in this block; reuse the previously-specialized info
        // when consecutive blocks share the same ValueInfo object to avoid redundant copies.
        Value *const hoistBlockValue = it.InvariantSymValue();
        ValueInfo *const hoistBlockValueInfo = hoistBlockValue->GetValueInfo();
        if(hoistBlockValueInfo == previousValueInfoBeforeUpdate)
        {
            if(hoistBlockValueInfo != previousValueInfoAfterUpdate)
            {
                HoistInvariantValueInfo(previousValueInfoAfterUpdate, hoistBlockValue, hoistBlock);
            }
        }
        else
        {
            previousValueInfoBeforeUpdate = hoistBlockValueInfo;
            ValueInfo *const newValueInfo =
                changeValueTypeToInt
                    ? hoistBlockValueInfo->SpecializeToInt32(alloc)
                    : hoistBlockValueInfo->SpecializeToFloat64(alloc);
            previousValueInfoAfterUpdate = newValueInfo;
            ChangeValueInfo(changeValueTypeToInt ? nullptr : hoistBlock, hoistBlockValue, newValueInfo);
        }
    }
}
// Attempts to hoist 'instr' out of the loop containing 'block'. Returns true (and
// performs the hoist via OptHoistInvariant) if the instruction is invariant in the
// loop; the hoist target is the outermost enclosing loop in which the instruction is
// still invariant. Must not be called during the loop pre-pass.
bool
GlobOpt::TryHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    bool isNotTypeSpecConv,
    const bool lossy,
    const bool forceInvariantHoisting,
    IR::BailOutKind bailoutKind)
{
    Assert(!this->IsLoopPrePass());

    if (OptIsInvariant(instr, block, block->loop, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
    {
#if DBG
        if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::InvariantsPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
        {
            Output::Print(_u(" **** INVARIANT *** "));
            instr->Dump();
        }
#endif
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::InvariantsPhase))
        {
            Output::Print(_u(" **** INVARIANT *** "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
        Loop *loop = block->loop;

        // Try hoisting from to outer most loop
        while (loop->parent && OptIsInvariant(instr, block, loop->parent, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
        {
            loop = loop->parent;
        }

        // Record the byte code use here since we are going to move this instruction up
        if (isNotTypeSpecConv)
        {
            InsertNoImplicitCallUses(instr);
            this->CaptureByteCodeSymUses(instr);
            this->InsertByteCodeUses(instr, true);
        }
#if DBG
        else
        {
            // Type spec conversions should have no byte-code sym uses to record; verify that.
            PropertySym *propertySymUse = NULL;
            NoRecoverMemoryJitArenaAllocator tempAllocator(_u("BE-GlobOpt-Temp"), this->alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
            BVSparse<JitArenaAllocator> * tempByteCodeUse = JitAnew(&tempAllocator, BVSparse<JitArenaAllocator>, &tempAllocator);
            GlobOpt::TrackByteCodeSymUsed(instr, tempByteCodeUse, &propertySymUse);
            Assert(tempByteCodeUse->Count() == 0 && propertySymUse == NULL);
        }
#endif
        OptHoistInvariant(instr, block, loop, dstVal, src1Val, isNotTypeSpecConv, lossy, bailoutKind);
        return true;
    }

    return false;
}
// Iterates blocks backward from just above 'exclusiveBeginBlock' down to and including
// 'inclusiveEndBlock' (typically a loop's landing pad). When 'invariantSym' is provided,
// only blocks in which that sym still has 'invariantSymValueNumber' are visited; pass
// nullptr together with InvalidValueNumber to visit every block with glob-opt data.
// The constructor immediately advances to the first valid block.
InvariantBlockBackwardIterator::InvariantBlockBackwardIterator(
    GlobOpt *const globOpt,
    BasicBlock *const exclusiveBeginBlock,
    BasicBlock *const inclusiveEndBlock,
    StackSym *const invariantSym,
    const ValueNumber invariantSymValueNumber)
    : globOpt(globOpt),
    exclusiveEndBlock(inclusiveEndBlock->prev),
    invariantSym(invariantSym),
    invariantSymValueNumber(invariantSymValueNumber),
    block(exclusiveBeginBlock)
#if DBG
    ,
    inclusiveEndBlock(inclusiveEndBlock)
#endif
{
    Assert(exclusiveBeginBlock);
    Assert(inclusiveEndBlock);
    Assert(!inclusiveEndBlock->isDeleted);
    Assert(exclusiveBeginBlock != inclusiveEndBlock);
    // A tracked sym and a valid value number must be supplied together, or neither.
    Assert(!invariantSym == (invariantSymValueNumber == InvalidValueNumber));

    MoveNext();
}
  17693. bool
  17694. InvariantBlockBackwardIterator::IsValid() const
  17695. {
  17696. return block != exclusiveEndBlock;
  17697. }
// Advances the iterator backward to the previous relevant block:
// - deleted blocks are skipped;
// - blocks whose glob-opt data was already merged away are skipped;
// - when tracking an invariant sym, blocks where the sym's value is missing or has a
//   different value number are skipped.
// Afterward, IsValid() is false if iteration ran past the inclusive end block.
void
InvariantBlockBackwardIterator::MoveNext()
{
    Assert(IsValid());

    while(true)
    {
#if DBG
        BasicBlock *const previouslyIteratedBlock = block;
#endif
        block = block->prev;
        if(!IsValid())
        {
            // Iteration may only end by stepping off the inclusive end block.
            Assert(previouslyIteratedBlock == inclusiveEndBlock);
            break;
        }

        if(block->isDeleted)
        {
            continue;
        }

        if(!block->globOptData.HasData())
        {
            // This block's info has already been merged with all of its successors
            continue;
        }

        if(!invariantSym)
        {
            break;
        }

        invariantSymValue = globOpt->FindValue(block->globOptData.symToValueMap, invariantSym);
        if(!invariantSymValue || invariantSymValue->GetValueNumber() != invariantSymValueNumber)
        {
            // BailOnNoProfile and throw blocks are not moved outside loops. A sym table cleanup on these paths may delete the
            // values. Field hoisting also has some odd cases where the hoisted stack sym is invariant in only part of the loop.
            continue;
        }
        break;
    }
}
  17736. BasicBlock *
  17737. InvariantBlockBackwardIterator::Block() const
  17738. {
  17739. Assert(IsValid());
  17740. return block;
  17741. }
  17742. Value *
  17743. InvariantBlockBackwardIterator::InvariantSymValue() const
  17744. {
  17745. Assert(IsValid());
  17746. Assert(invariantSym);
  17747. return invariantSymValue;
  17748. }
  17749. void
  17750. GlobOpt::HoistInvariantValueInfo(
  17751. ValueInfo *const invariantValueInfoToHoist,
  17752. Value *const valueToUpdate,
  17753. BasicBlock *const targetBlock)
  17754. {
  17755. Assert(invariantValueInfoToHoist);
  17756. Assert(valueToUpdate);
  17757. Assert(targetBlock);
  17758. // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
  17759. Assert(!invariantValueInfoToHoist->IsJsType());
  17760. Sym *const symStore = valueToUpdate->GetValueInfo()->GetSymStore();
  17761. ValueInfo *newValueInfo;
  17762. if(invariantValueInfoToHoist->GetSymStore() == symStore)
  17763. {
  17764. newValueInfo = invariantValueInfoToHoist;
  17765. }
  17766. else
  17767. {
  17768. newValueInfo = invariantValueInfoToHoist->Copy(alloc);
  17769. this->SetSymStoreDirect(newValueInfo, symStore);
  17770. }
  17771. ChangeValueInfo(targetBlock, valueToUpdate, newValueInfo);
  17772. }
  17773. // static
  17774. bool
  17775. GlobOpt::DoInlineArgsOpt(Func* func)
  17776. {
  17777. Func* topFunc = func->GetTopFunc();
  17778. Assert(topFunc != func);
  17779. bool doInlineArgsOpt =
  17780. !PHASE_OFF(Js::InlineArgsOptPhase, topFunc) &&
  17781. !func->GetHasCalls() &&
  17782. !func->GetHasUnoptimizedArgumentsAcccess() &&
  17783. func->m_canDoInlineArgsOpt;
  17784. return doInlineArgsOpt;
  17785. }
  17786. bool
  17787. GlobOpt::IsSwitchOptEnabled(Func* func)
  17788. {
  17789. Assert(func->IsTopFunc());
  17790. return !PHASE_OFF(Js::SwitchOptPhase, func) && (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsSwitchOptDisabled()) && !IsTypeSpecPhaseOff(func)
  17791. && func->DoGlobOpt() && !func->HasTry();
  17792. }
  17793. bool
  17794. GlobOpt::DoConstFold() const
  17795. {
  17796. return !PHASE_OFF(Js::ConstFoldPhase, func);
  17797. }
  17798. bool
  17799. GlobOpt::IsTypeSpecPhaseOff(Func *func)
  17800. {
  17801. return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode() || !func->DoGlobOptsForGeneratorFunc();
  17802. }
  17803. bool
  17804. GlobOpt::DoTypeSpec() const
  17805. {
  17806. return doTypeSpec;
  17807. }
  17808. bool
  17809. GlobOpt::DoAggressiveIntTypeSpec(Func* func)
  17810. {
  17811. return
  17812. !PHASE_OFF(Js::AggressiveIntTypeSpecPhase, func) &&
  17813. !IsTypeSpecPhaseOff(func) &&
  17814. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsAggressiveIntTypeSpecDisabled(func->IsLoopBody()));
  17815. }
  17816. bool
  17817. GlobOpt::DoAggressiveIntTypeSpec() const
  17818. {
  17819. return doAggressiveIntTypeSpec;
  17820. }
  17821. bool
  17822. GlobOpt::DoAggressiveMulIntTypeSpec() const
  17823. {
  17824. return doAggressiveMulIntTypeSpec;
  17825. }
  17826. bool
  17827. GlobOpt::DoDivIntTypeSpec() const
  17828. {
  17829. return doDivIntTypeSpec;
  17830. }
  17831. // static
  17832. bool
  17833. GlobOpt::DoLossyIntTypeSpec(Func* func)
  17834. {
  17835. return
  17836. !PHASE_OFF(Js::LossyIntTypeSpecPhase, func) &&
  17837. !IsTypeSpecPhaseOff(func) &&
  17838. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLossyIntTypeSpecDisabled());
  17839. }
  17840. bool
  17841. GlobOpt::DoLossyIntTypeSpec() const
  17842. {
  17843. return doLossyIntTypeSpec;
  17844. }
  17845. // static
  17846. bool
  17847. GlobOpt::DoFloatTypeSpec(Func* func)
  17848. {
  17849. return
  17850. !PHASE_OFF(Js::FloatTypeSpecPhase, func) &&
  17851. !IsTypeSpecPhaseOff(func) &&
  17852. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsFloatTypeSpecDisabled()) &&
  17853. AutoSystemInfo::Data.SSE2Available();
  17854. }
  17855. bool
  17856. GlobOpt::DoFloatTypeSpec() const
  17857. {
  17858. return doFloatTypeSpec;
  17859. }
  17860. bool
  17861. GlobOpt::DoStringTypeSpec(Func* func)
  17862. {
  17863. return !PHASE_OFF(Js::StringTypeSpecPhase, func) && !IsTypeSpecPhaseOff(func);
  17864. }
  17865. // static
  17866. bool
  17867. GlobOpt::DoTypedArrayTypeSpec(Func* func)
  17868. {
  17869. return !PHASE_OFF(Js::TypedArrayTypeSpecPhase, func) &&
  17870. !IsTypeSpecPhaseOff(func) &&
  17871. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTypedArrayTypeSpecDisabled(func->IsLoopBody()))
  17872. #if defined(_M_IX86)
  17873. && AutoSystemInfo::Data.SSE2Available()
  17874. #endif
  17875. ;
  17876. }
  17877. // static
  17878. bool
  17879. GlobOpt::DoNativeArrayTypeSpec(Func* func)
  17880. {
  17881. return !PHASE_OFF(Js::NativeArrayPhase, func) &&
  17882. !IsTypeSpecPhaseOff(func)
  17883. #if defined(_M_IX86)
  17884. && AutoSystemInfo::Data.SSE2Available()
  17885. #endif
  17886. ;
  17887. }
  17888. bool
  17889. GlobOpt::DoArrayCheckHoist(Func *const func)
  17890. {
  17891. Assert(func->IsTopFunc());
  17892. return
  17893. !PHASE_OFF(Js::ArrayCheckHoistPhase, func) &&
  17894. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(func->IsLoopBody())) &&
  17895. !func->IsJitInDebugMode() && // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
  17896. func->DoGlobOptsForGeneratorFunc();
  17897. }
  17898. bool
  17899. GlobOpt::DoArrayCheckHoist() const
  17900. {
  17901. return doArrayCheckHoist;
  17902. }
// Decides whether array checks may be hoisted for a specific base value in a specific
// context. Requires the per-function decision to be on and the instruction not to be in
// a stack-args-optimized position. For likely JS arrays (or objects with internal
// arrays), hoisting additionally requires that implicit calls can be disabled in the
// enclosing loop (or function, when no loop is given).
bool
GlobOpt::DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr *const instr) const
{
    if(!DoArrayCheckHoist() || (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
    {
        return false;
    }

    if(!baseValueType.IsLikelyArrayOrObjectWithArray() ||
        (loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func)))
    {
        return true;
    }

    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant JS array checks
#if DBG_DUMP
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArrayCheckHoist disabled for JS arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif

    return false;
}
  17928. bool
  17929. GlobOpt::DoArrayMissingValueCheckHoist(Func *const func)
  17930. {
  17931. return
  17932. DoArrayCheckHoist(func) &&
  17933. !PHASE_OFF(Js::ArrayMissingValueCheckHoistPhase, func) &&
  17934. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayMissingValueCheckHoistDisabled(func->IsLoopBody()));
  17935. }
  17936. bool
  17937. GlobOpt::DoArrayMissingValueCheckHoist() const
  17938. {
  17939. return doArrayMissingValueCheckHoist;
  17940. }
  17941. bool
  17942. GlobOpt::DoArraySegmentHoist(const ValueType baseValueType, Func *const func)
  17943. {
  17944. Assert(baseValueType.IsLikelyAnyOptimizedArray());
  17945. if(!DoArrayCheckHoist(func) || PHASE_OFF(Js::ArraySegmentHoistPhase, func))
  17946. {
  17947. return false;
  17948. }
  17949. if(!baseValueType.IsLikelyArrayOrObjectWithArray())
  17950. {
  17951. return true;
  17952. }
  17953. return
  17954. !PHASE_OFF(Js::JsArraySegmentHoistPhase, func) &&
  17955. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsJsArraySegmentHoistDisabled(func->IsLoopBody()));
  17956. }
  17957. bool
  17958. GlobOpt::DoArraySegmentHoist(const ValueType baseValueType) const
  17959. {
  17960. Assert(baseValueType.IsLikelyAnyOptimizedArray());
  17961. return baseValueType.IsLikelyArrayOrObjectWithArray() ? doJsArraySegmentHoist : doArraySegmentHoist;
  17962. }
// Decides whether typed-array head segment length loads may be hoisted for the
// given loop (or the whole function when loop is null).
bool
GlobOpt::DoTypedArraySegmentLengthHoist(Loop *const loop) const
{
    // Uses Int32Array as a representative typed-array type for the segment-hoist check.
    if(!DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array)))
    {
        return false;
    }
    if(loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func))
    {
        return true;
    }
    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant typed array
    // segment length loads.
#if DBG_DUMP
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArraySegmentLengthHoist disabled for typed arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return false;
}
  17988. bool
  17989. GlobOpt::DoArrayLengthHoist(Func *const func)
  17990. {
  17991. return
  17992. DoArrayCheckHoist(func) &&
  17993. !PHASE_OFF(Js::Phase::ArrayLengthHoistPhase, func) &&
  17994. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayLengthHoistDisabled(func->IsLoopBody()));
  17995. }
  17996. bool
  17997. GlobOpt::DoArrayLengthHoist() const
  17998. {
  17999. return doArrayLengthHoist;
  18000. }
  18001. bool
  18002. GlobOpt::DoEliminateArrayAccessHelperCall(Func *const func)
  18003. {
  18004. return DoArrayCheckHoist(func);
  18005. }
  18006. bool
  18007. GlobOpt::DoEliminateArrayAccessHelperCall() const
  18008. {
  18009. return doEliminateArrayAccessHelperCall;
  18010. }
// Decides whether an LdLen_A (or a prospective one, when instr is null) may be
// int-specialized, based on phases, profile info, and the base's value type.
bool
GlobOpt::DoLdLenIntSpec(IR::Instr *const instr, const ValueType baseValueType) const
{
    Assert(!instr || instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(!instr || instr->GetDst());
    Assert(!instr || instr->GetSrc1());

    // Phase/profile disables, and conflict with the stack-args optimization.
    if(PHASE_OFF(Js::LdLenIntSpecPhase, func) ||
        IsTypeSpecPhaseOff(func) ||
        (func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsLdLenIntSpecDisabled()) ||
        (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
    {
        return false;
    }

    // Profiled element type must be likely int and the dst sym must not be marked
    // as not-int for the specialization to be worthwhile.
    if(instr &&
        instr->IsProfiledInstr() &&
        (
            !instr->AsProfiledInstr()->u.ldElemInfo->GetElementType().IsLikelyInt() ||
            instr->GetDst()->AsRegOpnd()->m_sym->m_isNotInt
        ))
    {
        return false;
    }

    Assert(!instr || baseValueType == instr->GetSrc1()->GetValueType());
    // Strings and optimized arrays (except ObjectWithArray) have int lengths.
    return
        baseValueType.HasBeenString() ||
        (baseValueType.IsLikelyAnyOptimizedArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray);
}
  18038. bool
  18039. GlobOpt::DoPathDependentValues() const
  18040. {
  18041. return !PHASE_OFF(Js::Phase::PathDependentValuesPhase, func);
  18042. }
  18043. bool
  18044. GlobOpt::DoTrackRelativeIntBounds() const
  18045. {
  18046. return doTrackRelativeIntBounds;
  18047. }
  18048. bool
  18049. GlobOpt::DoBoundCheckElimination() const
  18050. {
  18051. return doBoundCheckElimination;
  18052. }
  18053. bool
  18054. GlobOpt::DoBoundCheckHoist() const
  18055. {
  18056. return doBoundCheckHoist;
  18057. }
  18058. bool
  18059. GlobOpt::DoLoopCountBasedBoundCheckHoist() const
  18060. {
  18061. return doLoopCountBasedBoundCheckHoist;
  18062. }
  18063. bool
  18064. GlobOpt::DoPowIntIntTypeSpec() const
  18065. {
  18066. return doPowIntIntTypeSpec;
  18067. }
  18068. bool
  18069. GlobOpt::DoTagChecks() const
  18070. {
  18071. return doTagChecks;
  18072. }
  18073. bool
  18074. GlobOpt::TrackArgumentsObject()
  18075. {
  18076. if (PHASE_OFF(Js::StackArgOptPhase, this->func))
  18077. {
  18078. this->CannotAllocateArgumentsObjectOnStack();
  18079. return false;
  18080. }
  18081. return func->GetHasStackArgs();
  18082. }
// Marks the function as unable to allocate its arguments object on the stack and,
// under test tracing, logs the decision.
void
GlobOpt::CannotAllocateArgumentsObjectOnStack()
{
    func->SetHasStackArgs(false);
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (PHASE_TESTTRACE(Js::StackArgOptPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(_u("Stack args disabled for function %s(%s)\n"), func->GetJITFunctionBody()->GetDisplayName(), func->GetDebugNumberSet(debugStringBuffer));
        Output::Flush();
    }
#endif
}
// Peephole pass run before the main per-instruction optimization. Handles opcodes
// with a dead fall-through (e.g. unconditional bailouts) and trims the unreachable
// code following them. Returns the (possibly replaced) instruction.
IR::Instr *
GlobOpt::PreOptPeep(IR::Instr *instr)
{
    if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::BailOnNoProfile:
        {
            // Handle BailOnNoProfile
            if (instr->HasBailOutInfo())
            {
                if (!this->prePassLoop)
                {
                    FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
                }
                // Already processed.
                return instr;
            }

            // Convert to bailout instr
            // The bailout must attach to the next instruction that carries a real
            // bytecode offset, so skip compiler-generated instructions without one.
            IR::Instr *nextBytecodeOffsetInstr = instr->GetNextRealInstrOrLabel();
            while(nextBytecodeOffsetInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
            {
                nextBytecodeOffsetInstr = nextBytecodeOffsetInstr->GetNextRealInstrOrLabel();
                Assert(!nextBytecodeOffsetInstr->IsLabelInstr());
            }
            instr = instr->ConvertToBailOutInstr(nextBytecodeOffsetInstr, IR::BailOutOnNoProfile);
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(nextBytecodeOffsetInstr);

            if (!this->currentBlock->loop)
            {
                FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
            }
            else
            {
                // Inside a loop, the bailout info is only filled outside the prepass.
                Assert(this->prePassLoop);
            }
            break;
        }
        case Js::OpCode::BailOnException:
        {
            Assert(this->func->HasTry() && this->func->DoOptimizeTryCatch() &&
                instr->m_prev->m_opcode == Js::OpCode::Catch &&
                instr->m_prev->m_prev->IsLabelInstr() &&
                instr->m_prev->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeCatch); // Should also handle RegionTypeFinally
            break;
        }

        default:
        {
            // For other dead-fall-through opcodes, trimming only happens during the
            // loop prepass or outside loops; skip on the real pass within a loop.
            if(this->currentBlock->loop && !this->IsLoopPrePass())
            {
                return instr;
            }
            break;
        }
        }
        RemoveCodeAfterNoFallthroughInstr(instr);
    }

    return instr;
}
// Removes the dead instructions following 'instr' in the current block (instr has
// no fall-through) and then prunes the block's now-dead successor edges.
void
GlobOpt::RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr)
{
    if (instr != this->currentBlock->GetLastInstr())
    {
        // Remove dead code after bailout
        IR::Instr *instrDead = instr->m_next;
        IR::Instr *instrNext;

        for (; instrDead != this->currentBlock->GetLastInstr(); instrDead = instrNext)
        {
            instrNext = instrDead->m_next;
            // Stop short of FunctionExit, which must remain.
            if (instrNext->m_opcode == Js::OpCode::FunctionExit)
            {
                break;
            }
            this->func->m_fg->RemoveInstr(instrDead, this);
        }
        // Remove the final dead instruction and retarget the block's last instr.
        IR::Instr *instrNextBlock = instrDead->m_next;
        this->func->m_fg->RemoveInstr(instrDead, this);

        this->currentBlock->SetLastInstr(instrNextBlock->m_prev);
    }

    // Cleanup dead successors
    FOREACH_SUCCESSOR_BLOCK_EDITING(deadBlock, this->currentBlock, iter)
    {
        this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);
        if (this->currentBlock->GetDataUseCount() > 0)
        {
            this->currentBlock->DecrementDataUseCount();
        }
    } NEXT_SUCCESSOR_BLOCK_EDITING;
}
  18187. void
  18188. GlobOpt::ProcessTryCatch(IR::Instr* instr)
  18189. {
  18190. Assert(instr->m_next->IsLabelInstr() && instr->m_next->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeTry);
  18191. Region* tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
  18192. BVSparse<JitArenaAllocator> * writeThroughSymbolsSet = tryRegion->writeThroughSymbolsSet;
  18193. ToVar(writeThroughSymbolsSet, this->currentBlock);
  18194. }
// After a def inside a try region, insert a ToVar of the dst's var-equivalent sym
// when that sym is write-through for the region, so the catch handler sees a var.
void
GlobOpt::InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd)
{
    if (this->currentRegion->GetType() == RegionTypeTry && dstOpnd->IsRegOpnd() && dstOpnd->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
    {
        StackSym * sym = dstOpnd->AsRegOpnd()->m_sym;
        // Already a var — no conversion needed.
        if (sym->IsVar())
        {
            return;
        }

        StackSym * varSym = sym->GetVarEquivSym(nullptr);
        if (this->currentRegion->writeThroughSymbolsSet->Test(varSym->m_id))
        {
            // Insert the ToVar right after the defining instruction.
            IR::RegOpnd * regOpnd = IR::RegOpnd::New(varSym, IRType::TyVar, instr->m_func);
            this->ToVar(instr->m_next, regOpnd, this->currentBlock, NULL, false);
        }
    }
}
// Removes the control-flow edge into a catch block for a BrOnException /
// BrOnNoException branch once that edge is known dead. For BrOnNoException the
// catch may be reached either directly via the following label or through an
// intermediate block ending in an unconditional jump to the catch label.
void
GlobOpt::RemoveFlowEdgeToCatchBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());

    BasicBlock * catchBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // The branch target is the catch; the edge leaves the current block.
        catchBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else if (instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();

        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeCatch)
        {
            // Fall-through lands directly on the catch block.
            catchBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            // Fall-through lands on a block that unconditionally jumps to the catch.
            Assert(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional());
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchToCatchBlock = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * catchBlockLabel = branchToCatchBlock->GetTarget();
            Assert(catchBlockLabel->GetRegion()->GetType() == RegionTypeCatch);
            catchBlock = catchBlockLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }

    Assert(catchBlock);
    Assert(predBlock);
    if (this->func->m_fg->FindEdge(predBlock, catchBlock))
    {
        predBlock->RemoveDeadSucc(catchBlock, this->func->m_fg);
        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }
}
// Per-instruction peephole run outside the loop prepass: Nops dead branches whose
// source evaluation is known side-effect-free, and removes self-assign loads.
IR::Instr *
GlobOpt::OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    IR::Opnd *dst, *src1, *src2;

    if (this->IsLoopPrePass())
    {
        return instr;
    }

    switch (instr->m_opcode)
    {
    case Js::OpCode::DeadBrEqual:
    case Js::OpCode::DeadBrRelational:
    case Js::OpCode::DeadBrSrEqual:
        src1 = instr->GetSrc1();
        src2 = instr->GetSrc2();

        // These branches were turned into dead branches because they were unnecessary (branch to next, ...).
        // The DeadBr are necessary in case the evaluation of the sources have side-effects.
        // If we know for sure the srcs are primitive or have been type specialized, we don't need these instructions
        if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && this->IsTypeSpecialized(src1->AsRegOpnd()->m_sym, this->currentBlock))) &&
            ((src2Val && src2Val->GetValueInfo()->IsPrimitive()) || (src2->IsRegOpnd() && this->IsTypeSpecialized(src2->AsRegOpnd()->m_sym, this->currentBlock))))
        {
            this->CaptureByteCodeSymUses(instr);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    case Js::OpCode::DeadBrOnHasProperty:
        src1 = instr->GetSrc1();

        // Same reasoning as above, with a single source.
        if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && this->IsTypeSpecialized(src1->AsRegOpnd()->m_sym, this->currentBlock))))
        {
            this->CaptureByteCodeSymUses(instr);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
        src1 = instr->GetSrc1();
        dst = instr->GetDst();

        // A load whose dst equals its src is a no-op. Nop it, but preserve the
        // bytecode use of the dst when it maps back to a bytecode register.
        if (dst->IsRegOpnd() && dst->IsEqual(src1))
        {
            dst = instr->UnlinkDst();
            if (!dst->GetIsJITOptimizedReg())
            {
                IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(instr);
                bytecodeUse->SetDst(dst);
                instr->InsertAfter(bytecodeUse);
            }
            instr->FreeSrc1();
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;
    }
    return instr;
}
// Optimizes the base and (optional) index operands of an indir: updates their
// value types and copy-props them. The index's value (if any) is returned through
// indirIndexValRef. 'instr' may be replaced by OptSrc, hence the in/out pointer.
void
GlobOpt::OptimizeIndirUses(IR::IndirOpnd *indirOpnd, IR::Instr * *pInstr, Value **indirIndexValRef)
{
    IR::Instr * &instr = *pInstr;
    Assert(!indirIndexValRef || !*indirIndexValRef);

    // Update value types and copy-prop the base
    OptSrc(indirOpnd->GetBaseOpnd(), &instr, nullptr, indirOpnd);

    IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
    if (!indexOpnd)
    {
        return;
    }

    // Update value types and copy-prop the index
    Value *indexVal = OptSrc(indexOpnd, &instr, nullptr, indirOpnd);
    if(indirIndexValRef)
    {
        *indirIndexValRef = indexVal;
    }
}
  18328. bool
  18329. ValueInfo::IsGeneric() const
  18330. {
  18331. return structureKind == ValueStructureKind::Generic;
  18332. }
  18333. bool
  18334. ValueInfo::IsIntConstant() const
  18335. {
  18336. return IsInt() && structureKind == ValueStructureKind::IntConstant;
  18337. }
  18338. const IntConstantValueInfo *
  18339. ValueInfo::AsIntConstant() const
  18340. {
  18341. Assert(IsIntConstant());
  18342. return static_cast<const IntConstantValueInfo *>(this);
  18343. }
  18344. bool
  18345. ValueInfo::IsIntRange() const
  18346. {
  18347. return IsInt() && structureKind == ValueStructureKind::IntRange;
  18348. }
  18349. const IntRangeValueInfo *
  18350. ValueInfo::AsIntRange() const
  18351. {
  18352. Assert(IsIntRange());
  18353. return static_cast<const IntRangeValueInfo *>(this);
  18354. }
  18355. bool
  18356. ValueInfo::IsIntBounded() const
  18357. {
  18358. const bool isIntBounded = IsLikelyInt() && structureKind == ValueStructureKind::IntBounded;
  18359. // Bounds for definitely int values should have relative bounds, otherwise those values should use one of the other value
  18360. // infos
  18361. Assert(!isIntBounded || static_cast<const IntBoundedValueInfo *>(this)->Bounds()->RequiresIntBoundedValueInfo(Type()));
  18362. return isIntBounded;
  18363. }
  18364. const IntBoundedValueInfo *
  18365. ValueInfo::AsIntBounded() const
  18366. {
  18367. Assert(IsIntBounded());
  18368. return static_cast<const IntBoundedValueInfo *>(this);
  18369. }
  18370. bool
  18371. ValueInfo::IsFloatConstant() const
  18372. {
  18373. return IsFloat() && structureKind == ValueStructureKind::FloatConstant;
  18374. }
  18375. FloatConstantValueInfo *
  18376. ValueInfo::AsFloatConstant()
  18377. {
  18378. Assert(IsFloatConstant());
  18379. return static_cast<FloatConstantValueInfo *>(this);
  18380. }
  18381. const FloatConstantValueInfo *
  18382. ValueInfo::AsFloatConstant() const
  18383. {
  18384. Assert(IsFloatConstant());
  18385. return static_cast<const FloatConstantValueInfo *>(this);
  18386. }
  18387. bool
  18388. ValueInfo::IsVarConstant() const
  18389. {
  18390. return structureKind == ValueStructureKind::VarConstant;
  18391. }
  18392. VarConstantValueInfo *
  18393. ValueInfo::AsVarConstant()
  18394. {
  18395. Assert(IsVarConstant());
  18396. return static_cast<VarConstantValueInfo *>(this);
  18397. }
  18398. bool
  18399. ValueInfo::IsJsType() const
  18400. {
  18401. Assert(!(structureKind == ValueStructureKind::JsType && !IsUninitialized()));
  18402. return structureKind == ValueStructureKind::JsType;
  18403. }
  18404. JsTypeValueInfo *
  18405. ValueInfo::AsJsType()
  18406. {
  18407. Assert(IsJsType());
  18408. return static_cast<JsTypeValueInfo *>(this);
  18409. }
  18410. const JsTypeValueInfo *
  18411. ValueInfo::AsJsType() const
  18412. {
  18413. Assert(IsJsType());
  18414. return static_cast<const JsTypeValueInfo *>(this);
  18415. }
  18416. bool
  18417. ValueInfo::IsArrayValueInfo() const
  18418. {
  18419. return IsAnyOptimizedArray() && structureKind == ValueStructureKind::Array;
  18420. }
  18421. const
  18422. ArrayValueInfo *ValueInfo::AsArrayValueInfo() const
  18423. {
  18424. Assert(IsArrayValueInfo());
  18425. return static_cast<const ArrayValueInfo *>(this);
  18426. }
  18427. ArrayValueInfo *
  18428. ValueInfo::AsArrayValueInfo()
  18429. {
  18430. Assert(IsArrayValueInfo());
  18431. return static_cast<ArrayValueInfo *>(this);
  18432. }
// Produces a value info specialized to definite int32, preserving any int bounds
// information that is still representable after specialization.
ValueInfo *
ValueInfo::SpecializeToInt32(JitArenaAllocator *const allocator, const bool isForLoopBackEdgeCompensation)
{
    // Int specialization in some uncommon loop cases involving dependencies, needs to allow specializing values of arbitrary
    // types, even values that are definitely not int, to compensate for aggressive assumptions made by a loop prepass. In all
    // other cases, only values that are likely int may be int-specialized.
    Assert(IsUninitialized() || IsLikelyInt() || isForLoopBackEdgeCompensation);

    if(IsInt())
    {
        return this;
    }

    if(!IsIntBounded())
    {
        // No bounds to preserve; just retag the copied info as definite int.
        ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);
        newValueInfo->Type() = ValueType::GetInt(true);
        return newValueInfo;
    }

    const IntBoundedValueInfo *const boundedValueInfo = AsIntBounded();
    const IntBounds *const bounds = boundedValueInfo->Bounds();
    const IntConstantBounds constantBounds = bounds->ConstantBounds();
    if(bounds->RequiresIntBoundedValueInfo())
    {
        // Relative bounds survive specialization; keep the bounded info.
        IntBoundedValueInfo *const newValueInfo = boundedValueInfo->Copy(allocator);
        newValueInfo->Type() = constantBounds.GetValueType();
        return newValueInfo;
    }

    // Only constant bounds remain: collapse to a constant or a range info.
    ValueInfo *const newValueInfo =
        constantBounds.IsConstant()
            ? static_cast<ValueInfo *>(IntConstantValueInfo::New(allocator, constantBounds.LowerBound()))
            : IntRangeValueInfo::New(allocator, constantBounds.LowerBound(), constantBounds.UpperBound(), false);
    newValueInfo->SetSymStore(GetSymStore());
    return newValueInfo;
}
// Produces a value info specialized to a definite number/float type.
ValueInfo *
ValueInfo::SpecializeToFloat64(JitArenaAllocator *const allocator)
{
    if(IsNumber())
    {
        return this;
    }

    ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);

    // If the value type was likely int, after float-specializing, it's preferable to use Int_Number rather than Float, as the
    // former is also likely int and allows int specialization later.
    newValueInfo->Type() = IsLikelyInt() ? Type().ToDefiniteAnyNumber() : Type().ToDefiniteAnyFloat();
    return newValueInfo;
}
  18479. // SIMD_JS
  18480. ValueInfo *
  18481. ValueInfo::SpecializeToSimd128(IRType type, JitArenaAllocator *const allocator)
  18482. {
  18483. switch (type)
  18484. {
  18485. case TySimd128F4:
  18486. return SpecializeToSimd128F4(allocator);
  18487. case TySimd128I4:
  18488. return SpecializeToSimd128I4(allocator);
  18489. default:
  18490. Assert(UNREACHED);
  18491. return nullptr;
  18492. }
  18493. }
  18494. ValueInfo *
  18495. ValueInfo::SpecializeToSimd128F4(JitArenaAllocator *const allocator)
  18496. {
  18497. if (IsSimd128Float32x4())
  18498. {
  18499. return this;
  18500. }
  18501. ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);
  18502. newValueInfo->Type() = ValueType::GetSimd128(ObjectType::Simd128Float32x4);
  18503. return newValueInfo;
  18504. }
  18505. ValueInfo *
  18506. ValueInfo::SpecializeToSimd128I4(JitArenaAllocator *const allocator)
  18507. {
  18508. if (IsSimd128Int32x4())
  18509. {
  18510. return this;
  18511. }
  18512. ValueInfo *const newValueInfo = CopyWithGenericStructureKind(allocator);
  18513. newValueInfo->Type() = ValueType::GetSimd128(ObjectType::Simd128Int32x4);
  18514. return newValueInfo;
  18515. }
  18516. bool
  18517. ValueInfo::GetIsShared() const
  18518. {
  18519. return IsJsType() ? AsJsType()->GetIsShared() : false;
  18520. }
  18521. void
  18522. ValueInfo::SetIsShared()
  18523. {
  18524. if (IsJsType()) AsJsType()->SetIsShared();
  18525. }
// Deep-copies this value info, dispatching on structure kind to the most-derived
// Copy. Falls back to a generic-kind copy for plain value infos.
ValueInfo *
ValueInfo::Copy(JitArenaAllocator * allocator)
{
    if(IsIntConstant())
    {
        return AsIntConstant()->Copy(allocator);
    }
    if(IsIntRange())
    {
        return AsIntRange()->Copy(allocator);
    }
    if(IsIntBounded())
    {
        return AsIntBounded()->Copy(allocator);
    }
    if(IsFloatConstant())
    {
        return AsFloatConstant()->Copy(allocator);
    }
    if(IsJsType())
    {
        return AsJsType()->Copy(allocator);
    }
    if(IsArrayValueInfo())
    {
        return AsArrayValueInfo()->Copy(allocator);
    }
    return CopyWithGenericStructureKind(allocator);
}
// Retrieves the constant int bounds of this value, if available. Returns true and
// fills *pMin/*pMax on success; returns false (outputs untouched) otherwise.
bool
ValueInfo::GetIntValMinMax(int *pMin, int *pMax, bool doAggressiveIntTypeSpec)
{
    IntConstantBounds intConstantBounds;
    if (TryGetIntConstantBounds(&intConstantBounds, doAggressiveIntTypeSpec))
    {
        *pMin = intConstantBounds.LowerBound();
        *pMax = intConstantBounds.UpperBound();
        return true;
    }

    // Failure implies the value is not a definite int (nor likely int when
    // aggressive int type spec was requested).
    Assert(!IsInt());
    Assert(!doAggressiveIntTypeSpec || !IsLikelyInt());
    return false;
}
  18569. bool
  18570. GlobOpt::IsPREInstrCandidateLoad(Js::OpCode opcode)
  18571. {
  18572. switch (opcode)
  18573. {
  18574. case Js::OpCode::LdFld:
  18575. case Js::OpCode::LdFldForTypeOf:
  18576. case Js::OpCode::LdRootFld:
  18577. case Js::OpCode::LdRootFldForTypeOf:
  18578. case Js::OpCode::LdMethodFld:
  18579. case Js::OpCode::LdRootMethodFld:
  18580. case Js::OpCode::LdSlot:
  18581. case Js::OpCode::LdSlotArr:
  18582. return true;
  18583. }
  18584. return false;
  18585. }
  18586. bool
  18587. GlobOpt::IsPREInstrCandidateStore(Js::OpCode opcode)
  18588. {
  18589. switch (opcode)
  18590. {
  18591. case Js::OpCode::StFld:
  18592. case Js::OpCode::StRootFld:
  18593. case Js::OpCode::StSlot:
  18594. return true;
  18595. }
  18596. return false;
  18597. }
// True when the loop's profiled implicit call flags permit optimizations: profile
// info exists, and the only implicit calls recorded (if any) are accessor calls.
bool
GlobOpt::ImplicitCallFlagsAllowOpts(Loop *loop)
{
    return loop->GetImplicitCallFlags() != Js::ImplicitCall_HasNoInfo &&
        (((loop->GetImplicitCallFlags() & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
}
// Function-level counterpart of the loop overload above: profile info exists, and
// the only implicit calls recorded (if any) are accessor calls.
bool
GlobOpt::ImplicitCallFlagsAllowOpts(Func *func)
{
    return func->m_fg->implicitCallFlags != Js::ImplicitCall_HasNoInfo &&
        (((func->m_fg->implicitCallFlags & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
}
#if DBG_DUMP
// Debug-only: prints a human-readable summary of this value info (value type,
// constant/range payload, JS type info, array syms, and the sym store).
void ValueInfo::Dump()
{
    if(!IsJsType()) // The value type is uninitialized for a type value
    {
        char typeStr[VALUE_TYPE_MAX_STRING_SIZE];
        Type().ToString(typeStr);
        Output::Print(_u("%S"), typeStr);
    }

    IntConstantBounds intConstantBounds;
    if(TryGetIntConstantBounds(&intConstantBounds))
    {
        if(intConstantBounds.IsConstant())
        {
            Output::Print(_u(" constant:%d"), intConstantBounds.LowerBound());
            return;
        }
        Output::Print(_u(" range:%d - %d"), intConstantBounds.LowerBound(), intConstantBounds.UpperBound());
    }
    else if(IsFloatConstant())
    {
        Output::Print(_u(" constant:%g"), AsFloatConstant()->FloatValue());
    }
    else if(IsJsType())
    {
        const JITTypeHolder type(AsJsType()->GetJsType());
        type != nullptr ? Output::Print(_u("type: 0x%p, "), type->GetAddr()) : Output::Print(_u("type: null, "));
        Output::Print(_u("type Set: "));
        Js::EquivalentTypeSet* typeSet = AsJsType()->GetJsTypeSet();
        if (typeSet != nullptr)
        {
            // NOTE(review): assumes a non-null typeSet has at least one entry;
            // typeCount == 0 would make GetType(typeCount - 1) underflow — confirm.
            uint16 typeCount = typeSet->GetCount();
            for (uint16 ti = 0; ti < typeCount - 1; ti++)
            {
                Output::Print(_u("0x%p, "), typeSet->GetType(ti));
            }
            Output::Print(_u("0x%p"), typeSet->GetType(typeCount - 1));
        }
        else
        {
            Output::Print(_u("null"));
        }
    }
    else if(IsArrayValueInfo())
    {
        const ArrayValueInfo *const arrayValueInfo = AsArrayValueInfo();
        if(arrayValueInfo->HeadSegmentSym())
        {
            Output::Print(_u(" seg: "));
            arrayValueInfo->HeadSegmentSym()->Dump();
        }
        if(arrayValueInfo->HeadSegmentLengthSym())
        {
            Output::Print(_u(" segLen: "));
            arrayValueInfo->HeadSegmentLengthSym()->Dump();
        }
        if(arrayValueInfo->LengthSym())
        {
            Output::Print(_u(" len: "));
            arrayValueInfo->LengthSym()->Dump();
        }
    }

    if (this->GetSymStore())
    {
        Output::Print(_u("\t\tsym:"));
        this->GetSymStore()->Dump();
    }
}
  18678. void
  18679. GlobOpt::Dump()
  18680. {
  18681. this->DumpSymToValueMap();
  18682. }
  18683. void
  18684. GlobOpt::DumpSymToValueMap(GlobHashTable* symToValueMap)
  18685. {
  18686. if (symToValueMap != nullptr)
  18687. {
  18688. symToValueMap->Dump(GlobOpt::DumpSym);
  18689. }
  18690. }
  18691. void
  18692. GlobOpt::DumpSymToValueMap(BasicBlock *block)
  18693. {
  18694. Output::Print(_u("\n*** SymToValueMap ***\n"));
  18695. DumpSymToValueMap(block->globOptData.symToValueMap);
  18696. }
  18697. void
  18698. GlobOpt::DumpSymToValueMap()
  18699. {
  18700. DumpSymToValueMap(this->currentBlock);
  18701. }
  18702. void
  18703. GlobOpt::DumpSym(Sym *sym)
  18704. {
  18705. sym->Dump();
  18706. }
// Debug helper: prints the sym with the given id and its current value from the
// value table (intended to be invoked from a debugger).
void
GlobOpt::DumpSymVal(int index)
{
    SymID id = index;
    extern Func *CurrentFunc;
    Sym *sym = this->func->m_symTable->Find(id);

    AssertMsg(sym, "Sym not found!!!");

    Output::Print(_u("Sym: "));
    sym->Dump();

    Output::Print(_u("\t\tValueNumber: "));
    // NOTE(review): assumes the sym has an entry in the value table; a missing
    // entry would leave pValue null and crash the deref below — confirm usage.
    Value ** pValue = this->blockData.symToValueMap->Get(sym->m_id);
    (*pValue)->Dump();

    Output::Print(_u("\n"));
}
  18721. void
  18722. GlobOpt::Trace(BasicBlock * block, bool before)
  18723. {
  18724. bool globOptTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18725. bool typeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::TypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18726. bool floatTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FloatTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18727. bool fieldHoistTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldHoistPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18728. bool fieldCopyPropTrace = fieldHoistTrace || Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18729. bool objTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ObjTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18730. bool valueTableTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ValueTablePhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18731. bool fieldPRETrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  18732. bool anyTrace = globOptTrace || typeSpecTrace || floatTypeSpecTrace || fieldCopyPropTrace || fieldHoistTrace || objTypeSpecTrace || valueTableTrace || fieldPRETrace;
  18733. if (!anyTrace)
  18734. {
  18735. return;
  18736. }
  18737. if (fieldPRETrace && this->IsLoopPrePass())
  18738. {
  18739. if (block->isLoopHeader && before)
  18740. {
  18741. Output::Print(_u("==== Loop Prepass block header #%-3d, Visiting Loop block head #%-3d\n"),
  18742. this->prePassLoop->GetHeadBlock()->GetBlockNum(), block->GetBlockNum());
  18743. }
  18744. }
  18745. if (!typeSpecTrace && !floatTypeSpecTrace && !valueTableTrace && !Js::Configuration::Global.flags.Verbose)
  18746. {
  18747. return;
  18748. }
  18749. if (before)
  18750. {
  18751. Output::Print(_u("========================================================================\n"));
  18752. Output::Print(_u("Begin OptBlock: Block #%-3d"), block->GetBlockNum());
  18753. if (block->loop)
  18754. {
  18755. Output::Print(_u(" Loop block header:%-3d currentLoop block head:%-3d %s"),
  18756. block->loop->GetHeadBlock()->GetBlockNum(),
  18757. this->prePassLoop ? this->prePassLoop->GetHeadBlock()->GetBlockNum() : 0,
  18758. this->IsLoopPrePass() ? _u("PrePass") : _u(""));
  18759. }
  18760. Output::Print(_u("\n"));
  18761. }
  18762. else
  18763. {
  18764. Output::Print(_u("-----------------------------------------------------------------------\n"));
  18765. Output::Print(_u("After OptBlock: Block #%-3d\n"), block->GetBlockNum());
  18766. }
  18767. if ((typeSpecTrace || floatTypeSpecTrace) && !block->globOptData.liveVarSyms->IsEmpty())
  18768. {
  18769. Output::Print(_u(" Live var syms: "));
  18770. block->globOptData.liveVarSyms->Dump();
  18771. }
  18772. if (typeSpecTrace && !block->globOptData.liveInt32Syms->IsEmpty())
  18773. {
  18774. Assert(this->tempBv->IsEmpty());
  18775. this->tempBv->Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
  18776. if(!this->tempBv->IsEmpty())
  18777. {
  18778. Output::Print(_u(" Int32 type specialized (lossless) syms: "));
  18779. this->tempBv->Dump();
  18780. }
  18781. this->tempBv->ClearAll();
  18782. if(!block->globOptData.liveLossyInt32Syms->IsEmpty())
  18783. {
  18784. Output::Print(_u(" Int32 converted (lossy) syms: "));
  18785. block->globOptData.liveLossyInt32Syms->Dump();
  18786. }
  18787. }
  18788. if (floatTypeSpecTrace && !block->globOptData.liveFloat64Syms->IsEmpty())
  18789. {
  18790. Output::Print(_u(" Float64 type specialized syms: "));
  18791. block->globOptData.liveFloat64Syms->Dump();
  18792. }
  18793. if ((fieldCopyPropTrace || objTypeSpecTrace) && this->DoFieldCopyProp(block->loop) && !block->globOptData.liveFields->IsEmpty())
  18794. {
  18795. Output::Print(_u(" Live field syms: "));
  18796. block->globOptData.liveFields->Dump();
  18797. }
  18798. if ((fieldHoistTrace || objTypeSpecTrace) && this->DoFieldHoisting(block->loop) && HasHoistableFields(block))
  18799. {
  18800. Output::Print(_u(" Hoistable field sym: "));
  18801. block->globOptData.hoistableFields->Dump();
  18802. }
  18803. if (objTypeSpecTrace || valueTableTrace)
  18804. {
  18805. Output::Print(_u(" Value table:\n"));
  18806. DumpSymToValueMap(block->globOptData.symToValueMap);
  18807. }
  18808. if (before)
  18809. {
  18810. Output::Print(_u("-----------------------------------------------------------------------\n")); \
  18811. }
  18812. Output::Flush();
  18813. }
  18814. void
  18815. GlobOpt::TraceSettings()
  18816. {
  18817. Output::Print(_u("GlobOpt Settings:\r\n"));
  18818. Output::Print(_u(" FloatTypeSpec: %s\r\n"), this->DoFloatTypeSpec() ? _u("enabled") : _u("disabled"));
  18819. Output::Print(_u(" AggressiveIntTypeSpec: %s\r\n"), this->DoAggressiveIntTypeSpec() ? _u("enabled") : _u("disabled"));
  18820. Output::Print(_u(" LossyIntTypeSpec: %s\r\n"), this->DoLossyIntTypeSpec() ? _u("enabled") : _u("disabled"));
  18821. Output::Print(_u(" ArrayCheckHoist: %s\r\n"), (this->func->HasProfileInfo() && this->func->GetReadOnlyProfileInfo()->IsArrayCheckHoistDisabled(func->IsLoopBody())) ? _u("disabled") : _u("enabled"));
  18822. Output::Print(_u(" ImplicitCallFlags: %s\r\n"), Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->func->m_fg->implicitCallFlags));
  18823. for (Loop * loop = this->func->m_fg->loopList; loop != NULL; loop = loop->next)
  18824. {
  18825. Output::Print(_u(" loop: %d, ImplicitCallFlags: %s\r\n"), loop->GetLoopNumber(),
  18826. Js::DynamicProfileInfo::GetImplicitCallFlagsString(loop->GetImplicitCallFlags()));
  18827. }
  18828. Output::Flush();
  18829. }
  18830. #endif // DBG_DUMP
  18831. IR::Instr *
  18832. GlobOpt::TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrLast)
  18833. {
  18834. if (!this->func->GetHasMarkTempObjects())
  18835. {
  18836. return instrLast;
  18837. }
  18838. IR::Instr * instr = instrStart;
  18839. IR::Instr * instrEnd = instrLast->m_next;
  18840. IR::Instr * lastInstr = nullptr;
  18841. GlobOptBlockData& globOptData = this->currentBlock->globOptData;
  18842. do
  18843. {
  18844. bool mayNeedBailOnImplicitCallsPreOp = !this->IsLoopPrePass()
  18845. && instr->HasAnyImplicitCalls()
  18846. && globOptData.maybeTempObjectSyms != nullptr;
  18847. if (mayNeedBailOnImplicitCallsPreOp)
  18848. {
  18849. IR::Opnd * src1 = instr->GetSrc1();
  18850. if (src1)
  18851. {
  18852. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src1, false);
  18853. IR::Opnd * src2 = instr->GetSrc2();
  18854. if (src2)
  18855. {
  18856. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src2, false);
  18857. }
  18858. }
  18859. }
  18860. IR::Opnd *dst = instr->GetDst();
  18861. if (dst)
  18862. {
  18863. if (dst->IsRegOpnd())
  18864. {
  18865. TrackTempObjectSyms(instr, dst->AsRegOpnd());
  18866. }
  18867. else if (mayNeedBailOnImplicitCallsPreOp)
  18868. {
  18869. instr = GenerateBailOutMarkTempObjectIfNeeded(instr, dst, true);
  18870. }
  18871. }
  18872. lastInstr = instr;
  18873. instr = instr->m_next;
  18874. }
  18875. while (instr != instrEnd);
  18876. return lastInstr;
  18877. }
void
GlobOpt::TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd)
{
    // Maintain the per-block bit vectors of syms that may hold a mark-temp
    // (stack-allocatable) object (maybeTempObjectSyms) and of syms whose
    // stack object may safely have other temps stored into it
    // (canStoreTempObjectSyms). 'opnd' is the instruction's reg dst.

    // If it is marked as dstIsTempObject, we should have mark temped it, or type specialized it to Ld_I4.
    Assert(!instr->dstIsTempObject || ObjectTempVerify::CanMarkTemp(instr, nullptr));
    GlobOptBlockData& globOptData = this->currentBlock->globOptData;
    bool canStoreTemp = false;
    bool maybeTemp = false;
    if (OpCodeAttr::TempObjectProducing(instr->m_opcode))
    {
        maybeTemp = instr->dstIsTempObject;

        // We have to make sure that lower will always generate code to do stack allocation
        // before we can store any other stack instance onto it. Otherwise, we would not
        // walk object to box the stack property.
        canStoreTemp = instr->dstIsTempObject && ObjectTemp::CanStoreTemp(instr);
    }
    else if (OpCodeAttr::TempObjectTransfer(instr->m_opcode))
    {
        // A transfer opcode propagates temp-ness from its sources to its dst.
        // Need to check both sources, GetNewScObject has two srcs for transfer.
        // No need to get var equiv sym here as transfer of type spec value does not transfer a mark temp object.
        // maybeTemp: true if ANY source may be a temp; canStoreTemp: only if
        // src1 can store temp AND (no src2, or src2 can store temp too).
        maybeTemp = globOptData.maybeTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            || (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id)));
        canStoreTemp = globOptData.canStoreTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            && (!instr->GetSrc2() || (instr->GetSrc2()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))));
        Assert(!canStoreTemp || instr->dstIsTempObject);
        Assert(!maybeTemp || instr->dstIsTempObject);
    }

    // Need to get the var equiv sym as assignment of type specialized sym kill the var sym value anyway.
    StackSym * sym = opnd->m_sym;
    if (!sym->IsVar())
    {
        sym = sym->GetVarEquivSym(nullptr);
        if (sym == nullptr)
        {
            // No var version of this type-spec sym exists; nothing to track.
            return;
        }
    }

    SymID symId = sym->m_id;
    if (maybeTemp)
    {
        // Only var sym should be temp objects
        Assert(opnd->m_sym == sym);

        // Lazily allocate the bit vector on first use.
        if (globOptData.maybeTempObjectSyms == nullptr)
        {
            globOptData.maybeTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        }
        globOptData.maybeTempObjectSyms->Set(symId);

        if (canStoreTemp)
        {
            if (instr->m_opcode == Js::OpCode::NewScObjectLiteral && !this->IsLoopPrePass())
            {
                // For object literal, we install the final type up front.
                // If there are bailout before we finish initializing all the fields, we need to
                // zero out the rest if we stack allocate the literal, so that the boxing would not
                // try to box trash pointer in the properties.

                // Although object Literal initialization can be done lexically, BailOnNoProfile may cause some path
                // to disappear. Doing it flow-based makes it easier to stop propagating those entries.
                IR::IntConstOpnd * propertyArrayIdOpnd = instr->GetSrc1()->AsIntConstOpnd();
                const Js::PropertyIdArray * propIds = instr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());

                // Duplicates are removed by parser
                Assert(!propIds->hadDuplicates);

                if (globOptData.stackLiteralInitFldDataMap == nullptr)
                {
                    globOptData.stackLiteralInitFldDataMap = JitAnew(alloc, StackLiteralInitFldDataMap, alloc);
                }
                else
                {
                    Assert(!globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
                }
                // Start tracking InitFld progress for this literal (0 fields initialized so far).
                StackLiteralInitFldData data = { propIds, 0};
                globOptData.stackLiteralInitFldDataMap->AddNew(sym, data);
            }

            if (globOptData.canStoreTempObjectSyms == nullptr)
            {
                globOptData.canStoreTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
            }
            globOptData.canStoreTempObjectSyms->Set(symId);
        }
        else if (globOptData.canStoreTempObjectSyms)
        {
            // Still maybe-temp, but not safe to store temps into it anymore.
            globOptData.canStoreTempObjectSyms->Clear(symId);
        }
    }
    else
    {
        // The dst is definitely not a temp object: kill both bits for the sym.
        Assert(!canStoreTemp);
        if (globOptData.maybeTempObjectSyms)
        {
            if (globOptData.canStoreTempObjectSyms)
            {
                globOptData.canStoreTempObjectSyms->Clear(symId);
            }
            globOptData.maybeTempObjectSyms->Clear(symId);
        }
        else
        {
            // canStoreTempObjectSyms is a subset of maybeTempObjectSyms.
            Assert(!globOptData.canStoreTempObjectSyms);
        }

        // The symbol is being assigned to, the sym shouldn't still be in the stackLiteralInitFldDataMap
        Assert(this->IsLoopPrePass() ||
            globOptData.stackLiteralInitFldDataMap == nullptr
            || globOptData.stackLiteralInitFldDataMap->Count() == 0
            || !globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
    }
}
IR::Instr *
GlobOpt::GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst)
{
    // If 'opnd' uses a sym that may hold a mark-temp object, attach (or insert)
    // a BailOutMarkTempObject bailout on 'instr' and mark the operand as
    // can-store-temp where appropriate. Returns the (possibly replaced)
    // instruction; callers must continue from the returned pointer.
    Assert(opnd);
    Assert(isDst == (opnd == instr->GetDst()));
    Assert(opnd != instr->GetDst() || !opnd->IsRegOpnd());
    Assert(!this->IsLoopPrePass());
    Assert(instr->HasAnyImplicitCalls());

    // Only dst reg opnd opcode or ArgOut_A should have dstIsTempObject marked
    Assert(!isDst || !instr->dstIsTempObject || instr->m_opcode == Js::OpCode::ArgOut_A);

    // Post-op implicit call shouldn't have installed yet
    Assert(!instr->HasBailOutInfo() || (instr->GetBailOutKind() & IR::BailOutKindBits) != IR::BailOutOnImplicitCalls);

    GlobOptBlockData& globOptData = this->currentBlock->globOptData;
    Assert(globOptData.maybeTempObjectSyms != nullptr);

    IR::PropertySymOpnd * propertySymOpnd = nullptr;
    StackSym * stackSym = ObjectTemp::GetStackSym(opnd, &propertySymOpnd);

    // It is okay to not get the var equiv sym here, as use of a type specialized sym is not use of the temp object
    // so no need to add mark temp bailout.
    // TempObjectSysm doesn't contain any type spec sym, so we will get false here for all type spec sym.
    if (stackSym && globOptData.maybeTempObjectSyms->Test(stackSym->m_id))
    {
        if (instr->HasBailOutInfo())
        {
            // Piggyback on the existing bailout.
            instr->SetBailOutKind(instr->GetBailOutKind() | IR::BailOutMarkTempObject);
        }
        else
        {
            // Insert the pre-op bailout; if it is not a direct field access do nothing, don't check the dst yet.
            // SetTypeCheckBailout will clear this out if it is direct field access.
            if (isDst
                || (instr->m_opcode == Js::OpCode::FromVar && !opnd->GetValueType().IsPrimitive())
                || propertySymOpnd == nullptr
                || !propertySymOpnd->IsTypeCheckProtected())
            {
                // May replace 'instr' with a bailout-carrying clone.
                this->GenerateBailAtOperation(&instr, IR::BailOutMarkTempObject);
            }
        }

        if (!opnd->IsRegOpnd() && (!isDst || (globOptData.canStoreTempObjectSyms && globOptData.canStoreTempObjectSyms->Test(stackSym->m_id))))
        {
            // If this opnd is a dst, that means that the object pointer is a stack object,
            // and we can store temp object/number on it.
            // If the opnd is a src, that means that the object pointer may be a stack object
            // so the load may be a temp object/number and we need to track its use.

            // Don't mark start of indir as can store temp, because we don't actually know
            // what it is assigning to.
            if (!isDst || !opnd->IsIndirOpnd())
            {
                opnd->SetCanStoreTemp();
            }

            if (propertySymOpnd)
            {
                // Track initfld of stack literals
                if (isDst && instr->m_opcode == Js::OpCode::InitFld)
                {
                    const Js::PropertyId propertyId = propertySymOpnd->m_sym->AsPropertySym()->m_propertyId;

                    // We don't need to track numeric properties init
                    if (!this->func->GetThreadContextInfo()->IsNumericProperty(propertyId))
                    {
                        DebugOnly(bool found = false);
                        // Advance the InitFld progress counter for this stack
                        // literal; remove the entry once all fields are done.
                        globOptData.stackLiteralInitFldDataMap->RemoveIf(stackSym,
                            [&](StackSym * key, StackLiteralInitFldData & data)
                            {
                                DebugOnly(found = true);
                                Assert(key == stackSym);
                                Assert(data.currentInitFldCount < data.propIds->count);

                                if (data.propIds->elements[data.currentInitFldCount] != propertyId)
                                {
#if DBG
                                    // Out-of-order InitFld is only legal for a
                                    // re-initialization of an earlier property.
                                    bool duplicate = false;
                                    for (uint i = 0; i < data.currentInitFldCount; i++)
                                    {
                                        if (data.propIds->elements[i] == propertyId)
                                        {
                                            duplicate = true;
                                            break;
                                        }
                                    }
                                    Assert(duplicate);
#endif
                                    // duplicate initialization
                                    return false;
                                }
                                bool finished = (++data.currentInitFldCount == data.propIds->count);
#if DBG
                                if (finished)
                                {
                                    // We can still track the finished stack literal InitFld lexically.
                                    this->finishedStackLiteralInitFld->Set(stackSym->m_id);
                                }
#endif
                                return finished;
                            });
                        // We might still see InitFld even we have finished with all the property Id because
                        // of duplicate entries at the end
                        Assert(found || finishedStackLiteralInitFld->Test(stackSym->m_id));
                    }
                }
            }
        }
    }
    return instr;
}
  19087. void
  19088. GlobOpt::KillStateForGeneratorYield()
  19089. {
  19090. GlobOptBlockData* globOptData = &this->currentBlock->globOptData;
  19091. /*
  19092. TODO[generators][ianhall]: Do a ToVar on any typespec'd syms before the bailout so that we can enable typespec in generators without bailin having to restore typespec'd values
  19093. FOREACH_BITSET_IN_SPARSEBV(symId, globOptData->liveInt32Syms)
  19094. {
  19095. this->ToVar(instr, , this->currentBlock, , );
  19096. }
  19097. NEXT_BITSET_IN_SPARSEBV;
  19098. FOREACH_BITSET_IN_SPARSEBV(symId, globOptData->liveInt32Syms)
  19099. {
  19100. this->ToVar(instr, , this->currentBlock, , );
  19101. }
  19102. NEXT_BITSET_IN_SPARSEBV;
  19103. */
  19104. FOREACH_GLOBHASHTABLE_ENTRY(bucket, globOptData->symToValueMap)
  19105. {
  19106. ValueType type = bucket.element->GetValueInfo()->Type().ToLikely();
  19107. bucket.element = this->NewGenericValue(type);
  19108. }
  19109. NEXT_GLOBHASHTABLE_ENTRY;
  19110. globOptData->exprToValueMap->ClearAll();
  19111. globOptData->liveFields->ClearAll();
  19112. globOptData->liveArrayValues->ClearAll();
  19113. if (globOptData->maybeWrittenTypeSyms)
  19114. {
  19115. globOptData->maybeWrittenTypeSyms->ClearAll();
  19116. }
  19117. globOptData->isTempSrc->ClearAll();
  19118. globOptData->liveInt32Syms->ClearAll();
  19119. globOptData->liveLossyInt32Syms->ClearAll();
  19120. globOptData->liveFloat64Syms->ClearAll();
  19121. // SIMD_JS
  19122. globOptData->liveSimd128F4Syms->ClearAll();
  19123. globOptData->liveSimd128I4Syms->ClearAll();
  19124. if (globOptData->hoistableFields)
  19125. {
  19126. globOptData->hoistableFields->ClearAll();
  19127. }
  19128. // Keep globOptData->liveVarSyms as is
  19129. // Keep globOptData->argObjSyms as is
  19130. // MarkTemp should be disabled for generator functions for now
  19131. Assert(globOptData->maybeTempObjectSyms == nullptr || globOptData->maybeTempObjectSyms->IsEmpty());
  19132. Assert(globOptData->canStoreTempObjectSyms == nullptr || globOptData->canStoreTempObjectSyms->IsEmpty());
  19133. globOptData->valuesToKillOnCalls->Clear();
  19134. if (globOptData->inductionVariables)
  19135. {
  19136. globOptData->inductionVariables->Clear();
  19137. }
  19138. if (globOptData->availableIntBoundChecks)
  19139. {
  19140. globOptData->availableIntBoundChecks->Clear();
  19141. }
  19142. // Keep bailout data as is
  19143. globOptData->hasCSECandidates = false;
  19144. }
  19145. LoopCount *
  19146. GlobOpt::GetOrGenerateLoopCountForMemOp(Loop *loop)
  19147. {
  19148. LoopCount *loopCount = loop->loopCount;
  19149. if (loopCount && !loopCount->HasGeneratedLoopCountSym())
  19150. {
  19151. Assert(loop->bailOutInfo);
  19152. EnsureBailTarget(loop);
  19153. GenerateLoopCountPlusOne(loop, loopCount);
  19154. }
  19155. return loopCount;
  19156. }
  19157. IR::Opnd *
  19158. GlobOpt::GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr)
  19159. {
  19160. LoopCount *loopCount = loop->loopCount;
  19161. IR::Opnd *sizeOpnd = nullptr;
  19162. Assert(loopCount);
  19163. Assert(loop->memOpInfo->inductionVariableOpndPerUnrollMap);
  19164. if (loop->memOpInfo->inductionVariableOpndPerUnrollMap->TryGetValue(unroll, &sizeOpnd))
  19165. {
  19166. return sizeOpnd;
  19167. }
  19168. Func *localFunc = loop->GetFunc();
  19169. const auto InsertInstr = [&](IR::Instr *instr)
  19170. {
  19171. if (insertBeforeInstr == nullptr)
  19172. {
  19173. loop->landingPad->InsertAfter(instr);
  19174. }
  19175. else
  19176. {
  19177. insertBeforeInstr->InsertBefore(instr);
  19178. }
  19179. };
  19180. if (loopCount->LoopCountMinusOneSym())
  19181. {
  19182. IRType type = loopCount->LoopCountSym()->GetType();
  19183. // Loop count is off by one, so add one
  19184. IR::RegOpnd *loopCountOpnd = IR::RegOpnd::New(loopCount->LoopCountSym(), type, localFunc);
  19185. sizeOpnd = loopCountOpnd;
  19186. if (unroll != 1)
  19187. {
  19188. sizeOpnd = IR::RegOpnd::New(TyUint32, this->func);
  19189. IR::Opnd *unrollOpnd = IR::IntConstOpnd::New(unroll, type, localFunc);
  19190. InsertInstr(IR::Instr::New(Js::OpCode::Mul_I4,
  19191. sizeOpnd,
  19192. loopCountOpnd,
  19193. unrollOpnd,
  19194. localFunc));
  19195. }
  19196. }
  19197. else
  19198. {
  19199. uint size = (loopCount->LoopCountMinusOneConstantValue() + 1) * unroll;
  19200. sizeOpnd = IR::IntConstOpnd::New(size, IRType::TyUint32, localFunc);
  19201. }
  19202. loop->memOpInfo->inductionVariableOpndPerUnrollMap->Add(unroll, sizeOpnd);
  19203. return sizeOpnd;
  19204. }
  19205. IR::RegOpnd*
  19206. GlobOpt::GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr)
  19207. {
  19208. IR::RegOpnd *startIndexOpnd = nullptr;
  19209. Func *localFunc = loop->GetFunc();
  19210. IRType type = indexOpnd->GetType();
  19211. const int cacheIndex = ((int)isInductionVariableChangeIncremental << 1) | (int)bIndexAlreadyChanged;
  19212. if (loop->memOpInfo->startIndexOpndCache[cacheIndex])
  19213. {
  19214. return loop->memOpInfo->startIndexOpndCache[cacheIndex];
  19215. }
  19216. const auto InsertInstr = [&](IR::Instr *instr)
  19217. {
  19218. if (insertBeforeInstr == nullptr)
  19219. {
  19220. loop->landingPad->InsertAfter(instr);
  19221. }
  19222. else
  19223. {
  19224. insertBeforeInstr->InsertBefore(instr);
  19225. }
  19226. };
  19227. startIndexOpnd = IR::RegOpnd::New(type, localFunc);
  19228. // If the 2 are different we can simply use indexOpnd
  19229. if (isInductionVariableChangeIncremental != bIndexAlreadyChanged)
  19230. {
  19231. InsertInstr(IR::Instr::New(Js::OpCode::Ld_A,
  19232. startIndexOpnd,
  19233. indexOpnd,
  19234. localFunc));
  19235. }
  19236. else
  19237. {
  19238. // Otherwise add 1 to it
  19239. InsertInstr(IR::Instr::New(Js::OpCode::Add_I4,
  19240. startIndexOpnd,
  19241. indexOpnd,
  19242. IR::IntConstOpnd::New(1, type, localFunc, true),
  19243. localFunc));
  19244. }
  19245. if (!isInductionVariableChangeIncremental)
  19246. {
  19247. InsertInstr(IR::Instr::New(Js::OpCode::Sub_I4,
  19248. startIndexOpnd,
  19249. startIndexOpnd,
  19250. sizeOpnd,
  19251. localFunc));
  19252. }
  19253. loop->memOpInfo->startIndexOpndCache[cacheIndex] = startIndexOpnd;
  19254. return startIndexOpnd;
  19255. }
  19256. IR::Instr*
  19257. GlobOpt::FindUpperBoundsCheckInstr(IR::Instr* fromInstr)
  19258. {
  19259. IR::Instr *upperBoundCheck = fromInstr;
  19260. do
  19261. {
  19262. upperBoundCheck = upperBoundCheck->m_prev;
  19263. Assert(upperBoundCheck);
  19264. Assert(!upperBoundCheck->IsLabelInstr());
  19265. } while (upperBoundCheck->m_opcode != Js::OpCode::BoundCheck);
  19266. return upperBoundCheck;
  19267. }
  19268. IR::Instr*
  19269. GlobOpt::FindArraySegmentLoadInstr(IR::Instr* fromInstr)
  19270. {
  19271. IR::Instr *headSegmentLengthLoad = fromInstr;
  19272. do
  19273. {
  19274. headSegmentLengthLoad = headSegmentLengthLoad->m_prev;
  19275. Assert(headSegmentLengthLoad);
  19276. Assert(!headSegmentLengthLoad->IsLabelInstr());
  19277. } while (headSegmentLengthLoad->m_opcode != Js::OpCode::LdIndir);
  19278. return headSegmentLengthLoad;
  19279. }
void
GlobOpt::RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block)
{
    // Remove the original element load/store that 'memopInstr' replaces, along
    // with the helper instructions emitted for it (bound checks, segment
    // loads, array bailouts). The load/store itself becomes a ByteCodeUses.
    Assert(srcInstr && (srcInstr->m_opcode == Js::OpCode::LdElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict));
    Assert(memopInstr && (memopInstr->m_opcode == Js::OpCode::Memcopy || memopInstr->m_opcode == Js::OpCode::Memset));
    Assert(block);

    // A store corresponds to the memop's dst side; a load to its src1 side.
    const bool isDst = srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict;
    IR::RegOpnd* opnd = (isDst ? memopInstr->GetDst() : memopInstr->GetSrc1())->AsIndirOpnd()->GetBaseOpnd();
    IR::ArrayRegOpnd* arrayOpnd = opnd->IsArrayRegOpnd() ? opnd->AsArrayRegOpnd() : nullptr;

    // Walk 'topInstr' upwards to the first helper instruction belonging to
    // srcInstr; everything in [topInstr, srcInstr) is then removed below.
    IR::Instr* topInstr = srcInstr;
    if (srcInstr->extractedUpperBoundCheckWithoutHoisting)
    {
        IR::Instr *upperBoundCheck = FindUpperBoundsCheckInstr(srcInstr);
        Assert(upperBoundCheck && upperBoundCheck != srcInstr);
        topInstr = upperBoundCheck;
    }

    if (srcInstr->loadedArrayHeadSegmentLength && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        IR::Instr *arrayLoadSegmentHeadLength = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHeadLength);
        topInstr = arrayLoadSegmentHeadLength;
        arrayOpnd->RemoveHeadSegmentLengthSym();
    }

    if (srcInstr->loadedArrayHeadSegment && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        IR::Instr *arrayLoadSegmentHead = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHead);
        topInstr = arrayLoadSegmentHead;
        arrayOpnd->RemoveHeadSegmentSym();
    }

    // If no bounds check are present, simply look up for instruction added for instrumentation
    if(topInstr == srcInstr)
    {
        bool checkPrev = true;
        while (checkPrev)
        {
            switch (topInstr->m_prev->m_opcode)
            {
            case Js::OpCode::BailOnNotArray:
            case Js::OpCode::NoImplicitCallUses:
            case Js::OpCode::ByteCodeUses:
                topInstr = topInstr->m_prev;
                checkPrev = !!topInstr->m_prev;
                break;
            default:
                checkPrev = false;
                break;
            }
        }
    }

    // Remove everything from topInstr down to (but not including) srcInstr,
    // keeping ByteCodeUses instructions in place.
    while (topInstr != srcInstr)
    {
        IR::Instr* removeInstr = topInstr;
        topInstr = topInstr->m_next;
        Assert(
            removeInstr->m_opcode == Js::OpCode::BailOnNotArray ||
            removeInstr->m_opcode == Js::OpCode::NoImplicitCallUses ||
            removeInstr->m_opcode == Js::OpCode::ByteCodeUses ||
            removeInstr->m_opcode == Js::OpCode::LdIndir ||
            removeInstr->m_opcode == Js::OpCode::BoundCheck
            );
        if (removeInstr->m_opcode != Js::OpCode::ByteCodeUses)
        {
            block->RemoveInstr(removeInstr);
        }
    }

    // Finally, turn the load/store itself into a ByteCodeUses.
    this->ConvertToByteCodeUses(srcInstr);
}
  19348. void
  19349. GlobOpt::GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType)
  19350. {
  19351. Assert(instr && (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict));
  19352. IR::Opnd* arrayOpnd = instr->m_opcode == Js::OpCode::LdElemI_A ? instr->GetSrc1() : instr->GetDst();
  19353. Assert(arrayOpnd->IsIndirOpnd());
  19354. IR::IndirOpnd* indirArrayOpnd = arrayOpnd->AsIndirOpnd();
  19355. IR::RegOpnd* baseOpnd = (IR::RegOpnd*)indirArrayOpnd->GetBaseOpnd();
  19356. IR::RegOpnd* indexOpnd = (IR::RegOpnd*)indirArrayOpnd->GetIndexOpnd();
  19357. Assert(baseOpnd);
  19358. Assert(indexOpnd);
  19359. // Process Out Params
  19360. base = baseOpnd;
  19361. index = indexOpnd;
  19362. arrayType = indirArrayOpnd->GetType();
  19363. }
void
GlobOpt::EmitMemop(Loop * loop, LoopCount *loopCount, const MemOpEmitData* emitData)
{
    // Emit a single Memset/Memcopy instruction (with bailout) for a recognized
    // memop loop pattern, then remove the original element store (and, for
    // memcopy, the element load) from the loop body.
    Assert(emitData);
    Assert(emitData->candidate);
    Assert(emitData->stElemInstr);
    Assert(emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A || emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A_Strict);
    IR::BailOutKind bailOutKind = emitData->bailOutKind;

    const byte unroll = emitData->inductionVar.unroll;
    Assert(unroll == 1);
    const bool isInductionVariableChangeIncremental = emitData->inductionVar.isIncremental;
    const bool bIndexAlreadyChanged = emitData->candidate->bIndexAlreadyChanged;

    // Destination array base/index/type come from the store instruction.
    IR::RegOpnd *baseOpnd = nullptr;
    IR::RegOpnd *indexOpnd = nullptr;
    IRType dstType;
    GetMemOpSrcInfo(loop, emitData->stElemInstr, baseOpnd, indexOpnd, dstType);

    Func *localFunc = loop->GetFunc();

    // Handle bailout info
    EnsureBailTarget(loop);
    Assert(bailOutKind != IR::BailOutInvalid);

    // Keep only Array bits bailOuts. Consider handling these bailouts instead of simply ignoring them
    bailOutKind &= IR::BailOutForArrayBits;

    // Add our custom bailout to handle Op_MemCopy return value.
    bailOutKind |= IR::BailOutOnMemOpError;

    BailOutInfo *const bailOutInfo = loop->bailOutInfo;
    Assert(bailOutInfo);

    // The memop (and its index/size computations) is inserted before the
    // loop's bailout instruction.
    IR::Instr *insertBeforeInstr = bailOutInfo->bailOutInstr;
    Assert(insertBeforeInstr);
    IR::Opnd *sizeOpnd = GenerateInductionVariableChangeForMemOp(loop, unroll, insertBeforeInstr);
    IR::RegOpnd *startIndexOpnd = GenerateStartIndexOpndForMemop(loop, indexOpnd, sizeOpnd, isInductionVariableChangeIncremental, bIndexAlreadyChanged, insertBeforeInstr);
    IR::IndirOpnd* dstOpnd = IR::IndirOpnd::New(baseOpnd, startIndexOpnd, dstType, localFunc);

    IR::Opnd *src1;
    const bool isMemset = emitData->candidate->IsMemSet();

    // Get the source according to the memop type
    if (isMemset)
    {
        // Memset source: either the sym being stored, or a constant address.
        MemSetEmitData* data = (MemSetEmitData*)emitData;
        const Loop::MemSetCandidate* candidate = data->candidate->AsMemSet();
        if (candidate->srcSym)
        {
            IR::RegOpnd* regSrc = IR::RegOpnd::New(candidate->srcSym, candidate->srcSym->GetType(), func);
            regSrc->SetIsJITOptimizedReg(true);
            src1 = regSrc;
        }
        else
        {
            src1 = IR::AddrOpnd::New(candidate->constant.ToVar(localFunc), IR::AddrOpndKindConstantAddress, localFunc);
        }
    }
    else
    {
        // Memcopy source: an indir over the load's base array, sharing the
        // same start index as the destination.
        Assert(emitData->candidate->IsMemCopy());

        MemCopyEmitData* data = (MemCopyEmitData*)emitData;
        Assert(data->ldElemInstr);
        Assert(data->ldElemInstr->m_opcode == Js::OpCode::LdElemI_A);

        IR::RegOpnd *srcBaseOpnd = nullptr;
        IR::RegOpnd *srcIndexOpnd = nullptr;
        IRType srcType;
        GetMemOpSrcInfo(loop, data->ldElemInstr, srcBaseOpnd, srcIndexOpnd, srcType);
        Assert(GetVarSymID(srcIndexOpnd->GetStackSym()) == GetVarSymID(indexOpnd->GetStackSym()));

        src1 = IR::IndirOpnd::New(srcBaseOpnd, startIndexOpnd, srcType, localFunc);
    }

    // Generate memcopy
    IR::Instr* memopInstr = IR::BailOutInstr::New(isMemset ? Js::OpCode::Memset : Js::OpCode::Memcopy, bailOutKind, bailOutInfo, localFunc);
    memopInstr->SetDst(dstOpnd);
    memopInstr->SetSrc1(src1);
    memopInstr->SetSrc2(sizeOpnd);
    insertBeforeInstr->InsertBefore(memopInstr);

#if DBG_DUMP
    if (DO_MEMOP_TRACE())
    {
        // Trace the emitted memop (debug builds only).
        char valueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseOpnd->GetValueType().ToString(valueTypeStr);
        const int loopCountBufSize = 16;
        char16 loopCountBuf[loopCountBufSize];
        if (loopCount->LoopCountMinusOneSym())
        {
            swprintf_s(loopCountBuf, _u("s%u"), loopCount->LoopCountMinusOneSym()->m_id);
        }
        else
        {
            swprintf_s(loopCountBuf, _u("%u"), loopCount->LoopCountMinusOneConstantValue() + 1);
        }
        if (isMemset)
        {
            const Loop::MemSetCandidate* candidate = emitData->candidate->AsMemSet();
            const int constBufSize = 32;
            char16 constBuf[constBufSize];
            if (candidate->srcSym)
            {
                swprintf_s(constBuf, _u("s%u"), candidate->srcSym->m_id);
            }
            else
            {
                // Format the memset constant according to its IR type.
                switch (candidate->constant.type)
                {
                case TyInt8:
                case TyInt16:
                case TyInt32:
                case TyInt64:
                    swprintf_s(constBuf, sizeof(IntConstType) == 8 ? _u("%lld") : _u("%d"), candidate->constant.u.intConst.value);
                    break;
                case TyFloat32:
                case TyFloat64:
                    swprintf_s(constBuf, _u("%.4f"), candidate->constant.u.floatConst.value);
                    break;
                case TyVar:
                    swprintf_s(constBuf, sizeof(Js::Var) == 8 ? _u("0x%.16llX") : _u("0x%.8X"), candidate->constant.u.varConst.value);
                    break;
                default:
                    AssertMsg(false, "Unsupported constant type");
                    swprintf_s(constBuf, _u("Unknown"));
                    break;
                }
            }
            TRACE_MEMOP_PHASE(MemSet, loop, emitData->stElemInstr,
                              _u("ValueType: %S, Base: s%u, Index: s%u, Constant: %s, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              constBuf,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
        else
        {
            const Loop::MemCopyCandidate* candidate = emitData->candidate->AsMemCopy();
            TRACE_MEMOP_PHASE(MemCopy, loop, emitData->stElemInstr,
                              _u("ValueType: %S, StBase: s%u, Index: s%u, LdBase: s%u, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              candidate->ldBase,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
    }
#endif

    // Remove the original store (and load, for memcopy) that the memop replaces.
    RemoveMemOpSrcInstr(memopInstr, emitData->stElemInstr, emitData->block);
    if (!isMemset)
    {
        RemoveMemOpSrcInstr(memopInstr, ((MemCopyEmitData*)emitData)->ldElemInstr, emitData->block);
    }
}
  19508. bool
  19509. GlobOpt::InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, MemSetEmitData* emitData, bool& errorInInstr)
  19510. {
  19511. Assert(emitData && emitData->candidate && emitData->candidate->IsMemSet());
  19512. Loop::MemSetCandidate* candidate = (Loop::MemSetCandidate*)emitData->candidate;
  19513. if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
  19514. {
  19515. if (instr->GetDst()->IsIndirOpnd()
  19516. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base)
  19517. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  19518. )
  19519. {
  19520. Assert(instr->IsProfiledInstr());
  19521. emitData->stElemInstr = instr;
  19522. emitData->bailOutKind = instr->GetBailOutKind();
  19523. return true;
  19524. }
  19525. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan StElemI_A detected"));
  19526. errorInInstr = true;
  19527. }
  19528. else if (instr->m_opcode == Js::OpCode::LdElemI_A)
  19529. {
  19530. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan LdElemI_A detected"));
  19531. errorInInstr = true;
  19532. }
  19533. return false;
  19534. }
  19535. bool
  19536. GlobOpt::InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, MemCopyEmitData* emitData, bool& errorInInstr)
  19537. {
  19538. Assert(emitData && emitData->candidate && emitData->candidate->IsMemCopy());
  19539. Loop::MemCopyCandidate* candidate = (Loop::MemCopyCandidate*)emitData->candidate;
  19540. if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
  19541. {
  19542. if (
  19543. instr->GetDst()->IsIndirOpnd() &&
  19544. (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base) &&
  19545. (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  19546. )
  19547. {
  19548. Assert(instr->IsProfiledInstr());
  19549. emitData->stElemInstr = instr;
  19550. emitData->bailOutKind = instr->GetBailOutKind();
  19551. // Still need to find the LdElem
  19552. return false;
  19553. }
  19554. TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan StElemI_A detected"));
  19555. errorInInstr = true;
  19556. }
  19557. else if (instr->m_opcode == Js::OpCode::LdElemI_A)
  19558. {
  19559. if (
  19560. emitData->stElemInstr &&
  19561. instr->GetSrc1()->IsIndirOpnd() &&
  19562. (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->ldBase) &&
  19563. (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  19564. )
  19565. {
  19566. Assert(instr->IsProfiledInstr());
  19567. emitData->ldElemInstr = instr;
  19568. ValueType stValueType = emitData->stElemInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
  19569. ValueType ldValueType = emitData->ldElemInstr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
  19570. if (stValueType != ldValueType)
  19571. {
  19572. #if DBG_DUMP
  19573. char16 stValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  19574. stValueType.ToString(stValueTypeStr);
  19575. char16 ldValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  19576. ldValueType.ToString(ldValueTypeStr);
  19577. TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("for mismatch in Load(%s) and Store(%s) value type"), ldValueTypeStr, stValueTypeStr);
  19578. #endif
  19579. errorInInstr = true;
  19580. return false;
  19581. }
  19582. // We found both instruction for this candidate
  19583. return true;
  19584. }
  19585. TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan LdElemI_A detected"));
  19586. errorInInstr = true;
  19587. }
  19588. return false;
  19589. }
// The caller is responsible to free the memory allocated between inOrderEmitData[iEmitData -> end]
bool
GlobOpt::ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) MemOpEmitData** inOrderEmitData, int& iEmitData)
{
    // Walks the loop body backward, matching every recorded memop candidate with
    // its actual StElem/LdElem instructions. On success, iEmitData is driven down
    // to 0 and inOrderEmitData holds one emit-data entry per candidate, in program
    // order (filled back-to-front). On failure, entries [iEmitData, end) have
    // already been stored and must be freed by the caller (see header comment).
    AnalysisAssert(iEmitData == (int)loop->memOpInfo->candidates->Count());
    // We iterate over the second block of the loop only. MemOp Works only if the loop has exactly 2 blocks
    Assert(loop->blockList.HasTwo());
    Loop::MemOpList::Iterator iter(loop->memOpInfo->candidates);
    BasicBlock* bblock = loop->blockList.Head()->next;
    Loop::MemOpCandidate* candidate = nullptr;
    MemOpEmitData* emitData = nullptr;
    // Iterate backward because the list of candidate is reversed
    FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, bblock)
    {
        // 'candidate' is nullptr exactly when the previous candidate was fully
        // matched (or at the start), so advance to the next one lazily.
        if (!candidate)
        {
            // Time to check next candidate
            if (!iter.Next())
            {
                // We have been through the whole list of candidates, finish
                break;
            }
            candidate = iter.Data();
            if (!candidate)
            {
                continue;
            }
            // Common check for memset and memcopy
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
            // Get the inductionVariable changeInfo
            if (!loop->memOpInfo->inductionVariableChangeInfoMap->TryGetValue(candidate->index, &inductionVariableChangeInfo))
            {
                // The candidate's index sym was never recorded as an induction
                // variable for this loop; the memop transformation cannot apply.
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): no induction variable"), candidate->base);
                return false;
            }
            if (inductionVariableChangeInfo.unroll != candidate->count)
            {
                // The index must advance exactly once per candidate access;
                // otherwise the element range covered would be miscomputed.
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): not matching unroll count"), candidate->base);
                return false;
            }
            if (candidate->IsMemSet())
            {
                Assert(!PHASE_OFF(Js::MemSetPhase, this->func));
                emitData = JitAnew(this->alloc, MemSetEmitData);
            }
            else
            {
                Assert(!PHASE_OFF(Js::MemCopyPhase, this->func));
                // Specific check for memcopy
                Assert(candidate->IsMemCopy());
                Loop::MemCopyCandidate* memcopyCandidate = candidate->AsMemCopy();
                // Both sides of the copy must have been resolved to valid syms,
                // and the load must occur as many times as the store.
                if (memcopyCandidate->base == Js::Constants::InvalidSymID
                    || memcopyCandidate->ldBase == Js::Constants::InvalidSymID
                    || (memcopyCandidate->ldCount != memcopyCandidate->count))
                {
                    TRACE_MEMOP_PHASE(MemCopy, loop, nullptr, _u("(s%d): not matching ldElem and stElem"), candidate->base);
                    return false;
                }
                emitData = JitAnew(this->alloc, MemCopyEmitData);
            }
            Assert(emitData);
            emitData->block = bblock;
            emitData->inductionVar = inductionVariableChangeInfo;
            emitData->candidate = candidate;
        }
        bool errorInInstr = false;
        // Dispatch on candidate kind; the Inspect* helpers record the matched
        // StElem/LdElem into emitData and report invalidating instructions.
        bool candidateFound = candidate->IsMemSet() ?
            InspectInstrForMemSetCandidate(loop, instr, (MemSetEmitData*)emitData, errorInInstr)
            : InspectInstrForMemCopyCandidate(loop, instr, (MemCopyEmitData*)emitData, errorInInstr);
        if (errorInInstr)
        {
            // Only the in-flight emitData is freed here; entries already placed
            // in inOrderEmitData are the caller's responsibility (header comment).
            JitAdelete(this->alloc, emitData);
            return false;
        }
        if (candidateFound)
        {
            AnalysisAssert(iEmitData > 0);
            if (iEmitData == 0)
            {
                // Explicit for OACR
                break;
            }
            // Fill back-to-front so the array ends up in program order even
            // though both the block walk and the candidate list are reversed.
            inOrderEmitData[--iEmitData] = emitData;
            candidate = nullptr;
            emitData = nullptr;
        }
    } NEXT_INSTR_BACKWARD_IN_BLOCK;
    if (iter.IsValid())
    {
        // Ran out of instructions with candidates still unmatched.
        TRACE_MEMOP(loop, nullptr, _u("Candidates not found in loop while validating"));
        return false;
    }
    return true;
}
  19684. void
  19685. GlobOpt::ProcessMemOp()
  19686. {
  19687. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  19688. {
  19689. if (HasMemOp(loop))
  19690. {
  19691. const int candidateCount = loop->memOpInfo->candidates->Count();
  19692. Assert(candidateCount > 0);
  19693. LoopCount * loopCount = GetOrGenerateLoopCountForMemOp(loop);
  19694. // If loopCount is not available we can not continue with memop
  19695. if (!loopCount || !(loopCount->LoopCountMinusOneSym() || loopCount->LoopCountMinusOneConstantValue()))
  19696. {
  19697. TRACE_MEMOP(loop, nullptr, _u("MemOp skipped for no loop count"));
  19698. loop->doMemOp = false;
  19699. loop->memOpInfo->candidates->Clear();
  19700. continue;
  19701. }
  19702. // The list is reversed, check them and place them in order in the following array
  19703. MemOpEmitData** inOrderCandidates = JitAnewArray(this->alloc, MemOpEmitData*, candidateCount);
  19704. int i = candidateCount;
  19705. if (ValidateMemOpCandidates(loop, inOrderCandidates, i))
  19706. {
  19707. Assert(i == 0);
  19708. // Process the valid MemOp candidate in order.
  19709. for (; i < candidateCount; ++i)
  19710. {
  19711. // Emit
  19712. EmitMemop(loop, loopCount, inOrderCandidates[i]);
  19713. JitAdelete(this->alloc, inOrderCandidates[i]);
  19714. }
  19715. }
  19716. else
  19717. {
  19718. Assert(i != 0);
  19719. for (; i < candidateCount; ++i)
  19720. {
  19721. JitAdelete(this->alloc, inOrderCandidates[i]);
  19722. }
  19723. // One of the memop candidates did not validate. Do not emit for this loop.
  19724. loop->doMemOp = false;
  19725. loop->memOpInfo->candidates->Clear();
  19726. }
  19727. // Free memory
  19728. JitAdeleteArray(this->alloc, candidateCount, inOrderCandidates);
  19729. }
  19730. } NEXT_LOOP_EDITING;
  19731. }
// Specialization letting GlobOpt Value pointers be used in JsUtil collections
// keyed by ValueNumber: the key of a Value is simply its value number.
template<>
ValueNumber JsUtil::ValueToKey<ValueNumber, Value *>::ToKey(Value *const &value)
{
    Assert(value);
    return value->GetValueNumber();
}