GlobOpt.cpp 723 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft Corporation and contributors. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "Backend.h"
#if ENABLE_DEBUG_CONFIG_OPTIONS
// Prints a "Testtrace:" header (phase name, function display name, and debug
// number set) followed by the caller-supplied printf-style message, when test
// tracing for the given phase is enabled on this->func.
// NOTE: expands to a bare if-statement — do not follow a use of this macro
// with a dangling else.
#define TESTTRACE_PHASE_INSTR(phase, instr, ...) \
    if(PHASE_TESTTRACE(phase, this->func)) \
    { \
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE]; \
        Output::Print( \
            _u("Testtrace: %s function %s (%s): "), \
            Js::PhaseNames[phase], \
            instr->m_func->GetJITFunctionBody()->GetDisplayName(), \
            instr->m_func->GetDebugNumberSet(debugStringBuffer)); \
        Output::Print(__VA_ARGS__); \
        Output::Flush(); \
    }
#else // ENABLE_DEBUG_CONFIG_OPTIONS
// Compiles away entirely when debug config options are disabled.
#define TESTTRACE_PHASE_INSTR(phase, instr, ...)
#endif // ENABLE_DEBUG_CONFIG_OPTIONS

#if DBG_DUMP
// True when any MemOp-family trace phase (MemOp, MemSet, MemCopy) is enabled
// for this->func.
#define DO_MEMOP_TRACE() (PHASE_TRACE(Js::MemOpPhase, this->func) ||\
    PHASE_TRACE(Js::MemSetPhase, this->func) ||\
    PHASE_TRACE(Js::MemCopyPhase, this->func))
// True when the umbrella MemOp phase, or the specific sub-phase (the `phase`
// argument token-pasted with "Phase"), is being traced for this->func.
#define DO_MEMOP_TRACE_PHASE(phase) (PHASE_TRACE(Js::MemOpPhase, this->func) || PHASE_TRACE(Js::phase ## Phase, this->func))
// Shared tail of every MemOp trace line: function/loop header, the caller's
// message, then (when `instr` is non-null) the instruction's byte-code offset
// and opcode name.
#define OUTPUT_MEMOP_TRACE(loop, instr, ...) {\
    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];\
    Output::Print(15, _u("Function: %s%s, Loop: %u: "), this->func->GetJITFunctionBody()->GetDisplayName(), this->func->GetDebugNumberSet(debugStringBuffer), loop->GetLoopNumber());\
    Output::Print(__VA_ARGS__);\
    IR::Instr* __instr__ = instr;\
    if(__instr__) __instr__->DumpByteCodeOffset();\
    if(__instr__) Output::Print(_u(" (%s)"), Js::OpCodeUtil::GetOpCodeName(__instr__->m_opcode));\
    Output::Print(_u("\n"));\
    Output::Flush(); \
    }
// Emits a generic "TRACE MemOp:" line when any MemOp tracing is enabled.
#define TRACE_MEMOP(loop, instr, ...) \
    if (DO_MEMOP_TRACE()) {\
        Output::Print(_u("TRACE MemOp:"));\
        OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
    }
// Same as TRACE_MEMOP, but only when the Verbose config flag is set.
#define TRACE_MEMOP_VERBOSE(loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP(loop, instr, __VA_ARGS__)}
// Emits a trace line tagged with the specific sub-phase name (stringized via
// #phase) when that sub-phase or the umbrella MemOp phase is traced.
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...) \
    if (DO_MEMOP_TRACE_PHASE(phase))\
    {\
        Output::Print(_u("TRACE ") _u(#phase) _u(":"));\
        OUTPUT_MEMOP_TRACE(loop, instr, __VA_ARGS__)\
    }
// Same as TRACE_MEMOP_PHASE, but only when the Verbose config flag is set.
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...) if(CONFIG_FLAG(Verbose)) {TRACE_MEMOP_PHASE(phase, loop, instr, __VA_ARGS__)}
#else
// All MemOp tracing compiles away in non-DBG_DUMP builds.
#define DO_MEMOP_TRACE()
#define DO_MEMOP_TRACE_PHASE(phase)
#define OUTPUT_MEMOP_TRACE(loop, instr, ...)
#define TRACE_MEMOP(loop, instr, ...)
#define TRACE_MEMOP_VERBOSE(loop, instr, ...)
#define TRACE_MEMOP_PHASE(phase, loop, instr, ...)
#define TRACE_MEMOP_PHASE_VERBOSE(phase, loop, instr, ...)
#endif
  59. class AutoRestoreVal
  60. {
  61. private:
  62. Value *const originalValue;
  63. Value *const tempValue;
  64. Value * *const valueRef;
  65. public:
  66. AutoRestoreVal(Value *const originalValue, Value * *const tempValueRef)
  67. : originalValue(originalValue), tempValue(*tempValueRef), valueRef(tempValueRef)
  68. {
  69. }
  70. ~AutoRestoreVal()
  71. {
  72. if(*valueRef == tempValue)
  73. {
  74. *valueRef = originalValue;
  75. }
  76. }
  77. PREVENT_COPY(AutoRestoreVal);
  78. };
// GlobOpt constructor: captures the function being optimized and decides, up
// front, which optimizations are enabled for it. Most of the "do*" flags
// cascade — disabling a base flag (e.g. doTypeSpec) disables everything
// derived from it — and several also consult the function's profile info,
// which may have disabled a given optimization after earlier bailouts.
GlobOpt::GlobOpt(Func * func)
    : func(func),
    // Per-pass working state; the maps/lists are allocated lazily later.
    intConstantToStackSymMap(nullptr),
    intConstantToValueMap(nullptr),
    currentValue(FirstNewValueNumber),
    prePassLoop(nullptr),
    alloc(nullptr),
    isCallHelper(false),
    inInlinedBuiltIn(false),
    rootLoopPrePass(nullptr),
    noImplicitCallUsesToInsert(nullptr),
    valuesCreatedForClone(nullptr),
    valuesCreatedForMerge(nullptr),
    instrCountSinceLastCleanUp(0),
    isRecursiveCallOnLandingPad(false),
    updateInductionVariableValueNumber(false),
    isPerformingLoopBackEdgeCompensation(false),
    currentRegion(nullptr),
    changedSymsAfterIncBailoutCandidate(nullptr),
    // Base switch for all type specialization.
    doTypeSpec(
        !IsTypeSpecPhaseOff(func)),
    // Int specialization variants; aggressive-mul and div additionally honor
    // profile-driven disables (per loop body vs. full function).
    doAggressiveIntTypeSpec(
        doTypeSpec &&
        DoAggressiveIntTypeSpec(func)),
    doAggressiveMulIntTypeSpec(
        doTypeSpec &&
        !PHASE_OFF(Js::AggressiveMulIntTypeSpecPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsAggressiveMulIntTypeSpecDisabled(func->IsLoopBody()))),
    doDivIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsDivIntTypeSpecDisabled(func->IsLoopBody()))),
    doLossyIntTypeSpec(
        doTypeSpec &&
        DoLossyIntTypeSpec(func)),
    doFloatTypeSpec(
        doTypeSpec &&
        DoFloatTypeSpec(func)),
    // Array optimizations; everything below keys off doArrayCheckHoist.
    doArrayCheckHoist(
        DoArrayCheckHoist(func)),
    doArrayMissingValueCheckHoist(
        doArrayCheckHoist &&
        DoArrayMissingValueCheckHoist(func)),
    doArraySegmentHoist(
        doArrayCheckHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array), func)),
    doJsArraySegmentHoist(
        doArraySegmentHoist &&
        DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array), func)),
    doArrayLengthHoist(
        doArrayCheckHoist &&
        DoArrayLengthHoist(func)),
    doEliminateArrayAccessHelperCall(
        doArrayCheckHoist &&
        !PHASE_OFF(Js::EliminateArrayAccessHelperCallPhase, func)),
    // Bound-check elimination/hoisting builds on relative int bounds tracking.
    doTrackRelativeIntBounds(
        doAggressiveIntTypeSpec &&
        DoPathDependentValues() &&
        !PHASE_OFF(Js::Phase::TrackRelativeIntBoundsPhase, func)),
    doBoundCheckElimination(
        doTrackRelativeIntBounds &&
        !PHASE_OFF(Js::Phase::BoundCheckEliminationPhase, func)),
    doBoundCheckHoist(
        doEliminateArrayAccessHelperCall &&
        doBoundCheckElimination &&
        DoConstFold() &&
        !PHASE_OFF(Js::Phase::BoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsBoundCheckHoistDisabled(func->IsLoopBody()))),
    doLoopCountBasedBoundCheckHoist(
        doBoundCheckHoist &&
        !PHASE_OFF(Js::Phase::LoopCountBasedBoundCheckHoistPhase, func) &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLoopCountBasedBoundCheckHoistDisabled(func->IsLoopBody()))),
    doPowIntIntTypeSpec(
        doAggressiveIntTypeSpec &&
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsPowIntIntTypeSpecDisabled())),
    doTagChecks(
        (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTagCheckDisabled())),
    isAsmJSFunc(func->GetJITFunctionBody()->IsAsmJsMode())
{
}
  158. void
  159. GlobOpt::BackwardPass(Js::Phase tag)
  160. {
  161. BEGIN_CODEGEN_PHASE(this->func, tag);
  162. ::BackwardPass backwardPass(this->func, this, tag);
  163. backwardPass.Optimize();
  164. END_CODEGEN_PHASE(this->func, tag);
  165. }
// Top-level driver for the global optimizer. Runs, in order: the backward
// phase, the forward pass, the dead-store phase, and tail duplication.
// When glob opt is disabled for this function, only the dead-store phase runs
// (it is still required to compute live registers on back edges).
void
GlobOpt::Optimize()
{
    this->objectTypeSyms = nullptr;
    this->func->argInsCount = this->func->GetInParamsCount() - 1; //Don't include "this" pointer in the count.

    if (!func->DoGlobOpt())
    {
        // No property-equivalence kill sets are needed when not optimizing.
        this->lengthEquivBv = nullptr;
        this->argumentsEquivBv = nullptr;
        this->callerEquivBv = nullptr;

        // Still need to run the dead store phase to calculate the live reg on back edge
        this->BackwardPass(Js::DeadStorePhase);
        CannotAllocateArgumentsObjectOnStack();
        return;
    }

    {
        this->lengthEquivBv = this->func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::length, nullptr); // Used to kill live "length" properties
        this->argumentsEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::arguments, nullptr); // Used to kill live "arguments" properties
        this->callerEquivBv = func->m_symTable->m_propertyEquivBvMap->Lookup(Js::PropertyIds::caller, nullptr); // Used to kill live "caller" properties

        // The backward phase needs the glob opt's allocator to allocate the propertyTypeValueMap
        // in GlobOpt::EnsurePropertyTypeValue and ranges of instructions where int overflow may be ignored.
        // (see BackwardPass::TrackIntUsage)
        // NOTE: these arena allocators are stack-scoped to this block; alloc and
        // tempAlloc dangle after the closing brace, so only the backward/forward
        // passes inside the block may use them.
        PageAllocator * pageAllocator = this->func->m_alloc->GetPageAllocator();
        NoRecoverMemoryJitArenaAllocator localAlloc(_u("BE-GlobOpt"), pageAllocator, Js::Throw::OutOfMemory);
        this->alloc = &localAlloc;
        NoRecoverMemoryJitArenaAllocator localTempAlloc(_u("BE-GlobOpt temp"), pageAllocator, Js::Throw::OutOfMemory);
        this->tempAlloc = &localTempAlloc;

        // The forward passes use info (upwardExposedUses) from the backward pass. This info
        // isn't available for some of the symbols created during the backward pass, or the forward pass.
        // Keep track of the last symbol for which we're guaranteed to have data.
        this->maxInitialSymID = this->func->m_symTable->GetMaxSymID();
        this->BackwardPass(Js::BackwardPhase);
        this->ForwardPass();
    }
    this->BackwardPass(Js::DeadStorePhase);
    this->TailDupPass();
}
  203. bool GlobOpt::ShouldExpectConventionalArrayIndexValue(IR::IndirOpnd *const indirOpnd)
  204. {
  205. Assert(indirOpnd);
  206. if(!indirOpnd->GetIndexOpnd())
  207. {
  208. return indirOpnd->GetOffset() >= 0;
  209. }
  210. IR::RegOpnd *const indexOpnd = indirOpnd->GetIndexOpnd();
  211. if(indexOpnd->m_sym->m_isNotInt)
  212. {
  213. // Typically, single-def or any sym-specific information for type-specialized syms should not be used because all of
  214. // their defs will not have been accounted for until after the forward pass. But m_isNotInt is only ever changed from
  215. // false to true, so it's okay in this case.
  216. return false;
  217. }
  218. StackSym *indexVarSym = indexOpnd->m_sym;
  219. if(indexVarSym->IsTypeSpec())
  220. {
  221. indexVarSym = indexVarSym->GetVarEquivSym(nullptr);
  222. Assert(indexVarSym);
  223. }
  224. else if(!IsLoopPrePass())
  225. {
  226. // Don't use single-def info or const flags for type-specialized syms, as all of their defs will not have been accounted
  227. // for until after the forward pass. Also, don't use the const flags in a loop prepass because the const flags may not
  228. // be up-to-date.
  229. StackSym *const indexSym = indexOpnd->m_sym;
  230. if(indexSym->IsIntConst())
  231. {
  232. return indexSym->GetIntConstValue() >= 0;
  233. }
  234. }
  235. Value *const indexValue = CurrentBlockData()->FindValue(indexVarSym);
  236. if(!indexValue)
  237. {
  238. // Treat it as Uninitialized, assume it's going to be valid
  239. return true;
  240. }
  241. ValueInfo *const indexValueInfo = indexValue->GetValueInfo();
  242. int32 indexConstantValue;
  243. if(indexValueInfo->TryGetIntConstantValue(&indexConstantValue))
  244. {
  245. return indexConstantValue >= 0;
  246. }
  247. if(indexValueInfo->IsUninitialized())
  248. {
  249. // Assume it's going to be valid
  250. return true;
  251. }
  252. return indexValueInfo->HasBeenNumber() && !indexValueInfo->HasBeenFloat();
  253. }
  254. //
  255. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  256. //
  257. ValueType GlobOpt::GetDivValueType(IR::Instr* instr, Value* src1Val, Value* src2Val, bool specialize)
  258. {
  259. ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
  260. ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);
  261. if (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo())
  262. {
  263. ValueType resultType = instr->m_func->GetReadOnlyProfileInfo()->GetDivProfileInfo(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId));
  264. if (resultType.IsLikelyInt())
  265. {
  266. if (specialize && src1ValueInfo && src2ValueInfo
  267. && ((src1ValueInfo->IsInt() && src2ValueInfo->IsInt()) ||
  268. (this->DoDivIntTypeSpec() && src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())))
  269. {
  270. return ValueType::GetInt(true);
  271. }
  272. return resultType;
  273. }
  274. // Consider: Checking that the sources are numbers.
  275. if (resultType.IsLikelyFloat())
  276. {
  277. return ValueType::Float;
  278. }
  279. return resultType;
  280. }
  281. int32 src1IntConstantValue;
  282. if(!src1ValueInfo || !src1ValueInfo->TryGetIntConstantValue(&src1IntConstantValue))
  283. {
  284. return ValueType::Number;
  285. }
  286. if (src1IntConstantValue == 1)
  287. {
  288. return ValueType::Float;
  289. }
  290. int32 src2IntConstantValue;
  291. if(!src2Val || !src2ValueInfo->TryGetIntConstantValue(&src2IntConstantValue))
  292. {
  293. return ValueType::Number;
  294. }
  295. if (src2IntConstantValue // Avoid divide by zero
  296. && !(src1IntConstantValue == 0x80000000 && src2IntConstantValue == -1) // Avoid integer overflow
  297. && (src1IntConstantValue % src2IntConstantValue) != 0)
  298. {
  299. return ValueType::Float;
  300. }
  301. return ValueType::Number;
  302. }
// Drives the global optimizer's forward dataflow pass:
// 1. Allocates the per-pass value arrays, constant-to-value maps, and tracking
//    bit vectors on this->alloc.
// 2. Optimizes every block of the function in forward order (OptBlock).
// 3. Runs the MemOp transformation (unless the phase is off).
// 4. Nulls out the per-pass pointers and releases per-block clone-string data.
// NOTE: the local maps declared here are stack-allocated; the members pointing
// at them must be cleared before returning, which the epilogue below does.
void
GlobOpt::ForwardPass()
{
    BEGIN_CODEGEN_PHASE(this->func, Js::ForwardPhase);

#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
    {
        this->func->DumpHeader();
    }

    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::GlobOptPhase))
    {
        this->TraceSettings();
    }
#endif

    // GetConstantCount() gives us the right size to pick for the SparseArray, but we may need more if we've inlined
    // functions with constants. There will be a gap in the symbol numbering between the main constants and
    // the inlined ones, so we'll most likely need a new array chunk. Make the min size of the array chunks be 64
    // in case we have a main function with very few constants and a bunch of constants from inlined functions.
    this->byteCodeConstantValueArray = SparseArray<Value>::New(this->alloc, max(this->func->GetJITFunctionBody()->GetConstCount(), 64U));
    this->byteCodeConstantValueNumbersBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->tempBv = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->prePassCopyPropSym = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->slotSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    this->byteCodeUses = nullptr;
    this->propertySymUse = nullptr;

    // changedSymsAfterIncBailoutCandidate helps track building incremental bailout in ForwardPass
    this->changedSymsAfterIncBailoutCandidate = JitAnew(alloc, BVSparse<JitArenaAllocator>, alloc);

#if DBG
    this->byteCodeUsesBeforeOpt = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) && this->DoFunctionFieldCopyProp())
    {
        Output::Print(_u("TRACE: CanDoFieldCopyProp Func: "));
        this->func->DumpFullFunctionName();
        Output::Print(_u("\n"));
    }
#endif

    // The maps below live on the stack for the duration of the pass; member
    // pointers give the rest of GlobOpt access to them.
    OpndList localNoImplicitCallUsesToInsert(alloc);
    this->noImplicitCallUsesToInsert = &localNoImplicitCallUsesToInsert;
    IntConstantToStackSymMap localIntConstantToStackSymMap(alloc);
    this->intConstantToStackSymMap = &localIntConstantToStackSymMap;
    IntConstantToValueMap localIntConstantToValueMap(alloc);
    this->intConstantToValueMap = &localIntConstantToValueMap;
    Int64ConstantToValueMap localInt64ConstantToValueMap(alloc);
    this->int64ConstantToValueMap = &localInt64ConstantToValueMap;
    AddrConstantToValueMap localAddrConstantToValueMap(alloc);
    this->addrConstantToValueMap = &localAddrConstantToValueMap;
    StringConstantToValueMap localStringConstantToValueMap(alloc);
    this->stringConstantToValueMap = &localStringConstantToValueMap;
    SymIdToInstrMap localPrePassInstrMap(alloc);
    this->prePassInstrMap = &localPrePassInstrMap;
    ValueSetByValueNumber localValuesCreatedForClone(alloc, 64);
    this->valuesCreatedForClone = &localValuesCreatedForClone;
    ValueNumberPairToValueMap localValuesCreatedForMerge(alloc, 64);
    this->valuesCreatedForMerge = &localValuesCreatedForMerge;

#if DBG
    BVSparse<JitArenaAllocator> localFinishedStackLiteralInitFld(alloc);
    this->finishedStackLiteralInitFld = &localFinishedStackLiteralInitFld;
#endif

    // Main forward walk: optimize each block in order.
    FOREACH_BLOCK_IN_FUNC_EDITING(block, this->func)
    {
        this->OptBlock(block);
    } NEXT_BLOCK_IN_FUNC_EDITING;

    if (!PHASE_OFF(Js::MemOpPhase, this->func))
    {
        ProcessMemOp();
    }

    // Clear member pointers into the stack-allocated maps above before they go
    // out of scope.
    this->noImplicitCallUsesToInsert = nullptr;
    this->intConstantToStackSymMap = nullptr;
    this->intConstantToValueMap = nullptr;
    this->int64ConstantToValueMap = nullptr;
    this->addrConstantToValueMap = nullptr;
    this->stringConstantToValueMap = nullptr;

#if DBG
    this->finishedStackLiteralInitFld = nullptr;
    uint freedCount = 0;
    uint spilledCount = 0;
#endif

    // Release per-block data that is no longer needed. DataUseCount == 0 means
    // the block's value map was already freed during the pass.
    FOREACH_BLOCK_IN_FUNC(block, this->func)
    {
#if DBG
        if (block->GetDataUseCount() == 0)
        {
            freedCount++;
        }
        else
        {
            spilledCount++;
        }
#endif
        block->SetDataUseCount(0);
        if (block->cloneStrCandidates)
        {
            JitAdelete(this->alloc, block->cloneStrCandidates);
            block->cloneStrCandidates = nullptr;
        }
    } NEXT_BLOCK_IN_FUNC;

    // Make sure we free most of them.
    Assert(freedCount >= spilledCount);

    // this->alloc will be freed right after return, no need to free it here
    this->changedSymsAfterIncBailoutCandidate = nullptr;

    END_CODEGEN_PHASE(this->func, Js::ForwardPhase);
}
// Optimizes a single basic block:
// - Removes the block if it is unreachable.
// - For a loop header not currently being pre-passed, runs the loop pre-pass
//   (OptLoops), then field PRE and re-optimization of the landing pad.
// - Merges predecessor value maps, prepares field hoisting, optimizes each
//   instruction, and on loop tails inserts back-edge/landing-pad compensation
//   so type-specialized symbol state matches the loop header.
void
GlobOpt::OptBlock(BasicBlock *block)
{
    if (this->func->m_fg->RemoveUnreachableBlock(block, this))
    {
        GOPT_TRACE(_u("Removing unreachable block #%d\n"), block->GetBlockNum());
        return;
    }

    Loop * loop = block->loop;
    if (loop && block->isLoopHeader)
    {
        // Entering a loop header that is not the loop currently being
        // pre-passed: run the pre-pass over the whole loop first.
        if (loop != this->prePassLoop)
        {
            OptLoops(loop);
            if (!this->IsLoopPrePass() && DoFieldPRE(loop))
            {
                // Note: !IsLoopPrePass means this was a root loop pre-pass. FieldPre() is called once per loop.
                this->FieldPRE(loop);

                // Re-optimize the landing pad
                BasicBlock *landingPad = loop->landingPad;
                this->isRecursiveCallOnLandingPad = true;

                this->OptBlock(landingPad);

                this->isRecursiveCallOnLandingPad = false;
            }
        }
    }

    this->currentBlock = block;
    PrepareLoopArrayCheckHoist();

    // Establish this block's starting value map from its predecessors.
    block->MergePredBlocksValueMaps(this);

    this->intOverflowCurrentlyMattersInRange = true;
    this->intOverflowDoesNotMatterRange = this->currentBlock->intOverflowDoesNotMatterRange;

    if (loop && DoFieldHoisting(loop))
    {
        if (block->isLoopHeader)
        {
            if (!this->IsLoopPrePass())
            {
                this->PrepareFieldHoisting(loop);
            }
            else if (loop == this->rootLoopPrePass)
            {
                this->PreparePrepassFieldHoisting(loop);
            }
        }
    }
    else
    {
        Assert(!TrackHoistableFields() || !HasHoistableFields(CurrentBlockData()));
        if (!DoFieldCopyProp() && !DoFieldRefOpts())
        {
            this->KillAllFields(CurrentBlockData()->liveFields);
        }
    }

    this->tempAlloc->Reset();

    if(loop && block->isLoopHeader)
    {
        loop->firstValueNumberInLoop = this->currentValue;
    }

    GOPT_TRACE_BLOCK(block, true);

    // Optimize each instruction. The extra bookkeeping around OptInstr keeps
    // aux-bailout-only instructions consistent in jit-in-debug-mode.
    FOREACH_INSTR_IN_BLOCK_EDITING(instr, instrNext, block)
    {
        GOPT_TRACE_INSTRTRACE(instr);
        BailOutInfo* oldBailOutInfo = nullptr;
        bool isCheckAuxBailoutNeeded = this->func->IsJitInDebugMode() && !this->IsLoopPrePass();
        if (isCheckAuxBailoutNeeded && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            oldBailOutInfo = instr->GetBailOutInfo();
            Assert(oldBailOutInfo);
        }
        bool isInstrRemoved = false;
        instrNext = this->OptInstr(instr, &isInstrRemoved);

        // If we still have instrs with only aux bail out, convert aux bail out back to regular bail out and fill it.
        // During OptInstr some instr can be moved out to a different block, in this case bailout info is going to be replaced
        // with e.g. loop bailout info which is filled as part of processing that block, thus we don't need to fill it here.
        if (isCheckAuxBailoutNeeded && !isInstrRemoved && instr->HasAuxBailOut() && !instr->HasBailOutInfo())
        {
            if (instr->GetBailOutInfo() == oldBailOutInfo)
            {
                instr->PromoteAuxBailOut();
                FillBailOutInfo(block, instr->GetBailOutInfo());
            }
            else
            {
                AssertMsg(instr->GetBailOutInfo(), "With aux bailout, the bailout info should not be removed by OptInstr.");
            }
        }
    } NEXT_INSTR_IN_BLOCK_EDITING;

    GOPT_TRACE_BLOCK(block, false);
    if (block->loop)
    {
        if (IsLoopPrePass())
        {
            if (DoBoundCheckHoist())
            {
                DetectUnknownChangesToInductionVariables(&block->globOptData);
            }
        }
        else
        {
            isPerformingLoopBackEdgeCompensation = true;

            Assert(this->tempBv->IsEmpty());
            BVSparse<JitArenaAllocator> tempBv2(this->tempAlloc);

            // On loop back-edges, we need to restore the state of the type specialized
            // symbols to that of the loop header.
            FOREACH_SUCCESSOR_BLOCK(succ, block)
            {
                // Only compensate when this block is a back-edge source: its
                // successor is a header of a loop containing this block.
                if (succ->isLoopHeader && succ->loop->IsDescendentOrSelf(block->loop))
                {
                    BVSparse<JitArenaAllocator> *liveOnBackEdge = block->loop->regAlloc.liveOnBackEdgeSyms;

                    // Var in the loop header, and no var on the back-edge - need a conversion to var
                    this->tempBv->Minus(block->loop->varSymsOnEntry, block->globOptData.liveVarSyms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToVar(this->tempBv, block);

                    // Lossy int in the loop header, and no int on the back-edge - need a lossy conversion to int
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int on the back-edge - need a lossless conversion to int
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block, false /* lossy */);

                    // Float in the loop header, and no float on the back-edge - need a conversion to float
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block);

#ifdef ENABLE_SIMDJS
                    // SIMD_JS
                    // Compensate on backedge if sym is live on loop entry but not on backedge
                    this->tempBv->Minus(block->loop->simd128F4SymsOnEntry, block->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block, TySimd128F4, IR::BailOutSimd128F4Only);

                    this->tempBv->Minus(block->loop->simd128I4SymsOnEntry, block->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block, TySimd128I4, IR::BailOutSimd128I4Only);
#endif

                    // For ints and floats, go aggressive and type specialize in the landing pad any symbol which was specialized on
                    // entry to the loop body (in the loop header), and is still specialized on this tail, but wasn't specialized in
                    // the landing pad.

                    // Lossy int in the loop header and no int in the landing pad - need a lossy conversion to int
                    // (entry.lossyInt32 - landingPad.int32)
                    this->tempBv->Minus(block->loop->lossyInt32SymsOnEntry, block->loop->landingPad->globOptData.liveInt32Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, true /* lossy */);

                    // Lossless int in the loop header, and no lossless int in the landing pad - need a lossless conversion to int
                    // ((entry.int32 - entry.lossyInt32) - (landingPad.int32 - landingPad.lossyInt32))
                    this->tempBv->Minus(block->loop->int32SymsOnEntry, block->loop->lossyInt32SymsOnEntry);
                    tempBv2.Minus(
                        block->loop->landingPad->globOptData.liveInt32Syms,
                        block->loop->landingPad->globOptData.liveLossyInt32Syms);
                    this->tempBv->Minus(&tempBv2);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToInt32(this->tempBv, block->loop->landingPad, false /* lossy */);

                    // ((entry.float64 - landingPad.float64) & block.float64)
                    this->tempBv->Minus(block->loop->float64SymsOnEntry, block->loop->landingPad->globOptData.liveFloat64Syms);
                    this->tempBv->And(block->globOptData.liveFloat64Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToFloat64(this->tempBv, block->loop->landingPad);

#ifdef ENABLE_SIMDJS
                    // SIMD_JS
                    // compensate on landingpad if live on loopEntry and Backedge.
                    this->tempBv->Minus(block->loop->simd128F4SymsOnEntry, block->loop->landingPad->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(block->globOptData.liveSimd128F4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block->loop->landingPad, TySimd128F4, IR::BailOutSimd128F4Only);

                    this->tempBv->Minus(block->loop->simd128I4SymsOnEntry, block->loop->landingPad->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(block->globOptData.liveSimd128I4Syms);
                    this->tempBv->And(liveOnBackEdge);
                    this->ToTypeSpec(this->tempBv, block->loop->landingPad, TySimd128I4, IR::BailOutSimd128I4Only);
#endif

                    // Now that we're done with the liveFields within this loop, trim the set to those syms
                    // that the backward pass told us were live out of the loop.
                    // This assumes we have no further need of the liveFields within the loop.
                    if (block->loop->liveOutFields)
                    {
                        block->globOptData.liveFields->And(block->loop->liveOutFields);
                    }
                }
            } NEXT_SUCCESSOR_BLOCK;

            this->tempBv->ClearAll();
            isPerformingLoopBackEdgeCompensation = false;
        }
    }

#if DBG
    // The set of live lossy int32 syms should be a subset of all live int32 syms
    this->tempBv->And(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    // The set of live lossy int32 syms should be a subset of live var or float syms (var or float sym containing the lossless
    // value of the sym should be live)
    this->tempBv->Or(block->globOptData.liveVarSyms, block->globOptData.liveFloat64Syms);
    this->tempBv->And(block->globOptData.liveLossyInt32Syms);
    Assert(this->tempBv->Count() == block->globOptData.liveLossyInt32Syms->Count());

    this->tempBv->ClearAll();
    Assert(this->currentBlock == block);
#endif
}
  600. void
  601. GlobOpt::OptLoops(Loop *loop)
  602. {
  603. Assert(loop != nullptr);
  604. #if DBG
  605. if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase) &&
  606. !DoFunctionFieldCopyProp() && DoFieldCopyProp(loop))
  607. {
  608. Output::Print(_u("TRACE: CanDoFieldCopyProp Loop: "));
  609. this->func->DumpFullFunctionName();
  610. uint loopNumber = loop->GetLoopNumber();
  611. Assert(loopNumber != Js::LoopHeader::NoLoop);
  612. Output::Print(_u(" Loop: %d\n"), loopNumber);
  613. }
  614. #endif
  615. Loop *previousLoop = this->prePassLoop;
  616. this->prePassLoop = loop;
  617. if (previousLoop == nullptr)
  618. {
  619. Assert(this->rootLoopPrePass == nullptr);
  620. this->rootLoopPrePass = loop;
  621. this->prePassInstrMap->Clear();
  622. if (loop->parent == nullptr)
  623. {
  624. // Outer most loop...
  625. this->prePassCopyPropSym->ClearAll();
  626. }
  627. }
  628. if (loop->symsUsedBeforeDefined == nullptr)
  629. {
  630. loop->symsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
  631. loop->likelyIntSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
  632. loop->likelyNumberSymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
  633. loop->forceFloat64SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  634. #ifdef ENABLE_SIMDJS
  635. loop->likelySimd128F4SymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
  636. loop->likelySimd128I4SymsUsedBeforeDefined = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
  637. loop->forceSimd128F4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  638. loop->forceSimd128I4SymsOnEntry = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  639. #endif
  640. loop->symsDefInLoop = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
  641. loop->fieldKilled = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
  642. loop->fieldPRESymStore = JitAnew(alloc, BVSparse<JitArenaAllocator>, this->alloc);
  643. loop->allFieldsKilled = false;
  644. }
  645. else
  646. {
  647. loop->symsUsedBeforeDefined->ClearAll();
  648. loop->likelyIntSymsUsedBeforeDefined->ClearAll();
  649. loop->likelyNumberSymsUsedBeforeDefined->ClearAll();
  650. loop->forceFloat64SymsOnEntry->ClearAll();
  651. #ifdef ENABLE_SIMDJS
  652. loop->likelySimd128F4SymsUsedBeforeDefined->ClearAll();
  653. loop->likelySimd128I4SymsUsedBeforeDefined->ClearAll();
  654. loop->forceSimd128F4SymsOnEntry->ClearAll();
  655. loop->forceSimd128I4SymsOnEntry->ClearAll();
  656. #endif
  657. loop->symsDefInLoop->ClearAll();
  658. loop->fieldKilled->ClearAll();
  659. loop->allFieldsKilled = false;
  660. loop->initialValueFieldMap.Reset();
  661. }
  662. FOREACH_BLOCK_IN_LOOP(block, loop)
  663. {
  664. block->SetDataUseCount(block->GetSuccList()->Count());
  665. OptBlock(block);
  666. } NEXT_BLOCK_IN_LOOP;
  667. if (previousLoop == nullptr)
  668. {
  669. Assert(this->rootLoopPrePass == loop);
  670. this->rootLoopPrePass = nullptr;
  671. }
  672. this->prePassLoop = previousLoop;
  673. }
  674. void
  675. GlobOpt::TailDupPass()
  676. {
  677. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  678. {
  679. BasicBlock* header = loop->GetHeadBlock();
  680. BasicBlock* loopTail = nullptr;
  681. FOREACH_PREDECESSOR_BLOCK(pred, header)
  682. {
  683. if (loop->IsDescendentOrSelf(pred->loop))
  684. {
  685. loopTail = pred;
  686. break;
  687. }
  688. } NEXT_PREDECESSOR_BLOCK;
  689. if (loopTail)
  690. {
  691. AssertMsg(loopTail->GetLastInstr()->IsBranchInstr(), "LastInstr of loop should always be a branch no?");
  692. if (!loopTail->GetPredList()->HasOne())
  693. {
  694. TryTailDup(loopTail->GetLastInstr()->AsBranchInstr());
  695. }
  696. }
  697. } NEXT_LOOP_IN_FUNC_EDITING;
  698. }
// Attempts tail duplication for the block ending in tailBranch: copies the
// (at most one) CSE-able instruction preceding the branch into each
// unconditional predecessor branch, retargets those branches past the merge
// label, and fixes up the flow graph. If every predecessor was duplicated
// into, the now-dead tail block is removed.
// Returns true if duplication was performed, false if the shape of the tail
// made it ineligible (conditional branch, bailout, non-CSE-able or too many
// instructions, phase disabled).
bool
GlobOpt::TryTailDup(IR::BranchInstr *tailBranch)
{
    if (PHASE_OFF(Js::TailDupPhase, tailBranch->m_func->GetTopFunc()))
    {
        return false;
    }

    if (tailBranch->IsConditional())
    {
        return false;
    }

    IR::Instr *instr;
    uint instrCount = 0;
    // Walk backwards from the branch to the block's label, counting the
    // instructions that would have to be duplicated.
    for (instr = tailBranch->GetPrevRealInstrOrLabel(); !instr->IsLabelInstr(); instr = instr->GetPrevRealInstrOrLabel())
    {
        if (instr->HasBailOutInfo())
        {
            break;
        }
        if (!OpCodeAttr::CanCSE(instr->m_opcode))
        {
            // Consider: We could be more aggressive here
            break;
        }

        instrCount++;

        if (instrCount > 1)
        {
            // Consider: If copy handled single-def tmps renaming, we could do more instrs
            break;
        }
    }

    // The walk must have reached the label; otherwise we bailed on an
    // ineligible instruction above.
    if (!instr->IsLabelInstr())
    {
        return false;
    }

    IR::LabelInstr *mergeLabel = instr->AsLabelInstr();
    IR::Instr *mergeLabelPrev = mergeLabel->m_prev;

    // Skip unreferenced labels
    while (mergeLabelPrev->IsLabelInstr() && mergeLabelPrev->AsLabelInstr()->labelRefs.Empty())
    {
        mergeLabelPrev = mergeLabelPrev->m_prev;
    }

    BasicBlock* labelBlock = mergeLabel->GetBasicBlock();
    uint origPredCount = labelBlock->GetPredList()->Count();
    uint dupCount = 0;

    // We are good to go. Let's do the tail duplication.
    FOREACH_SLISTCOUNTED_ENTRY_EDITING(IR::BranchInstr*, branchEntry, &mergeLabel->labelRefs, iter)
    {
        // Only duplicate into unconditional, non-multi branches that jump to
        // the merge label (not the fall-through predecessor or the tail itself).
        if (branchEntry->IsUnconditional() && !branchEntry->IsMultiBranch() && branchEntry != mergeLabelPrev && branchEntry != tailBranch)
        {
            // Copy the tail's instructions in front of the predecessor branch.
            for (instr = mergeLabel->m_next; instr != tailBranch; instr = instr->m_next)
            {
                branchEntry->InsertBefore(instr->Copy());
            }

            instr = branchEntry;
            branchEntry->ReplaceTarget(mergeLabel, tailBranch->GetTarget());

            // Find the label of the block containing the rewritten branch so
            // the flow graph edge can be moved.
            while(!instr->IsLabelInstr())
            {
                instr = instr->m_prev;
            }
            BasicBlock* branchBlock = instr->AsLabelInstr()->GetBasicBlock();

            labelBlock->RemovePred(branchBlock, func->m_fg);
            func->m_fg->AddEdge(branchBlock, tailBranch->GetTarget()->GetBasicBlock());

            dupCount++;
        }
    } NEXT_SLISTCOUNTED_ENTRY_EDITING;

    // If we've duplicated everywhere, tail block is dead and should be removed.
    if (dupCount == origPredCount)
    {
        AssertMsg(mergeLabel->labelRefs.Empty(), "Should not remove block with referenced label.");
        func->m_fg->RemoveBlock(labelBlock, nullptr, true);
    }

    return true;
}
  773. void
  774. GlobOpt::ToVar(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
  775. {
  776. FOREACH_BITSET_IN_SPARSEBV(id, bv)
  777. {
  778. StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
  779. IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, TyVar, this->func);
  780. IR::Instr *lastInstr = block->GetLastInstr();
  781. if (lastInstr->IsBranchInstr() || lastInstr->m_opcode == Js::OpCode::BailTarget)
  782. {
  783. // If branch is using this symbol, hoist the operand as the ToVar load will get
  784. // inserted right before the branch.
  785. IR::Opnd *src1 = lastInstr->GetSrc1();
  786. if (src1)
  787. {
  788. if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
  789. {
  790. lastInstr->HoistSrc1(Js::OpCode::Ld_A);
  791. }
  792. IR::Opnd *src2 = lastInstr->GetSrc2();
  793. if (src2)
  794. {
  795. if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
  796. {
  797. lastInstr->HoistSrc2(Js::OpCode::Ld_A);
  798. }
  799. }
  800. }
  801. this->ToVar(lastInstr, newOpnd, block, nullptr, false);
  802. }
  803. else
  804. {
  805. IR::Instr *lastNextInstr = lastInstr->m_next;
  806. this->ToVar(lastNextInstr, newOpnd, block, nullptr, false);
  807. }
  808. } NEXT_BITSET_IN_SPARSEBV;
  809. }
  810. void
  811. GlobOpt::ToInt32(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, bool lossy, IR::Instr *insertBeforeInstr)
  812. {
  813. return this->ToTypeSpec(bv, block, TyInt32, IR::BailOutIntOnly, lossy, insertBeforeInstr);
  814. }
  815. void
  816. GlobOpt::ToFloat64(BVSparse<JitArenaAllocator> *bv, BasicBlock *block)
  817. {
  818. return this->ToTypeSpec(bv, block, TyFloat64, IR::BailOutNumberOnly);
  819. }
// Inserts conversions to 'toType' for every symbol in the bit vector, placed
// in 'block' (before 'insertBeforeInstr' if given, otherwise at the block
// end). For each symbol the current live representation (var / int32 /
// float64 / SIMD) is determined from the block's liveness sets and the
// matching type-specialized equivalent sym is used as the conversion source.
void
GlobOpt::ToTypeSpec(BVSparse<JitArenaAllocator> *bv, BasicBlock *block, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
{
    FOREACH_BITSET_IN_SPARSEBV(id, bv)
    {
        StackSym *stackSym = this->func->m_symTable->FindStackSym(id);
        IRType fromType = TyIllegal;

        // Win8 bug: 757126. If we are trying to type specialize the arguments object,
        // let's make sure stack args optimization is not enabled. This is a problem, particularly,
        // if the instruction comes from an unreachable block. In other cases, the pass on the
        // instruction itself should disable arguments object optimization.
        if(block->globOptData.argObjSyms && block->globOptData.IsArgumentsSymID(id))
        {
            CannotAllocateArgumentsObjectOnStack();
        }

        // Determine the representation the symbol is currently live in, and
        // switch to the corresponding equivalent sym for non-var sources.
        if (block->globOptData.liveVarSyms->Test(id))
        {
            fromType = TyVar;
        }
        else if (block->globOptData.liveInt32Syms->Test(id) && !block->globOptData.liveLossyInt32Syms->Test(id))
        {
            fromType = TyInt32;
            stackSym = stackSym->GetInt32EquivSym(this->func);
        }
        else if (block->globOptData.liveFloat64Syms->Test(id))
        {
            fromType = TyFloat64;
            stackSym = stackSym->GetFloat64EquivSym(this->func);
        }
        else
        {
#ifdef ENABLE_SIMDJS
            Assert(block->globOptData.IsLiveAsSimd128(stackSym));
            if (block->globOptData.IsLiveAsSimd128F4(stackSym))
            {
                fromType = TySimd128F4;
                stackSym = stackSym->GetSimd128F4EquivSym(this->func);
            }
            else
            {
                fromType = TySimd128I4;
                stackSym = stackSym->GetSimd128I4EquivSym(this->func);
            }
#else
            Assert(UNREACHED);
#endif
        }

        IR::RegOpnd *newOpnd = IR::RegOpnd::New(stackSym, fromType, this->func);
        IR::Instr *lastInstr = block->GetLastInstr();

        if (!insertBeforeInstr && lastInstr->IsBranchInstr())
        {
            // If branch is using this symbol, hoist the operand as the ToInt32 load will get
            // inserted right before the branch.
            IR::Instr *instrPrev = lastInstr->m_prev;
            IR::Opnd *src1 = lastInstr->GetSrc1();
            if (src1)
            {
                if (src1->IsRegOpnd() && src1->AsRegOpnd()->m_sym == stackSym)
                {
                    lastInstr->HoistSrc1(Js::OpCode::Ld_A);
                }

                IR::Opnd *src2 = lastInstr->GetSrc2();

                if (src2)
                {
                    if (src2->IsRegOpnd() && src2->AsRegOpnd()->m_sym == stackSym)
                    {
                        lastInstr->HoistSrc2(Js::OpCode::Ld_A);
                    }
                }

                // Did we insert anything?
                if (lastInstr->m_prev != instrPrev)
                {
                    // If we had ByteCodeUses right before the branch, move them back down.
                    IR::Instr *insertPoint = lastInstr;
                    for (IR::Instr *instrBytecode = instrPrev; instrBytecode->m_opcode == Js::OpCode::ByteCodeUses; instrBytecode = instrBytecode->m_prev)
                    {
                        instrBytecode->Unlink();
                        insertPoint->InsertBefore(instrBytecode);
                        insertPoint = instrBytecode;
                    }
                }
            }
        }

        this->ToTypeSpecUse(nullptr, newOpnd, block, nullptr, nullptr, toType, bailOutKind, lossy, insertBeforeInstr);
    } NEXT_BITSET_IN_SPARSEBV;
}
  906. PRECandidatesList * GlobOpt::FindPossiblePRECandidates(Loop *loop, JitArenaAllocator *alloc)
  907. {
  908. // Find the set of PRE candidates
  909. BasicBlock *loopHeader = loop->GetHeadBlock();
  910. PRECandidatesList *candidates = nullptr;
  911. bool firstBackEdge = true;
  912. FOREACH_PREDECESSOR_BLOCK(blockPred, loopHeader)
  913. {
  914. if (!loop->IsDescendentOrSelf(blockPred->loop))
  915. {
  916. // Not a loop back-edge
  917. continue;
  918. }
  919. if (firstBackEdge)
  920. {
  921. candidates = this->FindBackEdgePRECandidates(blockPred, alloc);
  922. }
  923. else
  924. {
  925. blockPred->globOptData.RemoveUnavailableCandidates(candidates);
  926. }
  927. } NEXT_PREDECESSOR_BLOCK;
  928. return candidates;
  929. }
// Inserts a load in the loop landing pad for one field PRE candidate.
// Returns TRUE if the load was inserted (the candidate is done), FALSE if it
// must be retried later (e.g. its object pointer is not yet live in the
// landing pad) or is not safe to preload.
BOOL GlobOpt::PreloadPRECandidate(Loop *loop, GlobHashBucket* candidate)
{
    // Insert a load for each field PRE candidate.
    PropertySym *propertySym = candidate->value->AsPropertySym();
    StackSym *objPtrSym = propertySym->m_stackSym;

    // If objPtr isn't live, we'll retry later.
    // Another PRE candidate may insert a load for it.
    if (!loop->landingPad->globOptData.IsLive(objPtrSym))
    {
        return false;
    }
    BasicBlock *landingPad = loop->landingPad;
    Value *value = candidate->element;
    Sym *symStore = value->GetValueInfo()->GetSymStore();

    // The symStore can't be live into the loop
    // The symStore needs to still have the same value
    Assert(symStore && symStore->IsStackSym());

    if (loop->landingPad->globOptData.IsLive(symStore))
    {
        // May have already been hoisted:
        //  o.x = t1;
        //  o.y = t1;
        return false;
    }
    Value *landingPadValue = landingPad->globOptData.FindValue(propertySym);

    // Value should be added as initial value or already be there.
    Assert(landingPadValue);

    // The load instruction was saved during the loop pre-pass.
    IR::Instr * ldInstr = this->prePassInstrMap->Lookup(propertySym->m_id, nullptr);
    Assert(ldInstr);

    // Create instr to put in landing pad for compensation
    Assert(IsPREInstrCandidateLoad(ldInstr->m_opcode));
    IR::SymOpnd *ldSrc = ldInstr->GetSrc1()->AsSymOpnd();

    if (ldSrc->m_sym != propertySym)
    {
        // The saved instr may load a different propertySym whose objPtr is merely
        // equivalent. Verify the objPtrs have the same value before reusing it.
        Value *val1 = CurrentBlockData()->FindValue(ldSrc->m_sym->AsPropertySym()->m_stackSym);
        Value *val2 = CurrentBlockData()->FindValue(propertySym->m_stackSym);
        if (!val1 || !val2 || val1->GetValueNumber() != val2->GetValueNumber())
        {
            return false;
        }
    }

    ldInstr = ldInstr->Copy();

    // Consider: Shouldn't be necessary once we have copy-prop in prepass...
    ldInstr->GetSrc1()->AsSymOpnd()->m_sym = propertySym;
    ldSrc = ldInstr->GetSrc1()->AsSymOpnd();

    if (ldSrc->IsPropertySymOpnd())
    {
        // Strip flow-sensitive info from the copied operand: it was collected
        // inside the loop and doesn't hold in the landing pad.
        IR::PropertySymOpnd *propSymOpnd = ldSrc->AsPropertySymOpnd();
        IR::PropertySymOpnd *newPropSymOpnd;

        newPropSymOpnd = propSymOpnd->AsPropertySymOpnd()->CopyWithoutFlowSensitiveInfo(this->func);
        ldInstr->ReplaceSrc1(newPropSymOpnd);
    }

    if (ldInstr->GetDst()->AsRegOpnd()->m_sym != symStore)
    {
        ldInstr->ReplaceDst(IR::RegOpnd::New(symStore->AsStackSym(), TyVar, this->func));
    }

    ldInstr->GetSrc1()->SetIsJITOptimizedReg(true);
    ldInstr->GetDst()->SetIsJITOptimizedReg(true);

    landingPad->globOptData.liveVarSyms->Set(symStore->m_id);
    loop->fieldPRESymStore->Set(symStore->m_id);

    // Pick the value type for the preloaded value: profile data when the value
    // is otherwise unknown, the known value type otherwise.
    ValueType valueType(ValueType::Uninitialized);
    Value *initialValue = nullptr;

    if (loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue))
    {
        if (ldInstr->IsProfiledInstr())
        {
            if (initialValue->GetValueNumber() == value->GetValueNumber())
            {
                if (value->GetValueInfo()->IsUninitialized())
                {
                    valueType = ldInstr->AsProfiledInstr()->u.FldInfo().valueType;
                }
                else
                {
                    valueType = value->GetValueInfo()->Type();
                }
            }
            else
            {
                valueType = ValueType::Uninitialized;
            }
            ldInstr->AsProfiledInstr()->u.FldInfo().valueType = valueType;
        }
    }
    else
    {
        valueType = landingPadValue->GetValueInfo()->Type();
    }

    loop->symsUsedBeforeDefined->Set(symStore->m_id);

    if (valueType.IsLikelyNumber())
    {
        loop->likelyNumberSymsUsedBeforeDefined->Set(symStore->m_id);
        if (DoAggressiveIntTypeSpec() ? valueType.IsLikelyInt() : valueType.IsInt())
        {
            // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
            // specialization is enabled
            loop->likelyIntSymsUsedBeforeDefined->Set(symStore->m_id);
        }
    }

    // Insert in landing pad
    if (ldInstr->HasAnyImplicitCalls())
    {
        // Loads with implicit calls must go inside the disable-implicit-call
        // region (created on demand).
        IR::Instr * bailInstr = EnsureDisableImplicitCallRegion(loop);

        bailInstr->InsertBefore(ldInstr);
    }
    else if (loop->endDisableImplicitCall)
    {
        loop->endDisableImplicitCall->InsertBefore(ldInstr);
    }
    else
    {
        loop->landingPad->InsertAfter(ldInstr);
    }

    ldInstr->ClearByteCodeOffset();
    ldInstr->SetByteCodeOffset(landingPad->GetFirstInstr());

#if DBG_DUMP
    if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
    {
        Output::Print(_u("** TRACE: Field PRE: field pre-loaded in landing pad of loop head #%-3d: "), loop->GetHeadBlock()->GetBlockNum());
        ldInstr->Dump();
        Output::Print(_u("\n"));
    }
#endif

    return true;
}
  1056. void GlobOpt::PreloadPRECandidates(Loop *loop, PRECandidatesList *candidates)
  1057. {
  1058. // Insert loads in landing pad for field PRE candidates. Iterate while(changed)
  1059. // for the o.x.y cases.
  1060. BOOL changed = true;
  1061. if (!candidates)
  1062. {
  1063. return;
  1064. }
  1065. Assert(loop->landingPad->GetFirstInstr() == loop->landingPad->GetLastInstr());
  1066. while (changed)
  1067. {
  1068. changed = false;
  1069. FOREACH_SLIST_ENTRY_EDITING(GlobHashBucket*, candidate, (SList<GlobHashBucket*>*)candidates, iter)
  1070. {
  1071. if (this->PreloadPRECandidate(loop, candidate))
  1072. {
  1073. changed = true;
  1074. iter.RemoveCurrent();
  1075. }
  1076. } NEXT_SLIST_ENTRY_EDITING;
  1077. }
  1078. }
  1079. void GlobOpt::FieldPRE(Loop *loop)
  1080. {
  1081. if (!DoFieldPRE(loop))
  1082. {
  1083. return;
  1084. }
  1085. PRECandidatesList *candidates;
  1086. JitArenaAllocator *alloc = this->tempAlloc;
  1087. candidates = this->FindPossiblePRECandidates(loop, alloc);
  1088. this->PreloadPRECandidates(loop, candidates);
  1089. }
// Insert compensation code on the edge from 'predecessor' into the current
// (successor) block for syms whose array value info differs from the merged
// value info. For each entry in the map, emits Ld_A / Ld_I4 copies of the
// predecessor's head-segment / head-segment-length / length syms into the
// merged syms, and updates liveness and value tracking accordingly.
void GlobOpt::InsertValueCompensation(
    BasicBlock *const predecessor,
    const SymToValueInfoMap &symsRequiringCompensationToMergedValueInfoMap)
{
    Assert(predecessor);
    Assert(symsRequiringCompensationToMergedValueInfoMap.Count() != 0);

    // Choose the insertion point in the predecessor block.
    IR::Instr *insertBeforeInstr = predecessor->GetLastInstr();
    Func *const func = insertBeforeInstr->m_func;
    bool setLastInstrInPredecessor;
    if(insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
    {
        // Don't insert code between the branch and the corresponding ByteCodeUses instructions
        while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
        {
            insertBeforeInstr = insertBeforeInstr->m_prev;
        }
        setLastInstrInPredecessor = false;
    }
    else
    {
        // Insert at the end of the block and set the last instruction
        Assert(insertBeforeInstr->m_next);
        insertBeforeInstr = insertBeforeInstr->m_next; // Instruction after the last instruction in the predecessor
        setLastInstrInPredecessor = true;
    }

    GlobOptBlockData &predecessorBlockData = predecessor->globOptData;
    GlobOptBlockData &successorBlockData = *CurrentBlockData();

    // Holds a deferred ChangeValueInfo call; applied only after all syms have
    // been processed so later iterations don't observe the new value info.
    struct DelayChangeValueInfo
    {
        Value* predecessorValue;
        ArrayValueInfo* valueInfo;
        void ChangeValueInfo(BasicBlock* predecessor, GlobOpt* g)
        {
            g->ChangeValueInfo(
                predecessor,
                predecessorValue,
                valueInfo,
                false /*allowIncompatibleType*/,
                true /*compensated*/);
        }
    };
    JsUtil::List<DelayChangeValueInfo, ArenaAllocator> delayChangeValueInfo(alloc);
    for(auto it = symsRequiringCompensationToMergedValueInfoMap.GetIterator(); it.IsValid(); it.MoveNext())
    {
        const auto &entry = it.Current();
        Sym *const sym = entry.Key();
        Value *const predecessorValue = predecessorBlockData.FindValue(sym);
        Assert(predecessorValue);
        ValueInfo *const predecessorValueInfo = predecessorValue->GetValueInfo();

        // Currently, array value infos are the only ones that require compensation based on values
        Assert(predecessorValueInfo->IsAnyOptimizedArray());
        const ArrayValueInfo *const predecessorArrayValueInfo = predecessorValueInfo->AsArrayValueInfo();
        StackSym *const predecessorHeadSegmentSym = predecessorArrayValueInfo->HeadSegmentSym();
        StackSym *const predecessorHeadSegmentLengthSym = predecessorArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const predecessorLengthSym = predecessorArrayValueInfo->LengthSym();
        ValueInfo *const mergedValueInfo = entry.Value();
        const ArrayValueInfo *const mergedArrayValueInfo = mergedValueInfo->AsArrayValueInfo();
        StackSym *const mergedHeadSegmentSym = mergedArrayValueInfo->HeadSegmentSym();
        StackSym *const mergedHeadSegmentLengthSym = mergedArrayValueInfo->HeadSegmentLengthSym();
        StackSym *const mergedLengthSym = mergedArrayValueInfo->LengthSym();
        // A merged sym implies the predecessor tracked the corresponding sym too.
        Assert(!mergedHeadSegmentSym || predecessorHeadSegmentSym);
        Assert(!mergedHeadSegmentLengthSym || predecessorHeadSegmentLengthSym);
        Assert(!mergedLengthSym || predecessorLengthSym);

        bool compensated = false;
        // Compensate the head segment sym: copy the predecessor's sym into the merged sym.
        if(mergedHeadSegmentSym && predecessorHeadSegmentSym != mergedHeadSegmentSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_A,
                    IR::RegOpnd::New(mergedHeadSegmentSym, mergedHeadSegmentSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentSym, predecessorHeadSegmentSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;
        }

        // Compensate the head segment length sym and merge its tracked value.
        if(mergedHeadSegmentLengthSym && predecessorHeadSegmentLengthSym != mergedHeadSegmentLengthSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedHeadSegmentLengthSym, mergedHeadSegmentLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorHeadSegmentLengthSym, predecessorHeadSegmentLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the head segment length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorHeadSegmentLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedHeadSegmentLengthSym->m_id);
            Value *const predecessorHeadSegmentLengthValue =
                predecessorBlockData.FindValue(predecessorHeadSegmentLengthSym);
            Assert(predecessorHeadSegmentLengthValue);
            predecessorBlockData.SetValue(predecessorHeadSegmentLengthValue, mergedHeadSegmentLengthSym);
            Value *const mergedHeadSegmentLengthValue = successorBlockData.FindValue(mergedHeadSegmentLengthSym);
            if(mergedHeadSegmentLengthValue)
            {
                // The successor already tracks a (different) value for the merged
                // sym; widen its value info to cover the predecessor's value too.
                Assert(mergedHeadSegmentLengthValue->GetValueNumber() != predecessorHeadSegmentLengthValue->GetValueNumber());
                if(predecessorHeadSegmentLengthValue->GetValueInfo() != mergedHeadSegmentLengthValue->GetValueInfo())
                {
                    mergedHeadSegmentLengthValue->SetValueInfo(
                        ValueInfo::MergeLikelyIntValueInfo(
                            this->alloc,
                            mergedHeadSegmentLengthValue,
                            predecessorHeadSegmentLengthValue,
                            mergedHeadSegmentLengthValue->GetValueInfo()->Type()
                                .Merge(predecessorHeadSegmentLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                successorBlockData.SetValue(CopyValue(predecessorHeadSegmentLengthValue), mergedHeadSegmentLengthSym);
            }
        }

        // Compensate the length sym and merge its tracked value (same pattern as above).
        if(mergedLengthSym && predecessorLengthSym != mergedLengthSym)
        {
            IR::Instr *const newInstr =
                IR::Instr::New(
                    Js::OpCode::Ld_I4,
                    IR::RegOpnd::New(mergedLengthSym, mergedLengthSym->GetType(), func),
                    IR::RegOpnd::New(predecessorLengthSym, predecessorLengthSym->GetType(), func),
                    func);
            newInstr->GetDst()->SetIsJITOptimizedReg(true);
            newInstr->GetSrc1()->SetIsJITOptimizedReg(true);
            newInstr->SetByteCodeOffset(insertBeforeInstr);
            insertBeforeInstr->InsertBefore(newInstr);
            compensated = true;

            // Merge the length value
            Assert(predecessorBlockData.liveVarSyms->Test(predecessorLengthSym->m_id));
            predecessorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            successorBlockData.liveVarSyms->Set(mergedLengthSym->m_id);
            Value *const predecessorLengthValue = predecessorBlockData.FindValue(predecessorLengthSym);
            Assert(predecessorLengthValue);
            predecessorBlockData.SetValue(predecessorLengthValue, mergedLengthSym);
            Value *const mergedLengthValue = successorBlockData.FindValue(mergedLengthSym);
            if(mergedLengthValue)
            {
                Assert(mergedLengthValue->GetValueNumber() != predecessorLengthValue->GetValueNumber());
                if(predecessorLengthValue->GetValueInfo() != mergedLengthValue->GetValueInfo())
                {
                    mergedLengthValue->SetValueInfo(
                        ValueInfo::MergeLikelyIntValueInfo(
                            this->alloc,
                            mergedLengthValue,
                            predecessorLengthValue,
                            mergedLengthValue->GetValueInfo()->Type().Merge(predecessorLengthValue->GetValueInfo()->Type())));
                }
            }
            else
            {
                successorBlockData.SetValue(CopyValue(predecessorLengthValue), mergedLengthSym);
            }
        }

        if(compensated)
        {
            // Save the new ValueInfo for later.
            // We don't want other symbols needing compensation to see this new one
            delayChangeValueInfo.Add({
                predecessorValue,
                ArrayValueInfo::New(
                    alloc,
                    predecessorValueInfo->Type(),
                    mergedHeadSegmentSym ? mergedHeadSegmentSym : predecessorHeadSegmentSym,
                    mergedHeadSegmentLengthSym ? mergedHeadSegmentLengthSym : predecessorHeadSegmentLengthSym,
                    mergedLengthSym ? mergedLengthSym : predecessorLengthSym,
                    predecessorValueInfo->GetSymStore())
            });
        }
    }

    // Once we've compensated all the symbols, update the new ValueInfo.
    delayChangeValueInfo.Map([predecessor, this](int, DelayChangeValueInfo d) { d.ChangeValueInfo(predecessor, this); });

    if(setLastInstrInPredecessor)
    {
        predecessor->SetLastInstr(insertBeforeInstr->m_prev);
    }
}
  1271. bool
  1272. GlobOpt::AreFromSameBytecodeFunc(IR::RegOpnd const* src1, IR::RegOpnd const* dst) const
  1273. {
  1274. Assert(this->func->m_symTable->FindStackSym(src1->m_sym->m_id) == src1->m_sym);
  1275. Assert(this->func->m_symTable->FindStackSym(dst->m_sym->m_id) == dst->m_sym);
  1276. if (dst->m_sym->HasByteCodeRegSlot() && src1->m_sym->HasByteCodeRegSlot())
  1277. {
  1278. return src1->m_sym->GetByteCodeFunc() == dst->m_sym->GetByteCodeFunc();
  1279. }
  1280. return false;
  1281. }
/*
 * Support for scope object removal together with the Heap Arguments optimization.
 * We track several instructions to facilitate removal of the scope object:
 * - LdSlotArr    - tracked to identify the formals array (the dst).
 * - InlineeStart - tracked to record the stack syms for the formals of the inlinee.
 */
// Track instructions needed for scope object removal (see comment above):
// records the formals-array sym for LdSlotArr from the scope object, and maps
// each inlinee formal to its ArgOut stack sym (or a shared "undefined" sym) at
// InlineeStart. Only active when stack args are enabled and not in loop prepass.
void
GlobOpt::TrackInstrsForScopeObjectRemoval(IR::Instr * instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();

    if (instr->m_opcode == Js::OpCode::Ld_A && src1->IsRegOpnd())
    {
        AssertMsg(!instr->m_func->IsStackArgsEnabled() || !src1->IsScopeObjOpnd(instr->m_func), "There can be no aliasing for scope object.");
    }

    // The following is to track formals array for Stack Arguments optimization with Formals
    if (instr->m_func->IsStackArgsEnabled() && !this->IsLoopPrePass())
    {
        if (instr->m_opcode == Js::OpCode::LdSlotArr)
        {
            if (instr->GetSrc1()->IsScopeObjOpnd(instr->m_func))
            {
                AssertMsg(!instr->m_func->GetJITFunctionBody()->HasImplicitArgIns(), "No mapping is required in this case. So it should already be generating ArgIns.");
                // Remember the dst sym as the formals array.
                instr->m_func->TrackFormalsArraySym(dst->GetStackSym()->m_id);
            }
        }
        else if (instr->m_opcode == Js::OpCode::InlineeStart)
        {
            Assert(instr->m_func->IsInlined());
            // Both counts are reduced by one; presumably excluding the "this"
            // argument — TODO confirm against the ArgOut chain layout.
            Js::ArgSlot actualsCount = instr->m_func->actualCount - 1;
            Js::ArgSlot formalsCount = instr->m_func->GetJITFunctionBody()->GetInParamsCount() - 1;

            Func * func = instr->m_func;
            Func * inlinerFunc = func->GetParentFunc(); //Inliner's func

            // Walk the ArgOut chain backwards; each link is reached through the
            // def of the previous instruction's src2 stack sym.
            IR::Instr * argOutInstr = instr->GetSrc2()->GetStackSym()->GetInstrDef();

            //The argout immediately before the InlineeStart will be the ArgOut for NewScObject
            //So we don't want to track the stack sym for this argout.- Skipping it here.
            if (instr->m_func->IsInlinedConstructor())
            {
                //PRE might introduce a second definition for the Src1. So assert for the opcode only when it has single definition.
                Assert(argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef() == nullptr ||
                    argOutInstr->GetSrc1()->GetStackSym()->GetInstrDef()->m_opcode == Js::OpCode::NewScObjectNoCtor);
                argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
            }
            if (formalsCount < actualsCount)
            {
                Js::ArgSlot extraActuals = actualsCount - formalsCount;

                //Skipping extra actuals passed
                for (Js::ArgSlot i = 0; i < extraActuals; i++)
                {
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
            }

            StackSym * undefinedSym = nullptr;
            // Pair each formal (walked last-to-first) with its ArgOut's stack sym.
            for (Js::ArgSlot param = formalsCount; param > 0; param--)
            {
                StackSym * argOutSym = nullptr;

                if (argOutInstr->GetSrc1())
                {
                    if (argOutInstr->GetSrc1()->IsRegOpnd())
                    {
                        argOutSym = argOutInstr->GetSrc1()->GetStackSym();
                    }
                    else
                    {
                        // We will always have ArgOut instr - so the source operand will not be removed.
                        // Copy the non-reg source into a fresh stack sym so the formal can be tracked by sym.
                        argOutSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = argOutInstr->GetSrc1();
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(argOutSym, TyVar, inlinerFunc);
                        IR::Instr * assignInstr = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignInstr);
                    }
                }

                Assert(!func->HasStackSymForFormal(param - 1));

                if (param <= actualsCount)
                {
                    Assert(argOutSym);
                    func->TrackStackSymForFormalIndex(param - 1, argOutSym);
                    argOutInstr = argOutInstr->GetSrc2()->GetStackSym()->GetInstrDef();
                }
                else
                {
                    /*When param is out of range of actuals count, load undefined*/
                    // TODO: saravind: This will insert undefined for each of the param not having an actual. - Clean up this by having a sym for undefined on func ?
                    Assert(formalsCount > actualsCount);
                    if (undefinedSym == nullptr)
                    {
                        // Lazily materialize a single sym holding 'undefined',
                        // shared by all formals that have no matching actual.
                        undefinedSym = StackSym::New(inlinerFunc);
                        IR::Opnd * srcOpnd = IR::AddrOpnd::New(inlinerFunc->GetScriptContextInfo()->GetUndefinedAddr(), IR::AddrOpndKindDynamicMisc, inlinerFunc);
                        IR::Opnd * dstOpnd = IR::RegOpnd::New(undefinedSym, TyVar, inlinerFunc);
                        IR::Instr * assignUndefined = IR::Instr::New(Js::OpCode::Ld_A, dstOpnd, srcOpnd, inlinerFunc);
                        instr->InsertBefore(assignUndefined);
                    }
                    func->TrackStackSymForFormalIndex(param - 1, undefinedSym);
                }
            }
        }
    }
}
// Track the arguments object and its aliases for the stack-arguments
// optimization. Records which instructions use the (potential) stack arguments
// object, tracks Ld_A/BytecodeArgOutCapture aliases, and conservatively calls
// CannotAllocateArgumentsObjectOnStack() for any use the optimization cannot
// handle.
void
GlobOpt::OptArguments(IR::Instr *instr)
{
    IR::Opnd* dst = instr->GetDst();
    IR::Opnd* src1 = instr->GetSrc1();
    IR::Opnd* src2 = instr->GetSrc2();

    TrackInstrsForScopeObjectRemoval(instr);

    if (!TrackArgumentsObject())
    {
        return;
    }

    // Heap-arguments load: start tracking the loaded arguments object, unless the
    // function has formals (InParamsCount != 1) and stack args are not enabled.
    if (instr->HasAnyLoadHeapArgsOpCode())
    {
#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
        if (instr->m_func->IsStackArgsEnabled())
        {
            if (instr->GetSrc1()->IsRegOpnd() && instr->m_func->GetJITFunctionBody()->GetInParamsCount() > 1)
            {
                StackSym * scopeObjSym = instr->GetSrc1()->GetStackSym();
                Assert(scopeObjSym);
                Assert(scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::InitCachedScope || scopeObjSym->GetInstrDef()->m_opcode == Js::OpCode::NewScopeObject);
                Assert(instr->m_func->GetScopeObjSym() == scopeObjSym);
                if (PHASE_VERBOSE_TRACE1(Js::StackArgFormalsOptPhase))
                {
                    Output::Print(_u("StackArgFormals : %s (%d) :Setting scopeObjSym in forward pass. \n"), instr->m_func->GetJITFunctionBody()->GetDisplayName(), instr->m_func->GetJITFunctionBody()->GetFunctionNumber());
                    Output::Flush();
                }
            }
        }
#endif
        if (instr->m_func->GetJITFunctionBody()->GetInParamsCount() != 1 && !instr->m_func->IsStackArgsEnabled())
        {
            CannotAllocateArgumentsObjectOnStack();
        }
        else
        {
            CurrentBlockData()->TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    // Keep track of arguments objects and its aliases
    // LdHeapArguments loads the arguments object and Ld_A tracks the aliases.
    if ((instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::BytecodeArgOutCapture) && (src1->IsRegOpnd() && CurrentBlockData()->IsArgumentsOpnd(src1)))
    {
        // In the debug mode, we don't want to optimize away the aliases. Since we may have to show them on the inspection.
        if (((!AreFromSameBytecodeFunc(src1->AsRegOpnd(), dst->AsRegOpnd()) || this->currentBlock->loop) && instr->m_opcode != Js::OpCode::BytecodeArgOutCapture) || this->func->IsJitInDebugMode())
        {
            CannotAllocateArgumentsObjectOnStack();
            return;
        }
        if(!dst->AsRegOpnd()->GetStackSym()->m_nonEscapingArgObjAlias)
        {
            CurrentBlockData()->TrackArgumentsSym(dst->AsRegOpnd());
        }
        return;
    }

    if (!CurrentBlockData()->TestAnyArgumentsSym())
    {
        // There are no syms to track yet, don't start tracking arguments sym.
        return;
    }

    // Avoid loop prepass
    if (this->currentBlock->loop && this->IsLoopPrePass())
    {
        return;
    }

    SymID id = 0;

    switch(instr->m_opcode)
    {
    case Js::OpCode::LdElemI_A:
    case Js::OpCode::TypeofElem:
    {
        // Indexed load: allowed when the base is the arguments object, but not
        // when the arguments object itself is used as the index.
        Assert(src1->IsIndirOpnd());
        IR::RegOpnd *indexOpnd = src1->AsIndirOpnd()->GetIndexOpnd();
        if (indexOpnd && CurrentBlockData()->IsArgumentsSymID(indexOpnd->m_sym->m_id))
        {
            // Pathological test cases such as a[arguments]
            CannotAllocateArgumentsObjectOnStack();
            return;
        }

        IR::RegOpnd *baseOpnd = src1->AsIndirOpnd()->GetBaseOpnd();
        id = baseOpnd->m_sym->m_id;
        if (CurrentBlockData()->IsArgumentsSymID(id))
        {
            instr->usesStackArgumentsObject = true;
        }
        break;
    }
    case Js::OpCode::LdLen_A:
    {
        Assert(src1->IsRegOpnd());
        if(CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }
        break;
    }
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    {
        if (CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }

        if (CurrentBlockData()->IsArgumentsOpnd(src1) &&
            src1->AsRegOpnd()->m_sym->GetInstrDef()->m_opcode == Js::OpCode::BytecodeArgOutCapture)
        {
            // Apply inlining results in such usage - this is to ignore this sym that is def'd by ByteCodeArgOutCapture
            // It's needed because we do not have block level merging of arguments object and this def due to inlining can turn off stack args opt.
            IR::Instr* builtinStart = instr->GetNextRealInstr();
            if (builtinStart->m_opcode == Js::OpCode::InlineBuiltInStart)
            {
                IR::Opnd* builtinOpnd = builtinStart->GetSrc1();
                if (builtinStart->GetSrc1()->IsAddrOpnd())
                {
                    Assert(builtinOpnd->AsAddrOpnd()->m_isFunction);

                    // Stop tracking the sym if the inlined built-in is Function.prototype.apply.
                    Js::BuiltinFunction builtinFunction = Js::JavascriptLibrary::GetBuiltInForFuncInfo(((FixedFieldInfo*)builtinOpnd->AsAddrOpnd()->m_metadata)->GetFuncInfoAddr(), func->GetThreadContextInfo());
                    if (builtinFunction == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
                    }
                }
                else if (builtinOpnd->IsRegOpnd())
                {
                    if (builtinOpnd->AsRegOpnd()->m_sym->m_builtInIndex == Js::BuiltinFunction::JavascriptFunction_Apply)
                    {
                        CurrentBlockData()->ClearArgumentsSym(src1->AsRegOpnd());
                    }
                }
            }
        }
        break;
    }
    case Js::OpCode::BailOnNotStackArgs:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::BytecodeArgOutUse:
    {
        if (src1 && CurrentBlockData()->IsArgumentsOpnd(src1))
        {
            instr->usesStackArgumentsObject = true;
        }
        break;
    }

    default:
        {
            // Super conservative here, if we see the arguments or any of its alias being used in any
            // other opcode just don't do this optimization. Revisit this to optimize further if we see any common
            // case is missed.

            if (src1)
            {
                if (src1->IsRegOpnd() || src1->IsSymOpnd() || src1->IsIndirOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(src1))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
            }

            if (src2)
            {
                if (src2->IsRegOpnd() || src2->IsSymOpnd() || src2->IsIndirOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(src2))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsCreated, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
            }

            // We should look at dst last to correctly handle cases where it's the same as one of the src operands.
            if (dst)
            {
                if (dst->IsIndirOpnd() || dst->IsSymOpnd())
                {
                    if (CurrentBlockData()->IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                }
                else if (dst->IsRegOpnd())
                {
                    if (this->currentBlock->loop && CurrentBlockData()->IsArgumentsOpnd(dst))
                    {
#ifdef PERF_HINT
                        if (PHASE_TRACE1(Js::PerfHintPhase))
                        {
                            WritePerfHint(PerfHints::HeapArgumentsModification, instr->m_func, instr->GetByteCodeOffset());
                        }
#endif
                        CannotAllocateArgumentsObjectOnStack();
                        return;
                    }
                    // The reg dst is being overwritten; it no longer aliases the arguments object.
                    CurrentBlockData()->ClearArgumentsSym(dst->AsRegOpnd());
                }
            }
        }
        break;
    }
    return;
}
  1599. void
  1600. GlobOpt::MarkArgumentsUsedForBranch(IR::Instr * instr)
  1601. {
  1602. // If it's a conditional branch instruction and the operand used for branching is one of the arguments
  1603. // to the function, tag the m_argUsedForBranch of the functionBody so that it can be used later for inlining decisions.
  1604. if (instr->IsBranchInstr() && !instr->AsBranchInstr()->IsUnconditional())
  1605. {
  1606. IR::BranchInstr * bInstr = instr->AsBranchInstr();
  1607. IR::Opnd *src1 = bInstr->GetSrc1();
  1608. IR::Opnd *src2 = bInstr->GetSrc2();
  1609. // These are used because we don't want to rely on src1 or src2 to always be the register/constant
  1610. IR::RegOpnd *regOpnd = nullptr;
  1611. if (!src2 && (instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A) && src1->IsRegOpnd())
  1612. {
  1613. regOpnd = src1->AsRegOpnd();
  1614. }
  1615. // We need to check for (0===arg) and (arg===0); this is especially important since some minifiers
  1616. // change all instances of one to the other.
  1617. else if (src2 && src2->IsConstOpnd() && src1->IsRegOpnd())
  1618. {
  1619. regOpnd = src1->AsRegOpnd();
  1620. }
  1621. else if (src2 && src2->IsRegOpnd() && src1->IsConstOpnd())
  1622. {
  1623. regOpnd = src2->AsRegOpnd();
  1624. }
  1625. if (regOpnd != nullptr)
  1626. {
  1627. if (regOpnd->m_sym->IsSingleDef())
  1628. {
  1629. IR::Instr * defInst = regOpnd->m_sym->GetInstrDef();
  1630. IR::Opnd *defSym = defInst->GetSrc1();
  1631. if (defSym && defSym->IsSymOpnd() && defSym->AsSymOpnd()->m_sym->IsStackSym()
  1632. && defSym->AsSymOpnd()->m_sym->AsStackSym()->IsParamSlotSym())
  1633. {
  1634. uint16 param = defSym->AsSymOpnd()->m_sym->AsStackSym()->GetParamSlotNum();
  1635. // We only support functions with 13 arguments to ensure optimal size of callSiteInfo
  1636. if (param < Js::Constants::MaximumArgumentCountForConstantArgumentInlining)
  1637. {
  1638. this->func->GetJITOutput()->SetArgUsedForBranch((uint8)param);
  1639. }
  1640. }
  1641. }
  1642. }
  1643. }
  1644. }
  1645. const InductionVariable*
  1646. GlobOpt::GetInductionVariable(SymID sym, Loop *loop)
  1647. {
  1648. if (loop->inductionVariables)
  1649. {
  1650. for (auto it = loop->inductionVariables->GetIterator(); it.IsValid(); it.MoveNext())
  1651. {
  1652. InductionVariable* iv = &it.CurrentValueReference();
  1653. if (!iv->IsChangeDeterminate() || !iv->IsChangeUnidirectional())
  1654. {
  1655. continue;
  1656. }
  1657. if (iv->Sym()->m_id == sym)
  1658. {
  1659. return iv;
  1660. }
  1661. }
  1662. }
  1663. return nullptr;
  1664. }
  1665. bool
  1666. GlobOpt::IsSymIDInductionVariable(SymID sym, Loop *loop)
  1667. {
  1668. return GetInductionVariable(sym, loop) != nullptr;
  1669. }
  1670. SymID
  1671. GlobOpt::GetVarSymID(StackSym *sym)
  1672. {
  1673. if (sym && sym->m_type != TyVar)
  1674. {
  1675. sym = sym->GetVarEquivSym(nullptr);
  1676. }
  1677. if (!sym)
  1678. {
  1679. return Js::Constants::InvalidSymID;
  1680. }
  1681. return sym->m_id;
  1682. }
// Validate that an element access is eligible for the MemOp (memset/memcopy)
// optimization: the base must be an (optionally typed) array with bound checks
// already eliminated, invariant in the loop, and the index must be an induction
// variable changing by exactly 1 that matches any prior MemOp candidate's index.
// NOTE(review): 'isMemset' is not consulted in this body — presumably reserved
// for memset-specific checks; confirm against callers.
bool
GlobOpt::IsAllowedForMemOpt(IR::Instr* instr, bool isMemset, IR::RegOpnd *baseOpnd, IR::Opnd *indexOpnd)
{
    Assert(instr);
    if (!baseOpnd || !indexOpnd)
    {
        return false;
    }
    Loop* loop = this->currentBlock->loop;

    const ValueType baseValueType(baseOpnd->GetValueType());
    const ValueType indexValueType(indexOpnd->GetValueType());

    // Validate the array and index types
    if (
        !indexValueType.IsInt() ||
            !(
                baseValueType.IsTypedIntOrFloatArray() ||
                baseValueType.IsArray()
            )
        )
    {
#if DBG_DUMP
        wchar indexValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        indexValueType.ToString(indexValueTypeStr);
        wchar baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseValueType.ToString(baseValueTypeStr);
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index[%s] or Array[%s] value type is invalid"), indexValueTypeStr, baseValueTypeStr);
#endif
        return false;
    }

    // The following is conservative and works around a bug in induction variable analysis.
    if (baseOpnd->IsArrayRegOpnd())
    {
        IR::ArrayRegOpnd *baseArrayOp = baseOpnd->AsArrayRegOpnd();
        bool hasBoundChecksRemoved = (
            baseArrayOp->EliminatedLowerBoundCheck() &&
            baseArrayOp->EliminatedUpperBoundCheck() &&
            !instr->extractedUpperBoundCheckWithoutHoisting &&
            !instr->loadedArrayHeadSegment &&
            !instr->loadedArrayHeadSegmentLength
            );
        if (!hasBoundChecksRemoved)
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("Missing bounds check optimization"));
            return false;
        }
    }

    if (!baseValueType.IsTypedArray())
    {
        // Check if the instr can kill the value type of the array
        JsArrayKills arrayKills = CheckJsArrayKills(instr);
        if (arrayKills.KillsValueType(baseValueType))
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The array (s%d) can lose its value type"), GetVarSymID(baseOpnd->GetStackSym()));
            return false;
        }
    }

    // The base must be loop-invariant: MemOp replaces the whole loop with one
    // bulk operation on a single array.
    if (!this->OptIsInvariant(baseOpnd, this->currentBlock, loop, CurrentBlockData()->FindValue(baseOpnd->m_sym), false, true))
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Base (s%d) is not invariant"), GetVarSymID(baseOpnd->GetStackSym()));
        return false;
    }

    // Validate the index
    Assert(indexOpnd->GetStackSym());
    SymID indexSymID = GetVarSymID(indexOpnd->GetStackSym());
    const InductionVariable* iv = GetInductionVariable(indexSymID, loop);
    if (!iv)
    {
        // If the index is not an induction variable return
        TRACE_MEMOP_VERBOSE(loop, instr, _u("Index (s%d) is not an induction variable"), indexSymID);
        return false;
    }

    Assert(iv->IsChangeDeterminate() && iv->IsChangeUnidirectional());
    const IntConstantBounds & bounds = iv->ChangeBounds();

    if (loop->memOpInfo)
    {
        // Only accept induction variables that increments by 1
        Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
        inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(indexSymID, inductionVariableChangeInfo);

        // Change must be exactly +1 or -1 per iteration (lower == upper bound).
        if (
            (bounds.LowerBound() != 1 && bounds.LowerBound() != -1) ||
            (bounds.UpperBound() != bounds.LowerBound()) ||
            inductionVariableChangeInfo.unroll > 1 // Must be 0 (not seen yet) or 1 (already seen)
            )
        {
            TRACE_MEMOP_VERBOSE(loop, instr, _u("The index does not change by 1: %d><%d, unroll=%d"), bounds.LowerBound(), bounds.UpperBound(), inductionVariableChangeInfo.unroll);
            return false;
        }

        // Check if the index is the same in all MemOp optimization in this loop
        if (!loop->memOpInfo->candidates->Empty())
        {
            Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();

            // All MemOp operations within the same loop must use the same index
            if (previousCandidate->index != indexSymID)
            {
                TRACE_MEMOP_VERBOSE(loop, instr, _u("The index is not the same as other MemOp in the loop"));
                return false;
            }
        }
    }

    return true;
}
  1785. bool
  1786. GlobOpt::CollectMemcopyLdElementI(IR::Instr *instr, Loop *loop)
  1787. {
  1788. Assert(instr->GetSrc1()->IsIndirOpnd());
  1789. IR::IndirOpnd *src1 = instr->GetSrc1()->AsIndirOpnd();
  1790. IR::Opnd *indexOpnd = src1->GetIndexOpnd();
  1791. IR::RegOpnd *baseOpnd = src1->GetBaseOpnd()->AsRegOpnd();
  1792. SymID baseSymID = GetVarSymID(baseOpnd->GetStackSym());
  1793. if (!IsAllowedForMemOpt(instr, false, baseOpnd, indexOpnd))
  1794. {
  1795. return false;
  1796. }
  1797. SymID inductionSymID = GetVarSymID(indexOpnd->GetStackSym());
  1798. Assert(IsSymIDInductionVariable(inductionSymID, loop));
  1799. loop->EnsureMemOpVariablesInitialized();
  1800. bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
  1801. IR::Opnd * dst = instr->GetDst();
  1802. if (!dst->IsRegOpnd() || !dst->AsRegOpnd()->GetStackSym()->IsSingleDef())
  1803. {
  1804. return false;
  1805. }
  1806. Loop::MemCopyCandidate* memcopyInfo = memcopyInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemCopyCandidate);
  1807. memcopyInfo->ldBase = baseSymID;
  1808. memcopyInfo->ldCount = 1;
  1809. memcopyInfo->count = 0;
  1810. memcopyInfo->bIndexAlreadyChanged = isIndexPreIncr;
  1811. memcopyInfo->base = Js::Constants::InvalidSymID; //need to find the stElem first
  1812. memcopyInfo->index = inductionSymID;
  1813. memcopyInfo->transferSym = dst->AsRegOpnd()->GetStackSym();
  1814. loop->memOpInfo->candidates->Prepend(memcopyInfo);
  1815. return true;
  1816. }
bool
GlobOpt::CollectMemsetStElementI(IR::Instr *instr, Loop *loop)
{
    // Try to record this StElemI as a memset candidate: a store of a
    // loop-invariant value (constant or invariant sym) at an induction-variable
    // index. Returns true if a MemSetCandidate was queued on the loop.
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();
    if (!IsAllowedForMemOpt(instr, true, baseOp, indexOp))
    {
        return false;
    }
    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());
    IR::Opnd *srcDef = instr->GetSrc1();
    StackSym *srcSym = nullptr;
    // A register source is acceptable only if it is invariant in this loop.
    if (srcDef->IsRegOpnd())
    {
        IR::RegOpnd* opnd = srcDef->AsRegOpnd();
        if (this->OptIsInvariant(opnd, this->currentBlock, loop, CurrentBlockData()->FindValue(opnd->m_sym), true, true))
        {
            srcSym = opnd->GetStackSym();
        }
    }
    // Capture a constant source (float, int, or address) when present; if the
    // source is neither a constant nor an invariant sym, reject the candidate.
    BailoutConstantValue constant = {TyIllegal, 0};
    if (srcDef->IsFloatConstOpnd())
    {
        constant.InitFloatConstValue(srcDef->AsFloatConstOpnd()->m_value);
    }
    else if (srcDef->IsIntConstOpnd())
    {
        constant.InitIntConstValue(srcDef->AsIntConstOpnd()->GetValue(), srcDef->AsIntConstOpnd()->GetType());
    }
    else if (srcDef->IsAddrOpnd())
    {
        constant.InitVarConstValue(srcDef->AsAddrOpnd()->m_address);
    }
    else if(!srcSym)
    {
        TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Source is not an invariant"));
        return false;
    }
    // Process the Index Operand
    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));
    loop->EnsureMemOpVariablesInitialized();
    // Whether the induction variable has already been seen changing before this
    // store (index is pre-incremented relative to the store).
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
    Loop::MemSetCandidate* memsetInfo = JitAnewStruct(this->func->GetTopFunc()->m_fg->alloc, Loop::MemSetCandidate);
    memsetInfo->base = baseSymID;
    memsetInfo->index = inductionSymID;
    memsetInfo->constant = constant;
    memsetInfo->srcSym = srcSym;
    memsetInfo->count = 1;
    memsetInfo->bIndexAlreadyChanged = isIndexPreIncr;
    loop->memOpInfo->candidates->Prepend(memsetInfo);
    return true;
}
bool GlobOpt::CollectMemcopyStElementI(IR::Instr *instr, Loop *loop)
{
    // Try to complete a memcopy candidate: this StElemI must store the
    // single-def transfer sym recorded by a preceding CollectMemcopyLdElementI,
    // at the same (unchanged) induction-variable index.
    if (!loop->memOpInfo || loop->memOpInfo->candidates->Empty())
    {
        // There is no ldElem matching this stElem
        return false;
    }
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();
    IR::Opnd *indexOp = dst->GetIndexOpnd();
    IR::RegOpnd *baseOp = dst->GetBaseOpnd()->AsRegOpnd();
    SymID baseSymID = GetVarSymID(baseOp->GetStackSym());
    if (!instr->GetSrc1()->IsRegOpnd())
    {
        return false;
    }
    IR::RegOpnd* src1 = instr->GetSrc1()->AsRegOpnd();
    if (!src1->GetIsDead())
    {
        // This must be the last use of the register.
        // It will invalidate `var m = a[i]; b[i] = m;` but this is not a very interesting case.
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Source (s%d) is still alive after StElemI"), baseSymID);
        return false;
    }
    if (!IsAllowedForMemOpt(instr, false, baseOp, indexOp))
    {
        return false;
    }
    SymID srcSymID = GetVarSymID(src1->GetStackSym());
    // Prepare the memcopyCandidate entry
    Loop::MemOpCandidate* previousCandidate = loop->memOpInfo->candidates->Head();
    if (!previousCandidate->IsMemCopy())
    {
        return false;
    }
    Loop::MemCopyCandidate* memcopyInfo = previousCandidate->AsMemCopy();
    // The previous candidate has to have been created by the matching ldElem:
    // its store base must still be unset and its transfer sym must be what we
    // are storing here.
    if (
        memcopyInfo->base != Js::Constants::InvalidSymID ||
        GetVarSymID(memcopyInfo->transferSym) != srcSymID
        )
    {
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("No matching LdElem found (s%d)"), baseSymID);
        return false;
    }
    Assert(indexOp->GetStackSym());
    SymID inductionSymID = GetVarSymID(indexOp->GetStackSym());
    Assert(IsSymIDInductionVariable(inductionSymID, loop));
    // Load and store must observe the index in the same state (both before or
    // both after the induction variable's increment/decrement).
    bool isIndexPreIncr = loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID);
    if (isIndexPreIncr != memcopyInfo->bIndexAlreadyChanged)
    {
        // The index changed between the load and the store
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Index value changed between ldElem and stElem"));
        return false;
    }
    // Consider: Can we remove the count field?
    memcopyInfo->count++;
    memcopyInfo->base = baseSymID;
    return true;
}
  1933. bool
  1934. GlobOpt::CollectMemOpLdElementI(IR::Instr *instr, Loop *loop)
  1935. {
  1936. Assert(instr->m_opcode == Js::OpCode::LdElemI_A);
  1937. return (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyLdElementI(instr, loop));
  1938. }
  1939. bool
  1940. GlobOpt::CollectMemOpStElementI(IR::Instr *instr, Loop *loop)
  1941. {
  1942. Assert(instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict);
  1943. Assert(instr->GetSrc1());
  1944. return (!PHASE_OFF(Js::MemSetPhase, this->func) && CollectMemsetStElementI(instr, loop)) ||
  1945. (!PHASE_OFF(Js::MemCopyPhase, this->func) && CollectMemcopyStElementI(instr, loop));
  1946. }
bool
GlobOpt::CollectMemOpInfo(IR::Instr *instrBegin, IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    // Inspect one instruction (plus any instructions materialized between
    // instrBegin and instr by earlier optimizations) for memop eligibility.
    // Collects ld/st element candidates and induction-variable updates; sets
    // loop->doMemOp = false and returns false as soon as anything disqualifies
    // the loop.
    Assert(this->currentBlock->loop);
    Loop *loop = this->currentBlock->loop;
    if (!loop->blockList.HasTwo())
    {
        // We support memcopy and memset for loops which have only two blocks.
        return false;
    }
    if (loop->GetLoopFlags().isInterpreted && !loop->GetLoopFlags().memopMinCountReached)
    {
        TRACE_MEMOP_VERBOSE(loop, instr, _u("minimum loop count not reached"))
        loop->doMemOp = false;
        return false;
    }
    Assert(loop->doMemOp);
    bool isIncr = true, isChangedByOne = false;
    switch (instr->m_opcode)
    {
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
        if (!CollectMemOpStElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::LdElemI_A:
        if (!CollectMemOpLdElementI(instr, loop))
        {
            loop->doMemOp = false;
            return false;
        }
        break;
    case Js::OpCode::Decr_A:
        isIncr = false;
        // intentional fallthrough: Decr/Incr both change the variable by one
    case Js::OpCode::Incr_A:
        isChangedByOne = true;
        goto MemOpCheckInductionVariable;
    case Js::OpCode::Sub_I4:
    case Js::OpCode::Sub_A:
        isIncr = false;
        // intentional fallthrough: Sub/Add share the induction-variable check
    case Js::OpCode::Add_A:
    case Js::OpCode::Add_I4:
    {
MemOpCheckInductionVariable:
        // The symbol being updated may be in src1 or src2 (e.g. i = 1 + i).
        StackSym *sym = instr->GetSrc1()->GetStackSym();
        if (!sym)
        {
            sym = instr->GetSrc2()->GetStackSym();
        }
        SymID inductionSymID = GetVarSymID(sym);
        if (IsSymIDInductionVariable(inductionSymID, this->currentBlock->loop))
        {
            // For Add/Sub, determine whether the step is the constant 1
            // (either as a known constant value or as an IntConstOpnd).
            if (!isChangedByOne)
            {
                IR::Opnd *src1, *src2;
                src1 = instr->GetSrc1();
                src2 = instr->GetSrc2();
                if (src2->IsRegOpnd())
                {
                    Value *val = CurrentBlockData()->FindValue(src2->AsRegOpnd()->m_sym);
                    if (val)
                    {
                        ValueInfo *vi = val->GetValueInfo();
                        int constValue;
                        if (vi && vi->TryGetIntConstantValue(&constValue))
                        {
                            if (constValue == 1)
                            {
                                isChangedByOne = true;
                            }
                        }
                    }
                }
                else if (src2->IsIntConstOpnd())
                {
                    if (src2->AsIntConstOpnd()->GetValue() == 1)
                    {
                        isChangedByOne = true;
                    }
                }
            }
            loop->EnsureMemOpVariablesInitialized();
            if (!isChangedByOne)
            {
                // Non-unit step: record the variable with an invalid unroll
                // factor so later validation rejects it as a memop index.
                Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { Js::Constants::InvalidLoopUnrollFactor, 0 };
                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                }
                else
                {
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                }
            }
            else
            {
                // Unit step: count one more change (unroll) and record its
                // direction for this induction variable.
                if (!loop->memOpInfo->inductionVariableChangeInfoMap->ContainsKey(inductionSymID))
                {
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 1, isIncr };
                    loop->memOpInfo->inductionVariableChangeInfoMap->Add(inductionSymID, inductionVariableChangeInfo);
                }
                else
                {
                    Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };
                    inductionVariableChangeInfo = loop->memOpInfo->inductionVariableChangeInfoMap->Lookup(inductionSymID, inductionVariableChangeInfo);
                    inductionVariableChangeInfo.unroll++;
                    inductionVariableChangeInfo.isIncremental = isIncr;
                    loop->memOpInfo->inductionVariableChangeInfoMap->Item(inductionSymID, inductionVariableChangeInfo);
                }
            }
            break;
        }
        // Fallthrough if not an induction variable
    }
    default:
        // Any other instruction (including new ones inserted between instrBegin
        // and instr) must be harmless for memop, and must not consume a pending
        // memcopy transfer sym before its matching StElemI is seen.
        FOREACH_INSTR_IN_RANGE(chkInstr, instrBegin->m_next, instr)
        {
            if (IsInstrInvalidForMemOp(chkInstr, loop, src1Val, src2Val))
            {
                loop->doMemOp = false;
                return false;
            }
            // Make sure this instruction doesn't use the memcopy transfer sym before it is checked by StElemI
            if (loop->memOpInfo && !loop->memOpInfo->candidates->Empty())
            {
                Loop::MemOpCandidate* prevCandidate = loop->memOpInfo->candidates->Head();
                if (prevCandidate->IsMemCopy())
                {
                    Loop::MemCopyCandidate* memcopyCandidate = prevCandidate->AsMemCopy();
                    if (memcopyCandidate->base == Js::Constants::InvalidSymID)
                    {
                        if (chkInstr->HasSymUse(memcopyCandidate->transferSym))
                        {
                            loop->doMemOp = false;
                            TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, chkInstr, _u("Found illegal use of LdElemI value(s%d)"), GetVarSymID(memcopyCandidate->transferSym));
                            return false;
                        }
                    }
                }
            }
        }
        NEXT_INSTR_IN_RANGE;
    }
    return true;
}
  2095. bool
  2096. GlobOpt::IsInstrInvalidForMemOp(IR::Instr *instr, Loop *loop, Value *src1Val, Value *src2Val)
  2097. {
  2098. // List of instruction that are valid with memop (ie: instr that gets removed if memop is emitted)
  2099. if (
  2100. this->currentBlock != loop->GetHeadBlock() &&
  2101. !instr->IsLabelInstr() &&
  2102. instr->IsRealInstr() &&
  2103. instr->m_opcode != Js::OpCode::IncrLoopBodyCount &&
  2104. instr->m_opcode != Js::OpCode::StLoopBodyCount &&
  2105. instr->m_opcode != Js::OpCode::Ld_A &&
  2106. instr->m_opcode != Js::OpCode::Ld_I4 &&
  2107. !(instr->IsBranchInstr() && instr->AsBranchInstr()->IsUnconditional())
  2108. )
  2109. {
  2110. TRACE_MEMOP_VERBOSE(loop, instr, _u("Instruction not accepted for memop"));
  2111. return true;
  2112. }
  2113. // Check prev instr because it could have been added by an optimization and we won't see it here.
  2114. if (OpCodeAttr::FastFldInstr(instr->m_opcode) || (instr->m_prev && OpCodeAttr::FastFldInstr(instr->m_prev->m_opcode)))
  2115. {
  2116. // Refuse any operations interacting with Fields
  2117. TRACE_MEMOP_VERBOSE(loop, instr, _u("Field interaction detected"));
  2118. return true;
  2119. }
  2120. if (Js::OpCodeUtil::GetOpCodeLayout(instr->m_opcode) == Js::OpLayoutType::ElementSlot)
  2121. {
  2122. // Refuse any operations interacting with slots
  2123. TRACE_MEMOP_VERBOSE(loop, instr, _u("Slot interaction detected"));
  2124. return true;
  2125. }
  2126. if (this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val))
  2127. {
  2128. TRACE_MEMOP_VERBOSE(loop, instr, _u("Implicit call bailout detected"));
  2129. return true;
  2130. }
  2131. return false;
  2132. }
  2133. void
  2134. GlobOpt::TryReplaceLdLen(IR::Instr *& instr)
  2135. {
  2136. // Change LdFld on arrays, strings, and 'arguments' to LdLen when we're accessing the .length field
  2137. if ((instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->m_opcode == Js::OpCode::ProfiledLdFld) || instr->m_opcode == Js::OpCode::LdFld || instr->m_opcode == Js::OpCode::ScopedLdFld)
  2138. {
  2139. IR::SymOpnd * opnd = instr->GetSrc1()->AsSymOpnd();
  2140. Sym *sym = opnd->m_sym;
  2141. if (sym->IsPropertySym())
  2142. {
  2143. PropertySym *originalPropertySym = sym->AsPropertySym();
  2144. // only on .length
  2145. if (this->lengthEquivBv != nullptr && this->lengthEquivBv->Test(originalPropertySym->m_id))
  2146. {
  2147. IR::RegOpnd* newopnd = IR::RegOpnd::New(originalPropertySym->m_stackSym, IRType::TyVar, instr->m_func);
  2148. ValueInfo *const objectValueInfo = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym)->GetValueInfo();
  2149. // Only for things we'd emit a fast path for
  2150. if (
  2151. objectValueInfo->IsLikelyAnyArray() ||
  2152. objectValueInfo->HasHadStringTag() ||
  2153. objectValueInfo->IsLikelyString() ||
  2154. newopnd->IsArgumentsObject() ||
  2155. (CurrentBlockData()->argObjSyms && CurrentBlockData()->IsArgumentsOpnd(newopnd))
  2156. )
  2157. {
  2158. // We need to properly transfer over the information from the old operand, which is
  2159. // a SymOpnd, to the new one, which is a RegOpnd. Unfortunately, the types mean the
  2160. // normal copy methods won't work here, so we're going to directly copy data.
  2161. newopnd->SetIsJITOptimizedReg(opnd->GetIsJITOptimizedReg());
  2162. newopnd->SetValueType(objectValueInfo->Type());
  2163. newopnd->SetIsDead(opnd->GetIsDead());
  2164. // Now that we have the operand we need, we can go ahead and make the new instr.
  2165. IR::Instr *newinstr = IR::Instr::New(Js::OpCode::LdLen_A, instr->m_func);
  2166. instr->TransferTo(newinstr);
  2167. newinstr->UnlinkSrc1();
  2168. newinstr->SetSrc1(newopnd);
  2169. instr->InsertAfter(newinstr);
  2170. instr->Remove();
  2171. instr = newinstr;
  2172. }
  2173. }
  2174. }
  2175. }
  2176. }
IR::Instr *
GlobOpt::OptInstr(IR::Instr *&instr, bool* isInstrRemoved)
{
    // Main per-instruction driver of the global optimizer. Optimizes sources,
    // runs peeps/CSE/type specialization, optimizes the destination, handles
    // bailout bookkeeping, invariant hoisting, and memop collection.
    // Returns the next instruction to process; sets *isInstrRemoved when the
    // current instruction was removed from the block.
    Assert(instr->m_func->IsTopFunc() || instr->m_func->isGetterSetter || instr->m_func->callSiteIdInParentFunc != UINT16_MAX);
    IR::Opnd *src1, *src2;
    Value *src1Val = nullptr, *src2Val = nullptr, *dstVal = nullptr;
    Value *src1IndirIndexVal = nullptr, *dstIndirIndexVal = nullptr;
    IR::Instr *instrPrev = instr->m_prev;
    IR::Instr *instrNext = instr->m_next;

    // Track the current EH region when optimizing try/catch/finally code.
    if (instr->IsLabelInstr() && this->func->HasTry() && this->func->DoOptimizeTry())
    {
        this->currentRegion = instr->AsLabelInstr()->GetRegion();
        Assert(this->currentRegion);
    }

    // Int-overflow-ignoring ranges: the helper may decide this instruction is
    // no longer needed outside the prepass.
    if(PrepareForIgnoringIntOverflow(instr))
    {
        if(!IsLoopPrePass())
        {
            *isInstrRemoved = true;
            currentBlock->RemoveInstr(instr);
        }
        return instrNext;
    }

    // Pseudo/bookkeeping instructions are not optimized.
    if (!instr->IsRealInstr() || instr->IsByteCodeUsesInstr() || instr->m_opcode == Js::OpCode::Conv_Bool)
    {
        return instrNext;
    }

    if (instr->m_opcode == Js::OpCode::Yield)
    {
        // TODO[generators][ianhall]: Can this and the FillBailOutInfo call below be moved to after Src1 and Src2 so that Yield can be optimized right up to the actual yield?
        CurrentBlockData()->KillStateForGeneratorYield();
    }

    // Change LdFld on arrays, strings, and 'arguments' to LdLen when we're accessing the .length field
    this->TryReplaceLdLen(instr);

    // Consider: Do we ever get post-op bailout here, and if so is the FillBailOutInfo call in the right place?
    if (instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        this->FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
    }

    this->instrCountSinceLastCleanUp++;

    instr = this->PreOptPeep(instr);

    this->OptArguments(instr);

    //StackArguments Optimization - We bail out if the index is out of range of actuals.
    if ((instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem) &&
        instr->DoStackArgsOpt(this->func) && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, IR::BailOnStackArgsOutOfActualsRange);
    }

#if DBG
    // Snapshot byte-code sym uses so we can verify/compensate after optimization.
    PropertySym *propertySymUseBefore = nullptr;
    Assert(this->byteCodeUses == nullptr);
    this->byteCodeUsesBeforeOpt->ClearAll();
    GlobOpt::TrackByteCodeSymUsed(instr, this->byteCodeUsesBeforeOpt, &propertySymUseBefore);
    Assert(noImplicitCallUsesToInsert->Count() == 0);
#endif

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    // Optimize sources (value numbering, copy prop, etc.) before the dst.
    src1 = instr->GetSrc1();
    src2 = instr->GetSrc2();
    if (src1)
    {
        src1Val = this->OptSrc(src1, &instr, &src1IndirIndexVal);
        instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
        if (src2)
        {
            src2Val = this->OptSrc(src2, &instr);
        }
    }
    // An indir dst has source-like components (base/index) that need OptSrc too.
    if(instr->GetDst() && instr->GetDst()->IsIndirOpnd())
    {
        this->OptSrc(instr->GetDst(), &instr, &dstIndirIndexVal);
    }

    MarkArgumentsUsedForBranch(instr);
    CSEOptimize(this->currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal);
    OptimizeChecks(instr);
    OptArraySrc(&instr);
    OptNewScObject(&instr, src1Val);

    instr = this->OptPeep(instr, src1Val, src2Val);

    // Remove instructions peeped down to Nop, or CheckThis on a known-safe 'this'.
    if (instr->m_opcode == Js::OpCode::Nop ||
        (instr->m_opcode == Js::OpCode::CheckThis &&
        instr->GetSrc1()->IsRegOpnd() &&
        instr->GetSrc1()->AsRegOpnd()->m_sym->m_isSafeThis))
    {
        instrNext = instr->m_next;
        InsertNoImplicitCallUses(instr);
        if (this->byteCodeUses)
        {
            this->InsertByteCodeUses(instr);
        }
        *isInstrRemoved = true;
        this->currentBlock->RemoveInstr(instr);
        return instrNext;
    }
    else if (instr->m_opcode == Js::OpCode::GetNewScObject && !this->IsLoopPrePass() && src1Val->GetValueInfo()->IsPrimitive())
    {
        // Constructor returned (src1) a primitive value, so fold this into "dst = Ld_A src2", where src2 is the new object that
        // was passed into the constructor as its 'this' parameter
        instr->FreeSrc1();
        instr->SetSrc1(instr->UnlinkSrc2());
        instr->m_opcode = Js::OpCode::Ld_A;
        src1Val = src2Val;
        src2Val = nullptr;
    }
    else if ((instr->m_opcode == Js::OpCode::TryCatch && this->func->DoOptimizeTry()) || (instr->m_opcode == Js::OpCode::TryFinally && this->func->DoOptimizeTry()))
    {
        ProcessTryHandler(instr);
    }
    else if (instr->m_opcode == Js::OpCode::BrOnException || instr->m_opcode == Js::OpCode::BrOnNoException)
    {
        if (this->ProcessExceptionHandlingEdges(instr))
        {
            *isInstrRemoved = true;
            return instrNext;
        }
    }

    // Switch-opt bailout kinds are handled before general type specialization.
    bool isAlreadyTypeSpecialized = false;
    if (!IsLoopPrePass() && instr->HasBailOutInfo())
    {
        if (instr->GetBailOutKind() == IR::BailOutExpectingInteger)
        {
            isAlreadyTypeSpecialized = TypeSpecializeBailoutExpectedInteger(instr, src1Val, &dstVal);
        }
        else if (instr->GetBailOutKind() == IR::BailOutExpectingString)
        {
            if (instr->GetSrc1()->IsRegOpnd())
            {
                if (!src1Val || !src1Val->GetValueInfo()->IsLikelyString())
                {
                    // Disable SwitchOpt if the source is definitely not a string - This may be realized only in Globopt
                    Assert(IsSwitchOptEnabled());
                    throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
                }
            }
        }
    }

    bool forceInvariantHoisting = false;
    const bool ignoreIntOverflowInRangeForInstr = instr->ignoreIntOverflowInRange; // Save it since the instr can change
    if (!isAlreadyTypeSpecialized)
    {
        bool redoTypeSpec;
        instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);
        // Type specialization may request one retry (e.g. after changing the instr).
        if(redoTypeSpec && instr->m_opcode != Js::OpCode::Nop)
        {
            forceInvariantHoisting = false;
            instr = this->TypeSpecialization(instr, &src1Val, &src2Val, &dstVal, &redoTypeSpec, &forceInvariantHoisting);
            Assert(!redoTypeSpec);
        }
        if (instr->m_opcode == Js::OpCode::Nop)
        {
            InsertNoImplicitCallUses(instr);
            if (this->byteCodeUses)
            {
                this->InsertByteCodeUses(instr);
            }
            instrNext = instr->m_next;
            *isInstrRemoved = true;
            this->currentBlock->RemoveInstr(instr);
            return instrNext;
        }
    }

    if (ignoreIntOverflowInRangeForInstr)
    {
        VerifyIntSpecForIgnoringIntOverflow(instr);
    }

    // Track calls after any pre-op bailouts have been inserted before the call, because they will need to restore out params.
    this->TrackCalls(instr);

    if (instr->GetSrc1())
    {
        this->UpdateObjPtrValueType(instr->GetSrc1(), instr);
    }

    IR::Opnd *dst = instr->GetDst();
    if (dst)
    {
        // Copy prop dst uses and mark live/available type syms before tracking kills.
        CopyPropDstUses(dst, instr, src1Val);
    }

    // Track mark temp object before we process the dst so we can generate pre-op bailout
    instr = this->TrackMarkTempObject(instrPrev->m_next, instr);

    bool removed = OptTagChecks(instr);
    if (removed)
    {
        *isInstrRemoved = true;
        return instrNext;
    }

    dstVal = this->OptDst(&instr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);
    dst = instr->GetDst();

    instrNext = instr->m_next;
    if (dst)
    {
        if (this->func->HasTry() && this->func->DoOptimizeTry())
        {
            this->InsertToVarAtDefInTryRegion(instr, dst);
        }
        instr = this->SetTypeCheckBailOut(dst, instr, nullptr);
        this->UpdateObjPtrValueType(dst, instr);
    }

    // Compute byte-code sym uses remaining after optimization so fake uses can
    // be inserted for anything the optimization removed (needed for bailout).
    BVSparse<JitArenaAllocator> instrByteCodeStackSymUsedAfter(this->alloc);
    PropertySym *propertySymUseAfter = nullptr;
    if (this->byteCodeUses != nullptr)
    {
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
    }
#if DBG
    else
    {
        GlobOpt::TrackByteCodeSymUsed(instr, &instrByteCodeStackSymUsedAfter, &propertySymUseAfter);
        instrByteCodeStackSymUsedAfter.Equal(this->byteCodeUsesBeforeOpt);
        Assert(propertySymUseAfter == propertySymUseBefore);
    }
#endif

    bool isHoisted = false;
    if (this->currentBlock->loop && !this->IsLoopPrePass())
    {
        isHoisted = this->TryHoistInvariant(instr, this->currentBlock, dstVal, src1Val, src2Val, true, false, forceInvariantHoisting);
    }

    src1 = instr->GetSrc1();
    if (!this->IsLoopPrePass() && src1)
    {
        // instr const, nonConst => canonicalize by swapping operands
        // This simplifies lowering. (somewhat machine dependent)
        // Note that because of Var overflows, src1 may not have been constant prop'd to an IntConst
        this->PreLowerCanonicalize(instr, &src1Val, &src2Val);
    }

    // Collect memop (memset/memcopy) info for eligible loops in the real pass.
    if (!PHASE_OFF(Js::MemOpPhase, this->func) &&
        !isHoisted &&
        !(instr->IsJitProfilingInstr()) &&
        this->currentBlock->loop && !IsLoopPrePass() &&
        !func->IsJitInDebugMode() &&
        (func->HasProfileInfo() && !func->GetReadOnlyProfileInfo()->IsMemOpDisabled()) &&
        this->currentBlock->loop->doMemOp)
    {
        CollectMemOpInfo(instrPrev, instr, src1Val, src2Val);
    }

    InsertNoImplicitCallUses(instr);
    if (this->byteCodeUses != nullptr)
    {
        // Optimization removed some uses from the instruction.
        // Need to insert fake uses so we can get the correct live register to restore in bailout.
        this->byteCodeUses->Minus(&instrByteCodeStackSymUsedAfter);
        if (this->propertySymUse == propertySymUseAfter)
        {
            this->propertySymUse = nullptr;
        }
        this->InsertByteCodeUses(instr);
    }

    // Add implicit-call bailout where needed: extend an existing pre-op bailout,
    // generate a new pre-op one when required, or capture state post-op.
    if (!this->IsLoopPrePass() && !isHoisted && this->IsImplicitCallBailOutCurrentlyNeeded(instr, src1Val, src2Val))
    {
        IR::BailOutKind kind = IR::BailOutOnImplicitCalls;
        if(instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutInfo()->bailOutOffset == instr->GetByteCodeOffset());
            const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if((bailOutKind & ~IR::BailOutKindBits) != IR::BailOutOnImplicitCallsPreOp)
            {
                Assert(!(bailOutKind & ~IR::BailOutKindBits));
                instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
            }
        }
        else if (instr->forcePreOpBailOutIfNeeded || this->isRecursiveCallOnLandingPad)
        {
            // We can't have a byte code reg slot as dst to generate a
            // pre-op implicit call after we have processed the dst.
            // Consider: This might miss an opportunity to use a copy prop sym to restore
            // some other byte code reg if the dst is that copy prop that we already killed.
            Assert(!instr->GetDst()
                || !instr->GetDst()->IsRegOpnd()
                || instr->GetDst()->AsRegOpnd()->GetIsJITOptimizedReg()
                || !instr->GetDst()->AsRegOpnd()->m_sym->HasByteCodeRegSlot());
            this->GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
        }
        else
        {
            // Capture value of the bailout after the operation is done.
            this->GenerateBailAfterOperation(&instr, kind);
        }
    }

    // Incremental bailout info: reset changed-sym tracking and commit the
    // captured-values candidate for this bailout point.
    if (!isHoisted && instr->HasBailOutInfo() && !this->IsLoopPrePass())
    {
        GlobOptBlockData * globOptData = CurrentBlockData();
        globOptData->changedSyms->ClearAll();

        if (!this->changedSymsAfterIncBailoutCandidate->IsEmpty())
        {
            //
            // some symbols are changed after the values for current bailout have been
            // captured (GlobOpt::CapturedValues), need to restore such symbols as changed
            // for following incremental bailout construction, or we will miss capturing
            // values for later bailout
            //
            // swap changedSyms and changedSymsAfterIncBailoutCandidate
            // because both are from this->alloc
            BVSparse<JitArenaAllocator> * tempBvSwap = globOptData->changedSyms;
            globOptData->changedSyms = this->changedSymsAfterIncBailoutCandidate;
            this->changedSymsAfterIncBailoutCandidate = tempBvSwap;
        }

        globOptData->capturedValues = globOptData->capturedValuesCandidate;

        // null out capturedValuesCandicate to stop tracking symbols change for it
        globOptData->capturedValuesCandidate = nullptr;
    }

    return instrNext;
}
bool
GlobOpt::OptTagChecks(IR::Instr *instr)
{
    // Optimize away or hoist tagged-value checks: when a sym is known to be an
    // object (CanBeTaggedValue == false), an explicit BailOnNotObject can be
    // removed; otherwise a separate BailOnTaggedValue check may be split out
    // ahead of the field access. Returns true iff instr was removed.
    if (PHASE_OFF(Js::OptTagChecksPhase, this->func) || !this->DoTagChecks())
    {
        return false;
    }

    StackSym *stackSym = nullptr;
    IR::SymOpnd *symOpnd = nullptr;
    IR::RegOpnd *regOpnd = nullptr;

    // Find the stack sym whose tagged-ness is being checked / relied upon.
    switch(instr->m_opcode)
    {
    case Js::OpCode::LdFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::CheckFixedFld:
    case Js::OpCode::CheckPropertyGuardAndLoadType:
        symOpnd = instr->GetSrc1()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;

    case Js::OpCode::BailOnNotObject:
    case Js::OpCode::BailOnNotArray:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            regOpnd = instr->GetSrc1()->AsRegOpnd();
            stackSym = regOpnd->m_sym;
        }
        break;

    case Js::OpCode::StFld:
        symOpnd = instr->GetDst()->AsSymOpnd();
        stackSym = symOpnd->m_sym->AsPropertySym()->m_stackSym;
        break;
    }

    if (stackSym)
    {
        Value *value = CurrentBlockData()->FindValue(stackSym);
        if (value)
        {
            ValueInfo *valInfo = value->GetValueInfo();
            // Byte-code constant table syms are not candidates here.
            if (valInfo->GetSymStore() && valInfo->GetSymStore()->IsStackSym() && valInfo->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable())
            {
                return false;
            }
            ValueType valueType = value->GetValueInfo()->Type();
            if (instr->m_opcode == Js::OpCode::BailOnNotObject)
            {
                if (valueType.CanBeTaggedValue())
                {
                    // We're not adding new information to the value other than changing the value type. Preserve any existing
                    // information and just change the value type.
                    ChangeValueType(nullptr, value, valueType.SetCanBeTaggedValue(false), true /*preserveSubClassInfo*/);
                    return false;
                }
                // Value is already known not to be tagged: the check is redundant.
                if (this->byteCodeUses)
                {
                    this->InsertByteCodeUses(instr);
                }
                this->currentBlock->RemoveInstr(instr);
                return true;
            }
            // For field accesses: split out a tag check when the value could be
            // tagged, hasn't been a number, and we're in the prepass or outside
            // a loop.
            if (valueType.CanBeTaggedValue() &&
                !valueType.HasBeenNumber() &&
                (this->IsLoopPrePass() || !this->currentBlock->loop))
            {
                ValueType newValueType = valueType.SetCanBeTaggedValue(false);

                // Split out the tag check as a separate instruction.
                IR::Instr *bailOutInstr;
                bailOutInstr = IR::BailOutInstr::New(Js::OpCode::BailOnNotObject, IR::BailOutOnTaggedValue, instr, instr->m_func);
                if (!this->IsLoopPrePass())
                {
                    FillBailOutInfo(this->currentBlock, bailOutInstr->GetBailOutInfo());
                }
                IR::RegOpnd *srcOpnd = regOpnd;
                if (!srcOpnd)
                {
                    srcOpnd = IR::RegOpnd::New(stackSym, stackSym->GetType(), instr->m_func);
                    AnalysisAssert(symOpnd);
                    if (symOpnd->GetIsJITOptimizedReg())
                    {
                        srcOpnd->SetIsJITOptimizedReg(true);
                    }
                }
                bailOutInstr->SetSrc1(srcOpnd);
                bailOutInstr->GetSrc1()->SetValueType(valueType);
                instr->InsertBefore(bailOutInstr);

                // Propagate the newly-proven "not tagged" type to the operand
                // and the tracked value.
                if (symOpnd)
                {
                    symOpnd->SetPropertyOwnerValueType(newValueType);
                }
                else
                {
                    regOpnd->SetValueType(newValueType);
                }
                ChangeValueType(nullptr, value, newValueType, false);
            }
        }
    }

    return false;
}
// Type-specializes the source of a BailOutExpectingInteger instruction (the Ld_A that
// begins a switch). If profile/value info says src1 is not likely an int, throws a
// rejit exception to disable switch optimization; otherwise moves the integer bailout
// onto the FromVar produced by ToTypeSpecUse, int-specializes the Ld_A's dst, and
// strips the bailout info from the original instruction.
//
// Parameters:
//   instr   - the instruction carrying BailOutExpectingInteger (switch-begin Ld_A).
//   src1Val - value tracked for instr's src1 (may be null if unknown).
//   dstVal  - out: receives the value created for the type-specialized dst.
// Returns true when the instruction was int-specialized, false when src1 was not a
// reg opnd (only the bailout info is cleared in that case).
bool
GlobOpt::TypeSpecializeBailoutExpectedInteger(IR::Instr* instr, Value* src1Val, Value** dstVal)
{
    bool isAlreadyTypeSpecialized = false;
    if(instr->GetSrc1()->IsRegOpnd())
    {
        // Not likely an int (or explicitly marked not-int): switch opt's integer
        // expectation can't be met, so force a rejit with switch opt disabled.
        if (!src1Val || !src1Val->GetValueInfo()->IsLikelyInt() || instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
        {
            Assert(IsSwitchOptEnabled());
            throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
        }

        // Attach the BailOutExpectingInteger to FromVar and Remove the bail out info on the Ld_A (Begin Switch) instr.
        this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, TyInt32, IR::BailOutExpectingInteger, false, instr);

        // TypeSpecialize the dst of Ld_A
        TypeSpecializeIntDst(instr, instr->m_opcode, src1Val, src1Val, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, dstVal);
        isAlreadyTypeSpecialized = true;
    }

    // The bailout now lives on the FromVar (if any); the original instruction no longer needs it.
    instr->ClearBailOutInfo();
    return isAlreadyTypeSpecialized;
}
// Optimizes and value-numbers the destination operand of an instruction.
// Responsibilities, in order:
//   1. Finish property-operand optimization (or kill object-header-inlined type syms
//      for element/computed-property stores).
//   2. For indir dsts whose base is a likely native array (or x86-without-SSE2
//      float-typed-array), adjust bailouts since the lowerer won't emit a fast path.
//   3. Process symbol kills for the instruction.
//   4. Value-number the dst (unless a value was already supplied), record
//      loop-defined syms during prepass, and prefer a live-on-back-edge sym as the
//      value's sym store to shorten register lifetimes.
//   5. Register the instruction with CSE.
//
// Parameters:
//   pInstr             - in/out: the instruction (may be replaced, e.g. by GenerateBailAtOperation).
//   dstVal             - pre-computed dst value, or null to compute here.
//   src1Val/src2Val    - values for the sources, used for value numbering and CSE.
//   dstIndirIndexVal   - value for the dst indir's index opnd, passed through to CSE.
//   src1IndirIndexVal  - value for src1 indir's index opnd, passed through to CSE.
// Returns the dst's value (possibly null).
Value*
GlobOpt::OptDst(
    IR::Instr ** pInstr,
    Value *dstVal,
    Value *src1Val,
    Value *src2Val,
    Value *dstIndirIndexVal,
    Value *src1IndirIndexVal)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *opnd = instr->GetDst();

    if (opnd)
    {
        if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
        }
        else if (instr->m_opcode == Js::OpCode::StElemI_A ||
                 instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                 instr->m_opcode == Js::OpCode::InitComputedProperty)
        {
            // Element/computed-property stores can change an object's layout out from
            // under an object-header-inlined type; invalidate those type syms.
            this->KillObjectHeaderInlinedTypeSyms(this->currentBlock, false);
        }

        if (opnd->IsIndirOpnd() && !this->IsLoopPrePass())
        {
            IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
            const ValueType baseValueType(baseOpnd->GetValueType());

            // Applies when the base is a likely native array, or — on x86 without
            // SSE2 — a likely Float32/Float64 typed array, and the stored value is a var.
            if ((
                    baseValueType.IsLikelyNativeArray() ||
                #ifdef _M_IX86
                    (
                        !AutoSystemInfo::Data.SSE2Available() &&
                        baseValueType.IsLikelyObject() &&
                        (
                            baseValueType.GetObjectType() == ObjectType::Float32Array ||
                            baseValueType.GetObjectType() == ObjectType::Float64Array
                        )
                    )
                #else
                    false
                #endif
                ) &&
                instr->GetSrc1()->IsVar())
            {
                if(instr->m_opcode == Js::OpCode::StElemC)
                {
                    // StElemC has different code that handles native array conversion or missing value stores. Add a bailout
                    // for those cases.
                    Assert(baseValueType.IsLikelyNativeArray());
                    Assert(!instr->HasBailOutInfo());
                    GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
                }
                else if(instr->HasBailOutInfo())
                {
                    // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
                    // path. Note that the removed bailouts should not be necessary for correctness. Bailout on native array
                    // conversion will be handled automatically as normal.
                    IR::BailOutKind bailOutKind = instr->GetBailOutKind();
                    if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
                    {
                        bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
                    }
                    if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
                    {
                        bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
                    }
                    if(bailOutKind)
                    {
                        instr->SetBailOutKind(bailOutKind);
                    }
                    else
                    {
                        // Nothing left of the bailout kind; drop the bailout entirely.
                        instr->ClearBailOutInfo();
                    }
                }
            }
        }
    }

    this->ProcessKills(instr);

    if (opnd)
    {
        if (dstVal == nullptr)
        {
            dstVal = ValueNumberDst(pInstr, src1Val, src2Val);
        }
        if (this->IsLoopPrePass())
        {
            // Keep track of symbols defined in the loop.
            if (opnd->IsRegOpnd())
            {
                StackSym *symDst = opnd->AsRegOpnd()->m_sym;
                rootLoopPrePass->symsDefInLoop->Set(symDst->m_id);
            }
        }
        else if (dstVal)
        {
            opnd->SetValueType(dstVal->GetValueInfo()->Type());

            if(currentBlock->loop &&
               !IsLoopPrePass() &&
               (instr->m_opcode == Js::OpCode::Ld_A || instr->m_opcode == Js::OpCode::Ld_I4) &&
               instr->GetSrc1()->IsRegOpnd() &&
               !func->IsJitInDebugMode() &&
               func->DoGlobOptsForGeneratorFunc())
            {
                // Look for the following patterns:
                //
                //     Pattern 1:
                //         s1[liveOnBackEdge] = s3[dead]
                //
                //     Pattern 2:
                //         s3 = operation(s1[liveOnBackEdge], s2)
                //         s1[liveOnBackEdge] = s3
                //
                // In both patterns, s1 and s3 have the same value by the end. Prefer to use s1 as the sym store instead of s3
                // since s1 is live on back-edge, as otherwise, their lifetimes overlap, requiring two registers to hold the
                // value instead of one.
                do
                {
                    // Resolve the src's var sym (through type-spec equivalents).
                    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
                    StackSym *srcVarSym = src->m_sym;
                    if(srcVarSym->IsTypeSpec())
                    {
                        srcVarSym = srcVarSym->GetVarEquivSym(nullptr);
                        Assert(srcVarSym);
                    }
                    if(dstVal->GetValueInfo()->GetSymStore() != srcVarSym)
                    {
                        break;
                    }

                    // Resolve the dst's var sym; it must be live on the loop back-edge.
                    IR::RegOpnd *const dst = opnd->AsRegOpnd();
                    StackSym *dstVarSym = dst->m_sym;
                    if(dstVarSym->IsTypeSpec())
                    {
                        dstVarSym = dstVarSym->GetVarEquivSym(nullptr);
                        Assert(dstVarSym);
                    }
                    if(!currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(dstVarSym->m_id))
                    {
                        break;
                    }

                    Value *const srcValue = CurrentBlockData()->FindValue(srcVarSym);
                    if(srcValue->GetValueNumber() != dstVal->GetValueNumber())
                    {
                        break;
                    }

                    if(!src->GetIsDead())
                    {
                        // Pattern 2 check: the previous real instruction must define src
                        // from an operation that consumed dst.
                        IR::Instr *const prevInstr = instr->GetPrevRealInstrOrLabel();
                        IR::Opnd *const prevDst = prevInstr->GetDst();
                        if(!prevDst ||
                           !src->IsEqualInternal(prevDst) ||
                           !(
                               (prevInstr->GetSrc1() && dst->IsEqual(prevInstr->GetSrc1())) ||
                               (prevInstr->GetSrc2() && dst->IsEqual(prevInstr->GetSrc2()))
                            ))
                        {
                            break;
                        }
                    }

                    // Prefer the live-on-back-edge sym as the value's sym store.
                    this->SetSymStoreDirect(dstVal->GetValueInfo(), dstVarSym);
                } while(false);
            }
        }

        this->ValueNumberObjectType(opnd, instr);
    }

    this->CSEAddInstr(this->currentBlock, *pInstr, dstVal, src1Val, src2Val, dstIndirIndexVal, src1IndirIndexVal);

    return dstVal;
}
  2763. void
  2764. GlobOpt::CopyPropDstUses(IR::Opnd *opnd, IR::Instr *instr, Value *src1Val)
  2765. {
  2766. if (opnd->IsSymOpnd())
  2767. {
  2768. IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
  2769. if (symOpnd->m_sym->IsPropertySym())
  2770. {
  2771. PropertySym * originalPropertySym = symOpnd->m_sym->AsPropertySym();
  2772. Value *const objectValue = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym);
  2773. symOpnd->SetPropertyOwnerValueType(objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);
  2774. this->FieldHoistOptDst(instr, originalPropertySym, src1Val);
  2775. PropertySym * sym = this->CopyPropPropertySymObj(symOpnd, instr);
  2776. if (sym != originalPropertySym && !this->IsLoopPrePass())
  2777. {
  2778. // Consider: This doesn't detect hoistability of a property sym after object pointer copy prop
  2779. // on loop prepass. But if it so happened that the property sym is hoisted, we might as well do so.
  2780. this->FieldHoistOptDst(instr, sym, src1Val);
  2781. }
  2782. }
  2783. }
  2784. }
// Field PRE: creates an "initial value" for a property sym at the loop head during
// prepass, so the field can be treated as available on entry. The initial value is
// given a fresh stack sym as its sym store, recorded in the loop's
// initialValueFieldMap, copied (without a sym store) into the landing pad, and
// propagated to all already-visited blocks of the loop so merges see it.
// Bails out early when the field is killed in the loop, already has a value, is live
// in the landing pad, or the object pointer's value differs between the landing pad
// and the current block.
//
// Parameters:
//   loop                - the loop being pre-passed (values are added to its blocks).
//   instr               - the field load; its profile data supplies the value type.
//   propertySym         - the (possibly object-copy-propped) property sym to seed.
//   originalPropertySym - the pre-copy-prop sym, used for the kill check.
void
GlobOpt::SetLoopFieldInitialValue(Loop *loop, IR::Instr *instr, PropertySym *propertySym, PropertySym *originalPropertySym)
{
    Value *initialValue = nullptr;
    StackSym *symStore;

    if (loop->allFieldsKilled || loop->fieldKilled->Test(originalPropertySym->m_id))
    {
        return;
    }
    Assert(!loop->fieldKilled->Test(propertySym->m_id));

    // Value already exists
    if (CurrentBlockData()->FindValue(propertySym))
    {
        return;
    }

    // If this initial value was already added, we would find in the current value table.
    Assert(!loop->initialValueFieldMap.TryGetValue(propertySym, &initialValue));

    // If propertySym is live in landingPad, we don't need an initial value.
    if (loop->landingPad->globOptData.liveFields->Test(propertySym->m_id))
    {
        return;
    }

    Value *landingPadObjPtrVal, *currentObjPtrVal;
    landingPadObjPtrVal = loop->landingPad->globOptData.FindValue(propertySym->m_stackSym);
    currentObjPtrVal = CurrentBlockData()->FindValue(propertySym->m_stackSym);
    if (!currentObjPtrVal || !landingPadObjPtrVal || currentObjPtrVal->GetValueNumber() != landingPadObjPtrVal->GetValueNumber())
    {
        // objPtr has a different value in the landing pad.
        return;
    }

    // The opnd's value type has not yet been initialized. Since the property sym doesn't have a value, it effectively has an
    // Uninitialized value type. Use the profiled value type from the instruction.
    const ValueType profiledValueType =
        instr->IsProfiledInstr() ? instr->AsProfiledInstr()->u.FldInfo().valueType : ValueType::Uninitialized;
    Assert(!profiledValueType.IsDefinite()); // Hence the values created here don't need to be tracked for kills
    initialValue = this->NewGenericValue(profiledValueType, propertySym);
    symStore = StackSym::New(this->func);

    initialValue->GetValueInfo()->SetSymStore(symStore);
    loop->initialValueFieldMap.Add(propertySym, initialValue->Copy(this->alloc, initialValue->GetValueNumber()));

    // Copy the initial value into the landing pad, but without a symStore
    Value *landingPadInitialValue = Value::New(this->alloc, initialValue->GetValueNumber(),
        ValueInfo::New(this->alloc, initialValue->GetValueInfo()->Type()));
    loop->landingPad->globOptData.SetValue(landingPadInitialValue, propertySym);
    loop->landingPad->globOptData.liveFields->Set(propertySym->m_id);

#if DBG_DUMP
    if (PHASE_TRACE(Js::FieldPREPhase, this->func))
    {
        Output::Print(_u("** TRACE: Field PRE initial value for loop head #%d. Val:%d symStore:"),
            loop->GetHeadBlock()->GetBlockNum(), initialValue->GetValueNumber());
        symStore->Dump();
        Output::Print(_u("\n    Instr: "));
        instr->Dump();
    }
#endif

    // Add initial value to all the previous blocks in the loop.
    FOREACH_BLOCK_BACKWARD_IN_RANGE(block, this->currentBlock->GetPrev(), loop->GetHeadBlock())
    {
        if (block->GetDataUseCount() == 0)
        {
            // All successor blocks have been processed, no point in adding the value.
            continue;
        }
        Value *newValue = initialValue->Copy(this->alloc, initialValue->GetValueNumber());
        block->globOptData.SetValue(newValue, propertySym);
        block->globOptData.liveFields->Set(propertySym->m_id);
        block->globOptData.SetValue(newValue, symStore);
        block->globOptData.liveVarSyms->Set(symStore->m_id);
    } NEXT_BLOCK_BACKWARD_IN_RANGE;

    CurrentBlockData()->SetValue(initialValue, symStore);
    CurrentBlockData()->liveVarSyms->Set(symStore->m_id);
    CurrentBlockData()->liveFields->Set(propertySym->m_id);
}
// Examine src, apply copy prop and value number it.
//
// Dispatches on the operand kind:
//   - int/int64/float/addr constants: return (and cache) a constant value.
//   - sym opnds: skip arg/param slots (param slots may get a profiled parameter
//     type), apply field hoisting and object-pointer copy prop to property syms,
//     and during prepass seed Field PRE initial values; if the field was hoisted,
//     fall through and continue as a reg opnd.
//   - reg opnds: resolve type-spec syms to their var equivalents.
//   - indir opnds: recurse into base/index via OptimizeIndirUses.
// Then looks up the sym's value, copy-props it, tracks prepass used-before-defined
// syms (for landing-pad type specialization), merges profiled native-array types,
// and writes the final value type back onto the operand.
//
// Parameters:
//   opnd            - the source operand to optimize (may be freed by copy prop).
//   pInstr          - in/out: the instruction (may be replaced by indir optimization).
//   indirIndexValRef- out (optional): value for an indir's index operand.
//   parentIndirOpnd - when opnd is the base/index of an indir, that indir.
// Returns the operand's value, or null when none is tracked.
Value*
GlobOpt::OptSrc(IR::Opnd *opnd, IR::Instr * *pInstr, Value **indirIndexValRef, IR::IndirOpnd *parentIndirOpnd)
{
    IR::Instr * &instr = *pInstr;
    Assert(!indirIndexValRef || !*indirIndexValRef);
    Assert(
        parentIndirOpnd
            ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
            : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());

    Sym *sym;
    Value *val;
    PropertySym *originalPropertySym = nullptr;

    switch(opnd->GetKind())
    {
    case IR::OpndKindIntConst:
        val = this->GetIntConstantValue(opnd->AsIntConstOpnd()->AsInt32(), instr);
        opnd->SetValueType(val->GetValueInfo()->Type());
        return val;

    case IR::OpndKindInt64Const:
        val = this->GetIntConstantValue(opnd->AsInt64ConstOpnd()->GetValue(), instr);
        opnd->SetValueType(val->GetValueInfo()->Type());
        return val;

    case IR::OpndKindFloatConst:
    {
        // Prefer an int constant value when the float is exactly representable as int32.
        const FloatConstType floatValue = opnd->AsFloatConstOpnd()->m_value;
        int32 int32Value;
        if(Js::JavascriptNumber::TryGetInt32Value(floatValue, &int32Value))
        {
            val = GetIntConstantValue(int32Value, instr);
        }
        else
        {
            val = NewFloatConstantValue(floatValue);
        }
        opnd->SetValueType(val->GetValueInfo()->Type());
        return val;
    }

    case IR::OpndKindAddr:
    {
        IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
        if (addrOpnd->m_isFunction)
        {
            AssertMsg(!PHASE_OFF(Js::FixedMethodsPhase, instr->m_func), "Fixed function address operand with fixed method calls phase disabled?");
            val = NewFixedFunctionValue((Js::JavascriptFunction *)addrOpnd->m_address, addrOpnd);
            opnd->SetValueType(val->GetValueInfo()->Type());
            return val;
        }
        else if (addrOpnd->IsVar() && Js::TaggedInt::Is(addrOpnd->m_address))
        {
            // A tagged-int address is just an int constant.
            val = this->GetIntConstantValue(Js::TaggedInt::ToInt32(addrOpnd->m_address), instr);
            opnd->SetValueType(val->GetValueInfo()->Type());
            return val;
        }
        val = this->GetVarConstantValue(addrOpnd);
        return val;
    }

    case IR::OpndKindSym:
    {
        // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
        // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
        // value if available, before returning from this function.
        opnd->SetValueType(ValueType::Uninitialized);

        sym = opnd->AsSymOpnd()->m_sym;

        // Don't create a new value for ArgSlots and don't copy prop them away.
        if (sym->IsStackSym() && sym->AsStackSym()->IsArgSlotSym())
        {
            return nullptr;
        }

        // Unless we have profile info, don't create a new value for ParamSlots and don't copy prop them away.
        if (sym->IsStackSym() && sym->AsStackSym()->IsParamSlotSym())
        {
            if (!instr->m_func->IsLoopBody() && instr->m_func->HasProfileInfo())
            {
                // Skip "this" pointer.
                int paramSlotNum = sym->AsStackSym()->GetParamSlotNum() - 2;
                if (paramSlotNum >= 0)
                {
                    const auto parameterType = instr->m_func->GetReadOnlyProfileInfo()->GetParameterInfo(static_cast<Js::ArgSlot>(paramSlotNum));
                    val = NewGenericValue(parameterType);
                    opnd->SetValueType(val->GetValueInfo()->Type());
                    return val;
                }
            }
            return nullptr;
        }

        if (!sym->IsPropertySym())
        {
            break;
        }
        originalPropertySym = sym->AsPropertySym();

        // Don't give a value to the 'arguments' property sym to prevent field copy prop of 'arguments'
        if (originalPropertySym->AsPropertySym()->m_propertyId == Js::PropertyIds::arguments &&
            originalPropertySym->AsPropertySym()->m_fieldKind == PropertyKindData)
        {
            return nullptr;
        }

        // Annotate the opnd with the property owner's current value type.
        Value *const objectValue = CurrentBlockData()->FindValue(originalPropertySym->m_stackSym);
        opnd->AsSymOpnd()->SetPropertyOwnerValueType(
            objectValue ? objectValue->GetValueInfo()->Type() : ValueType::Uninitialized);

        if (!FieldHoistOptSrc(opnd->AsSymOpnd(), instr, originalPropertySym))
        {
            sym = this->CopyPropPropertySymObj(opnd->AsSymOpnd(), instr);

            // Consider: This doesn't detect hoistability of a property sym after object pointer copy prop
            // on loop prepass. But if it so happened that the property sym is hoisted, we might as well do so.
            if (originalPropertySym == sym || this->IsLoopPrePass() ||
                !FieldHoistOptSrc(opnd->AsSymOpnd(), instr, sym->AsPropertySym()))
            {
                if (!DoFieldCopyProp())
                {
                    if (opnd->AsSymOpnd()->IsPropertySymOpnd())
                    {
                        this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
                    }
                    return nullptr;
                }
                switch (instr->m_opcode)
                {
                    // These need the symbolic reference to the field, don't copy prop the value of the field
                case Js::OpCode::DeleteFld:
                case Js::OpCode::DeleteRootFld:
                case Js::OpCode::DeleteFldStrict:
                case Js::OpCode::DeleteRootFldStrict:
                case Js::OpCode::ScopedDeleteFld:
                case Js::OpCode::ScopedDeleteFldStrict:
                case Js::OpCode::LdMethodFromFlags:
                case Js::OpCode::BrOnNoProperty:
                case Js::OpCode::BrOnHasProperty:
                case Js::OpCode::LdMethodFldPolyInlineMiss:
                case Js::OpCode::StSlotChkUndecl:
                    return nullptr;
                };

                if (instr->CallsGetter())
                {
                    return nullptr;
                }

                if (this->IsLoopPrePass() && this->DoFieldPRE(this->rootLoopPrePass))
                {
                    if (!this->prePassLoop->allFieldsKilled && !this->prePassLoop->fieldKilled->Test(sym->m_id))
                    {
                        // Seed a Field PRE initial value at the loop head for this field.
                        this->SetLoopFieldInitialValue(this->rootLoopPrePass, instr, sym->AsPropertySym(), originalPropertySym);
                    }
                    if (this->IsPREInstrCandidateLoad(instr->m_opcode))
                    {
                        // Foreach property sym, remember the first instruction that loads it.
                        // Can this be done in one call?
                        if (!this->prePassInstrMap->ContainsKey(sym->m_id))
                        {
                            this->prePassInstrMap->AddNew(sym->m_id, instr);
                        }
                    }
                }
                break;
            }
        }

        // We field hoisted, we can continue as a reg.
        opnd = instr->GetSrc1();
    }
    // NOTE: intentional fall-through from OpndKindSym — after field hoisting, the
    // (replaced) operand is handled as a reg opnd.
    case IR::OpndKindReg:
        // Clear the opnd's value type up-front, so that this code cannot accidentally use the value type set from a previous
        // OptSrc on the same instruction (for instance, from an earlier loop prepass). The value type will be set from the
        // value if available, before returning from this function.
        opnd->SetValueType(ValueType::Uninitialized);

        sym = opnd->AsRegOpnd()->m_sym;
        CurrentBlockData()->MarkTempLastUse(instr, opnd->AsRegOpnd());
        if (sym->AsStackSym()->IsTypeSpec())
        {
            sym = sym->AsStackSym()->GetVarEquivSym(this->func);
        }
        break;

    case IR::OpndKindIndir:
        this->OptimizeIndirUses(opnd->AsIndirOpnd(), &instr, indirIndexValRef);
        return nullptr;

    default:
        return nullptr;
    }

    val = CurrentBlockData()->FindValue(sym);

    if (val)
    {
        Assert(CurrentBlockData()->IsLive(sym) || (sym->IsPropertySym()));
        if (instr)
        {
            opnd = this->CopyProp(opnd, instr, val, parentIndirOpnd);
        }

        // Check if we freed the operand.
        if (opnd == nullptr)
        {
            return nullptr;
        }

        // In a loop prepass, determine stack syms that are used before they are defined in the root loop for which the prepass
        // is being done. This information is used to do type specialization conversions in the landing pad where appropriate.
        if(IsLoopPrePass() &&
           sym->IsStackSym() &&
           !rootLoopPrePass->symsUsedBeforeDefined->Test(sym->m_id) &&
           rootLoopPrePass->landingPad->globOptData.IsLive(sym) && !isAsmJSFunc) // no typespec in asmjs and hence skipping this
        {
            Value *const landingPadValue = rootLoopPrePass->landingPad->globOptData.FindValue(sym);
            if(landingPadValue && val->GetValueNumber() == landingPadValue->GetValueNumber())
            {
                rootLoopPrePass->symsUsedBeforeDefined->Set(sym->m_id);
                ValueInfo *landingPadValueInfo = landingPadValue->GetValueInfo();
                if(landingPadValueInfo->IsLikelyNumber())
                {
                    rootLoopPrePass->likelyNumberSymsUsedBeforeDefined->Set(sym->m_id);
                    if(DoAggressiveIntTypeSpec() ? landingPadValueInfo->IsLikelyInt() : landingPadValueInfo->IsInt())
                    {
                        // Can only force int conversions in the landing pad based on likely-int values if aggressive int type
                        // specialization is enabled.
                        rootLoopPrePass->likelyIntSymsUsedBeforeDefined->Set(sym->m_id);
                    }
                }

#ifdef ENABLE_SIMDJS
                // SIMD_JS
                // For uses before defs, we set likelySimd128*SymsUsedBeforeDefined bits for syms that have landing pad value info that allow type-spec to happen in the loop body.
                // The BV will be added to loop header if the backedge has a live matching type-spec value. We then compensate in the loop header to unbox the value.
                // This allows type-spec in the landing pad instead of boxing/unboxing on each iteration.
                if (Js::IsSimd128Opcode(instr->m_opcode))
                {
                    // Simd ops are strongly typed. We type-spec only if the type is likely/Definitely the expected type or if we have object which can come from merging different Simd types.
                    // Simd value must be initialized properly on all paths before the loop entry. Cannot be merged with Undefined/Null.
                    ThreadContext::SimdFuncSignature funcSignature;
                    instr->m_func->GetScriptContext()->GetThreadContext()->GetSimdFuncSignatureFromOpcode(instr->m_opcode, funcSignature);
                    Assert(funcSignature.valid);
                    ValueType expectedType = funcSignature.args[opnd == instr->GetSrc1() ? 0 : 1];

                    if (expectedType.IsSimd128Float32x4())
                    {
                        if (
                            (landingPadValueInfo->IsLikelySimd128Float32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
                            &&
                            !landingPadValueInfo->HasBeenUndefined() && !landingPadValueInfo->HasBeenNull()
                            )
                        {
                            rootLoopPrePass->likelySimd128F4SymsUsedBeforeDefined->Set(sym->m_id);
                        }
                    }
                    else if (expectedType.IsSimd128Int32x4())
                    {
                        if (
                            (landingPadValueInfo->IsLikelySimd128Int32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
                            &&
                            !landingPadValueInfo->HasBeenUndefined() && !landingPadValueInfo->HasBeenNull()
                            )
                        {
                            rootLoopPrePass->likelySimd128I4SymsUsedBeforeDefined->Set(sym->m_id);
                        }
                    }
                }
                else if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc1() && instr->GetDst()->GetValueType().IsSimd128())
                {
                    // Extended_Args for Simd ops are annotated with the expected type by the inliner. Use this info to find out if type-spec is supposed to happen.
                    ValueType expectedType = instr->GetDst()->GetValueType();

                    if ((landingPadValueInfo->IsLikelySimd128Float32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
                        && expectedType.IsSimd128Float32x4())
                    {
                        rootLoopPrePass->likelySimd128F4SymsUsedBeforeDefined->Set(sym->m_id);
                    }
                    else if ((landingPadValueInfo->IsLikelySimd128Int32x4() || (landingPadValueInfo->IsLikelyObject() && landingPadValueInfo->GetObjectType() == ObjectType::Object))
                        && expectedType.IsSimd128Int32x4())
                    {
                        rootLoopPrePass->likelySimd128I4SymsUsedBeforeDefined->Set(sym->m_id);
                    }
                }
#endif
            }
        }
    }
    else if ((instr->TransfersSrcValue() || OpCodeAttr::CanCSE(instr->m_opcode)) && (opnd == instr->GetSrc1() || opnd == instr->GetSrc2()))
    {
        // No value yet: create one for value-transferring / CSE-able instructions.
        if (sym->IsPropertySym())
        {
            val = this->CreateFieldSrcValue(sym->AsPropertySym(), originalPropertySym, &opnd, instr);
        }
        else
        {
            val = this->NewGenericValue(ValueType::Uninitialized, opnd);
        }
    }

    if (opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
    {
        TryOptimizeInstrWithFixedDataProperty(&instr);
        this->FinishOptPropOp(instr, opnd->AsPropertySymOpnd());
    }

    if (val)
    {
        ValueType valueType(val->GetValueInfo()->Type());

        // This block uses local profiling data to optimize the case of a native array being passed to a function that fills it with other types. When the function is inlined
        // into different call paths which use different types this can cause a perf hit by performing unnecessary array conversions, so only perform this optimization when
        // the function is not inlined.
        if (valueType.IsLikelyNativeArray() && !valueType.IsObject() && instr->IsProfiledInstr() && !instr->m_func->IsInlined())
        {
            // See if we have profile data for the array type
            IR::ProfiledInstr *const profiledInstr = instr->AsProfiledInstr();
            ValueType profiledArrayType;
            switch(instr->m_opcode)
            {
                case Js::OpCode::LdElemI_A:
                    if(instr->GetSrc1()->IsIndirOpnd() && opnd == instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd())
                    {
                        profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
                    }
                    break;

                case Js::OpCode::StElemI_A:
                case Js::OpCode::StElemI_A_Strict:
                case Js::OpCode::StElemC:
                    if(instr->GetDst()->IsIndirOpnd() && opnd == instr->GetDst()->AsIndirOpnd()->GetBaseOpnd())
                    {
                        profiledArrayType = profiledInstr->u.stElemInfo->GetArrayType();
                    }
                    break;

                case Js::OpCode::LdLen_A:
                    if(instr->GetSrc1()->IsRegOpnd() && opnd == instr->GetSrc1())
                    {
                        profiledArrayType = profiledInstr->u.ldElemInfo->GetArrayType();
                    }
                    break;
            }
            if(profiledArrayType.IsLikelyObject() &&
               profiledArrayType.GetObjectType() == valueType.GetObjectType() &&
               (profiledArrayType.HasVarElements() || (valueType.HasIntElements() && profiledArrayType.HasFloatElements())))
            {
                // Merge array type we pulled from profile with type propagated by dataflow.
                valueType = valueType.Merge(profiledArrayType).SetHasNoMissingValues(valueType.HasNoMissingValues());
                ChangeValueType(this->currentBlock, CurrentBlockData()->FindValue(opnd->AsRegOpnd()->m_sym), valueType, false);
            }
        }
        opnd->SetValueType(valueType);

        if(!IsLoopPrePass() && opnd->IsSymOpnd() && valueType.IsDefinite())
        {
            if (opnd->AsSymOpnd()->m_sym->IsPropertySym())
            {
                // A property sym can only be guaranteed to have a definite value type when implicit calls are disabled from the
                // point where the sym was defined with the definite value type. Insert an instruction to indicate to the
                // dead-store pass that implicit calls need to be kept disabled until after this instruction.
                Assert(DoFieldCopyProp());
                CaptureNoImplicitCallUses(opnd, false, instr);
            }
        }
    }
    else
    {
        opnd->SetValueType(ValueType::Uninitialized);
    }

    return val;
}
  3201. /*
  3202. * GlobOpt::TryOptimizeInstrWithFixedDataProperty
  3203. * Converts Ld[Root]Fld instr to
  3204. * * CheckFixedFld
  3205. * * Dst = Ld_A <int Constant value>
  3206. * This API assumes that the source operand is a Sym/PropertySym kind.
  3207. */
  3208. void
  3209. GlobOpt::TryOptimizeInstrWithFixedDataProperty(IR::Instr ** const pInstr)
  3210. {
  3211. Assert(pInstr);
  3212. IR::Instr * &instr = *pInstr;
  3213. IR::Opnd * src1 = instr->GetSrc1();
  3214. Assert(src1 && src1->IsSymOpnd() && src1->AsSymOpnd()->IsPropertySymOpnd());
  3215. if(PHASE_OFF(Js::UseFixedDataPropsPhase, instr->m_func))
  3216. {
  3217. return;
  3218. }
  3219. if (!this->IsLoopPrePass() && !this->isRecursiveCallOnLandingPad &&
  3220. OpCodeAttr::CanLoadFixedFields(instr->m_opcode))
  3221. {
  3222. instr->TryOptimizeInstrWithFixedDataProperty(&instr, this);
  3223. }
  3224. }
  3225. // Constant prop if possible, otherwise if this value already resides in another
  3226. // symbol, reuse this previous symbol. This should help register allocation.
  3227. IR::Opnd *
  3228. GlobOpt::CopyProp(IR::Opnd *opnd, IR::Instr *instr, Value *val, IR::IndirOpnd *parentIndirOpnd)
  3229. {
  3230. Assert(
  3231. parentIndirOpnd
  3232. ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
  3233. : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
  3234. if (this->IsLoopPrePass())
  3235. {
  3236. // Transformations are not legal in prepass...
  3237. return opnd;
  3238. }
  3239. if (!this->func->DoGlobOptsForGeneratorFunc())
  3240. {
  3241. // Don't copy prop in generator functions because non-bytecode temps that span a yield
  3242. // cannot be saved and restored by the current bail-out mechanics utilized by generator
  3243. // yield/resume.
  3244. // TODO[generators][ianhall]: Enable copy-prop at least for in between yields.
  3245. return opnd;
  3246. }
  3247. if (instr->m_opcode == Js::OpCode::CheckFixedFld || instr->m_opcode == Js::OpCode::CheckPropertyGuardAndLoadType)
  3248. {
  3249. // Don't copy prop into CheckFixedFld or CheckPropertyGuardAndLoadType
  3250. return opnd;
  3251. }
  3252. // Don't copy-prop link operands of ExtendedArgs
  3253. if (instr->m_opcode == Js::OpCode::ExtendArg_A && opnd == instr->GetSrc2())
  3254. {
  3255. return opnd;
  3256. }
  3257. // Don't copy-prop operand of SIMD instr with ExtendedArg operands. Each instr should have its exclusive EA sequence.
  3258. if (
  3259. Js::IsSimd128Opcode(instr->m_opcode) &&
  3260. instr->GetSrc1() != nullptr &&
  3261. instr->GetSrc1()->IsRegOpnd() &&
  3262. instr->GetSrc2() == nullptr
  3263. )
  3264. {
  3265. StackSym *sym = instr->GetSrc1()->GetStackSym();
  3266. if (sym && sym->IsSingleDef() && sym->GetInstrDef()->m_opcode == Js::OpCode::ExtendArg_A)
  3267. {
  3268. return opnd;
  3269. }
  3270. }
  3271. ValueInfo *valueInfo = val->GetValueInfo();
  3272. if (this->func->HasFinally())
  3273. {
  3274. // s0 = undefined was added on functions with early exit in try-finally functions, that can get copy-proped and case incorrect results
  3275. if (instr->m_opcode == Js::OpCode::ArgOut_A_Inline && valueInfo->GetSymStore() &&
  3276. valueInfo->GetSymStore()->m_id == 0)
  3277. {
  3278. // We don't want to copy-prop s0 (return symbol) into inlinee code
  3279. return opnd;
  3280. }
  3281. }
  3282. // Constant prop?
  3283. int32 intConstantValue;
  3284. int64 int64ConstantValue;
  3285. if (valueInfo->TryGetIntConstantValue(&intConstantValue))
  3286. {
  3287. if (PHASE_OFF(Js::ConstPropPhase, this->func))
  3288. {
  3289. return opnd;
  3290. }
  3291. if ((
  3292. instr->m_opcode == Js::OpCode::StElemI_A ||
  3293. instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
  3294. instr->m_opcode == Js::OpCode::StElemC
  3295. ) && instr->GetSrc1() == opnd)
  3296. {
  3297. // Disabling prop to src of native array store, because we were losing the chance to type specialize.
  3298. // Is it possible to type specialize this src if we allow constants, etc., to be prop'd here?
  3299. if (instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray())
  3300. {
  3301. return opnd;
  3302. }
  3303. }
  3304. if(opnd != instr->GetSrc1() && opnd != instr->GetSrc2())
  3305. {
  3306. if(PHASE_OFF(Js::IndirCopyPropPhase, instr->m_func))
  3307. {
  3308. return opnd;
  3309. }
  3310. // Const-prop an indir opnd's constant index into its offset
  3311. IR::Opnd *srcs[] = { instr->GetSrc1(), instr->GetSrc2(), instr->GetDst() };
  3312. for(int i = 0; i < sizeof(srcs) / sizeof(srcs[0]); ++i)
  3313. {
  3314. const auto src = srcs[i];
  3315. if(!src || !src->IsIndirOpnd())
  3316. {
  3317. continue;
  3318. }
  3319. const auto indir = src->AsIndirOpnd();
  3320. if ((int64)indir->GetOffset() + intConstantValue > INT32_MAX)
  3321. {
  3322. continue;
  3323. }
  3324. if(opnd == indir->GetIndexOpnd())
  3325. {
  3326. Assert(indir->GetScale() == 0);
  3327. GOPT_TRACE_OPND(opnd, _u("Constant prop indir index into offset (value: %d)\n"), intConstantValue);
  3328. this->CaptureByteCodeSymUses(instr);
  3329. indir->SetOffset(indir->GetOffset() + intConstantValue);
  3330. indir->SetIndexOpnd(nullptr);
  3331. }
  3332. }
  3333. return opnd;
  3334. }
  3335. if (Js::TaggedInt::IsOverflow(intConstantValue))
  3336. {
  3337. return opnd;
  3338. }
  3339. IR::Opnd *constOpnd;
  3340. if (opnd->IsVar())
  3341. {
  3342. IR::AddrOpnd *addrOpnd = IR::AddrOpnd::New(Js::TaggedInt::ToVarUnchecked((int)intConstantValue), IR::AddrOpndKindConstantVar, instr->m_func);
  3343. GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), addrOpnd->m_address, intConstantValue);
  3344. constOpnd = addrOpnd;
  3345. }
  3346. else
  3347. {
  3348. // Note: Jit loop body generates some i32 operands...
  3349. Assert(opnd->IsInt32() || opnd->IsInt64() || opnd->IsUInt32());
  3350. IRType opndType;
  3351. IntConstType constVal;
  3352. if (opnd->IsUInt32())
  3353. {
  3354. // avoid sign extension
  3355. constVal = (uint32)intConstantValue;
  3356. opndType = TyUint32;
  3357. }
  3358. else
  3359. {
  3360. constVal = intConstantValue;
  3361. opndType = TyInt32;
  3362. }
  3363. IR::IntConstOpnd *intOpnd = IR::IntConstOpnd::New(constVal, opndType, instr->m_func);
  3364. GOPT_TRACE_OPND(opnd, _u("Constant prop %d (value:%d)\n"), intOpnd->GetImmediateValue(instr->m_func), intConstantValue);
  3365. constOpnd = intOpnd;
  3366. }
  3367. #if ENABLE_DEBUG_CONFIG_OPTIONS
  3368. //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
  3369. if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
  3370. {
  3371. instr->DumpFieldCopyPropTestTrace();
  3372. }
  3373. #endif
  3374. this->CaptureByteCodeSymUses(instr);
  3375. opnd = instr->ReplaceSrc(opnd, constOpnd);
  3376. switch (instr->m_opcode)
  3377. {
  3378. case Js::OpCode::LdSlot:
  3379. case Js::OpCode::LdSlotArr:
  3380. case Js::OpCode::LdFld:
  3381. case Js::OpCode::LdFldForTypeOf:
  3382. case Js::OpCode::LdRootFldForTypeOf:
  3383. case Js::OpCode::LdFldForCallApplyTarget:
  3384. case Js::OpCode::LdRootFld:
  3385. case Js::OpCode::LdMethodFld:
  3386. case Js::OpCode::LdRootMethodFld:
  3387. case Js::OpCode::LdMethodFromFlags:
  3388. case Js::OpCode::ScopedLdMethodFld:
  3389. instr->m_opcode = Js::OpCode::Ld_A;
  3390. case Js::OpCode::Ld_A:
  3391. {
  3392. IR::Opnd * dst = instr->GetDst();
  3393. if (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsSingleDef())
  3394. {
  3395. dst->AsRegOpnd()->m_sym->SetIsIntConst((int)intConstantValue);
  3396. }
  3397. break;
  3398. }
  3399. case Js::OpCode::ArgOut_A:
  3400. case Js::OpCode::ArgOut_A_Inline:
  3401. case Js::OpCode::ArgOut_A_FixupForStackArgs:
  3402. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  3403. if (instr->GetDst()->IsRegOpnd())
  3404. {
  3405. Assert(instr->GetDst()->AsRegOpnd()->m_sym->m_isSingleDef);
  3406. instr->GetDst()->AsRegOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
  3407. }
  3408. else
  3409. {
  3410. instr->GetDst()->AsSymOpnd()->m_sym->AsStackSym()->SetIsIntConst((int)intConstantValue);
  3411. }
  3412. break;
  3413. case Js::OpCode::TypeofElem:
  3414. instr->m_opcode = Js::OpCode::Typeof;
  3415. break;
  3416. case Js::OpCode::StSlotChkUndecl:
  3417. if (instr->GetSrc2() == opnd)
  3418. {
  3419. // Src2 here should refer to the same location as the Dst operand, which we need to keep live
  3420. // due to the implicit read for ChkUndecl.
  3421. instr->m_opcode = Js::OpCode::StSlot;
  3422. instr->FreeSrc2();
  3423. opnd = nullptr;
  3424. }
  3425. break;
  3426. }
  3427. return opnd;
  3428. }
  3429. else if (valueInfo->TryGetIntConstantValue(&int64ConstantValue, false))
  3430. {
  3431. if (PHASE_OFF(Js::ConstPropPhase, this->func) || !PHASE_ON(Js::Int64ConstPropPhase, this->func))
  3432. {
  3433. return opnd;
  3434. }
  3435. Assert(this->func->GetJITFunctionBody()->IsWasmFunction());
  3436. if (this->func->GetJITFunctionBody()->IsWasmFunction() && opnd->IsInt64())
  3437. {
  3438. IR::Int64ConstOpnd *intOpnd = IR::Int64ConstOpnd::New(int64ConstantValue, opnd->GetType(), instr->m_func);
  3439. GOPT_TRACE_OPND(opnd, _u("Constant prop %lld (value:%lld)\n"), intOpnd->GetImmediateValue(instr->m_func), int64ConstantValue);
  3440. this->CaptureByteCodeSymUses(instr);
  3441. opnd = instr->ReplaceSrc(opnd, intOpnd);
  3442. }
  3443. return opnd;
  3444. }
  3445. Sym *opndSym = nullptr;
  3446. if (opnd->IsRegOpnd())
  3447. {
  3448. IR::RegOpnd *regOpnd = opnd->AsRegOpnd();
  3449. opndSym = regOpnd->m_sym;
  3450. }
  3451. else if (opnd->IsSymOpnd())
  3452. {
  3453. IR::SymOpnd *symOpnd = opnd->AsSymOpnd();
  3454. opndSym = symOpnd->m_sym;
  3455. }
  3456. if (!opndSym)
  3457. {
  3458. return opnd;
  3459. }
  3460. if (PHASE_OFF(Js::CopyPropPhase, this->func))
  3461. {
  3462. this->SetSymStoreDirect(valueInfo, opndSym);
  3463. return opnd;
  3464. }
  3465. // We should have dealt with field hoist already
  3466. Assert(!instr->TransfersSrcValue() || !opndSym->IsPropertySym() ||
  3467. !this->IsHoistedPropertySym(opndSym->AsPropertySym()));
  3468. StackSym *copySym = CurrentBlockData()->GetCopyPropSym(opndSym, val);
  3469. if (copySym != nullptr)
  3470. {
  3471. // Copy prop.
  3472. return CopyPropReplaceOpnd(instr, opnd, copySym, parentIndirOpnd);
  3473. }
  3474. else
  3475. {
  3476. if (valueInfo->GetSymStore() && instr->m_opcode == Js::OpCode::Ld_A && instr->GetDst()->IsRegOpnd()
  3477. && valueInfo->GetSymStore() == instr->GetDst()->AsRegOpnd()->m_sym)
  3478. {
  3479. // Avoid resetting symStore after fieldHoisting:
  3480. // t1 = LdFld field <- set symStore to fieldHoistSym
  3481. // fieldHoistSym = Ld_A t1 <- we're looking at t1 now, but want to copy-prop fieldHoistSym forward
  3482. return opnd;
  3483. }
  3484. this->SetSymStoreDirect(valueInfo, opndSym);
  3485. }
  3486. return opnd;
  3487. }
// Replaces 'opnd' (a use on 'instr', possibly the base/index of 'parentIndirOpnd')
// with a reg opnd that references 'copySym', a stack sym known to hold the same
// value, then applies per-opcode fixups (e.g. degrading field/slot loads to Ld_A,
// or nop-ing out self-assignments). Returns the replacement reg opnd, or nullptr
// when the replaced src was freed entirely (StSlotChkUndecl case). May insert a
// CheckObjType instruction to preserve a type check the replaced property access
// used to perform.
IR::Opnd *
GlobOpt::CopyPropReplaceOpnd(IR::Instr * instr, IR::Opnd * opnd, StackSym * copySym, IR::IndirOpnd *parentIndirOpnd)
{
    // 'opnd' must either hang off the given parent indir (base/index) or be a
    // direct src of 'instr' (or its dst when the dst is an indir being read).
    Assert(
        parentIndirOpnd
            ? opnd == parentIndirOpnd->GetBaseOpnd() || opnd == parentIndirOpnd->GetIndexOpnd()
            : opnd == instr->GetSrc1() || opnd == instr->GetSrc2() || opnd == instr->GetDst() && opnd->IsIndirOpnd());
    Assert(CurrentBlockData()->IsLive(copySym));

    IR::RegOpnd *regOpnd;
    StackSym *newSym = copySym;

    GOPT_TRACE_OPND(opnd, _u("Copy prop s%d\n"), newSym->m_id);
#if ENABLE_DEBUG_CONFIG_OPTIONS
    //Need to update DumpFieldCopyPropTestTrace for every new opcode that is added for fieldcopyprop
    if(Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FieldCopyPropPhase))
    {
        instr->DumpFieldCopyPropTestTrace();
    }
#endif
    // Record byte-code-level sym uses before we rewrite the operand.
    this->CaptureByteCodeSymUses(instr);
    if (opnd->IsRegOpnd())
    {
        // Simple case: retarget the existing reg opnd at the copy sym in place.
        regOpnd = opnd->AsRegOpnd();
        regOpnd->m_sym = newSym;
        regOpnd->SetIsJITOptimizedReg(true);

        // The dead bit on the opnd is specific to the sym it is referencing. Since we replaced the sym, the bit is reset.
        regOpnd->SetIsDead(false);

        if(parentIndirOpnd)
        {
            // Base/index of an indir: none of the opcode fixups below apply.
            return regOpnd;
        }
    }
    else
    {
        // If this is an object type specialized field load inside a loop, and it produces a type value which wasn't live
        // before, make sure the type check is left in the loop, because it may be the last type check in the loop protecting
        // other fields which are not hoistable and are lexically upstream in the loop. If the check is not ultimately
        // needed, the dead store pass will remove it.
        if (this->currentBlock->loop != nullptr && opnd->IsSymOpnd() && opnd->AsSymOpnd()->IsPropertySymOpnd())
        {
            IR::PropertySymOpnd* propertySymOpnd = opnd->AsPropertySymOpnd();
            if (CheckIfPropOpEmitsTypeCheck(instr, propertySymOpnd))
            {
                // We only set guarded properties in the dead store pass, so they shouldn't be set here yet. If they were
                // we would need to move them from this operand to the operand which is being copy propagated.
                Assert(propertySymOpnd->GetGuardedPropOps() == nullptr);

                // We're creating a copy of this operand to be reused in the same spot in the flow, so we can copy all
                // flow sensitive fields. However, we will do only a type check here (no property access) and only for
                // the sake of downstream instructions, so the flags pertaining to this property access are irrelevant.
                IR::PropertySymOpnd* checkObjTypeOpnd = CreateOpndForTypeCheckOnly(propertySymOpnd, instr->m_func);
                IR::Instr* checkObjTypeInstr = IR::Instr::New(Js::OpCode::CheckObjType, instr->m_func);
                checkObjTypeInstr->SetSrc1(checkObjTypeOpnd);
                checkObjTypeInstr->SetByteCodeOffset(instr);
                instr->InsertBefore(checkObjTypeInstr);

                // Since we inserted this instruction before the one that is being processed in natural flow, we must process
                // it for object type spec explicitly here.
                FinishOptPropOp(checkObjTypeInstr, checkObjTypeOpnd);
                Assert(!propertySymOpnd->IsTypeChecked());
                checkObjTypeInstr = this->SetTypeCheckBailOut(checkObjTypeOpnd, checkObjTypeInstr, nullptr);
                Assert(checkObjTypeInstr->HasBailOutInfo());

                if (this->currentBlock->loop && !this->IsLoopPrePass())
                {
                    // Try hoisting this checkObjType.
                    // But since this isn't the current instr being optimized, we need to play tricks with
                    // the byteCodeUse fields...
                    BVSparse<JitArenaAllocator> *currentBytecodeUses = this->byteCodeUses;
                    PropertySym * currentPropertySymUse = this->propertySymUse;
                    PropertySym * tempPropertySymUse = NULL;
                    this->byteCodeUses = NULL;
                    BVSparse<JitArenaAllocator> *tempByteCodeUse = JitAnew(this->tempAlloc, BVSparse<JitArenaAllocator>, this->tempAlloc);
#if DBG
                    BVSparse<JitArenaAllocator> *currentBytecodeUsesBeforeOpt = this->byteCodeUsesBeforeOpt;
                    this->byteCodeUsesBeforeOpt = tempByteCodeUse;
#endif
                    this->propertySymUse = NULL;
                    GlobOpt::TrackByteCodeSymUsed(checkObjTypeInstr, tempByteCodeUse, &tempPropertySymUse);
                    TryHoistInvariant(checkObjTypeInstr, this->currentBlock, NULL, CurrentBlockData()->FindValue(copySym), NULL, true);

                    // Restore the saved byteCodeUse state.
                    this->byteCodeUses = currentBytecodeUses;
                    this->propertySymUse = currentPropertySymUse;
#if DBG
                    this->byteCodeUsesBeforeOpt = currentBytecodeUsesBeforeOpt;
#endif
                }
            }
        }

        if (opnd->IsSymOpnd() && opnd->GetIsDead())
        {
            // Take the property sym out of the live fields set
            this->EndFieldLifetime(opnd->AsSymOpnd());
        }
        // Non-reg operand (e.g. a sym opnd): replace it wholesale with a new
        // JIT-optimized reg opnd on the copy sym.
        regOpnd = IR::RegOpnd::New(newSym, opnd->GetType(), instr->m_func);
        regOpnd->SetIsJITOptimizedReg(true);
        instr->ReplaceSrc(opnd, regOpnd);
    }

    // Per-opcode fixups now that the src is a register.
    switch (instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
        if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
            instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
        {
            // Copy-prop turned this into "s = s"; drop the instruction.
            this->InsertByteCodeUses(instr, true);
            instr->m_opcode = Js::OpCode::Nop;
        }
        break;

    case Js::OpCode::LdSlot:
    case Js::OpCode::LdSlotArr:
        if (instr->GetDst()->IsRegOpnd() && instr->GetSrc1()->IsRegOpnd() &&
            instr->GetDst()->AsRegOpnd()->GetStackSym() == instr->GetSrc1()->AsRegOpnd()->GetStackSym())
        {
            this->InsertByteCodeUses(instr, true);
            instr->m_opcode = Js::OpCode::Nop;
        }
        else
        {
            // The slot load's src is now a register; degrade to a plain copy.
            instr->m_opcode = Js::OpCode::Ld_A;
        }
        break;

    case Js::OpCode::StSlotChkUndecl:
        if (instr->GetSrc2()->IsRegOpnd())
        {
            // Src2 here should refer to the same location as the Dst operand, which we need to keep live
            // due to the implicit read for ChkUndecl.
            instr->m_opcode = Js::OpCode::StSlot;
            instr->FreeSrc2();
            return nullptr;
        }
        break;

    case Js::OpCode::LdFld:
    case Js::OpCode::LdFldForTypeOf:
    case Js::OpCode::LdRootFldForTypeOf:
    case Js::OpCode::LdFldForCallApplyTarget:
    case Js::OpCode::LdRootFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::LdRootMethodFld:
    case Js::OpCode::ScopedLdMethodFld:
        // Field loads whose src is now a register become plain copies.
        instr->m_opcode = Js::OpCode::Ld_A;
        break;

    case Js::OpCode::LdMethodFromFlags:
        // The bailout is checked on the loop top and we don't need to check bailout again in loop.
        instr->m_opcode = Js::OpCode::Ld_A;
        instr->ClearBailOutInfo();
        break;

    case Js::OpCode::TypeofElem:
        instr->m_opcode = Js::OpCode::Typeof;
        break;
    }
    CurrentBlockData()->MarkTempLastUse(instr, regOpnd);

    return regOpnd;
}
  3636. ValueNumber
  3637. GlobOpt::NewValueNumber()
  3638. {
  3639. ValueNumber valueNumber = this->currentValue++;
  3640. if (valueNumber == 0)
  3641. {
  3642. Js::Throw::OutOfMemory();
  3643. }
  3644. return valueNumber;
  3645. }
  3646. Value *GlobOpt::NewValue(ValueInfo *const valueInfo)
  3647. {
  3648. return NewValue(NewValueNumber(), valueInfo);
  3649. }
  3650. Value *GlobOpt::NewValue(const ValueNumber valueNumber, ValueInfo *const valueInfo)
  3651. {
  3652. Assert(valueInfo);
  3653. return Value::New(alloc, valueNumber, valueInfo);
  3654. }
  3655. Value *GlobOpt::CopyValue(Value const *const value)
  3656. {
  3657. return CopyValue(value, NewValueNumber());
  3658. }
  3659. Value *GlobOpt::CopyValue(Value const *const value, const ValueNumber valueNumber)
  3660. {
  3661. Assert(value);
  3662. return value->Copy(alloc, valueNumber);
  3663. }
  3664. Value *
  3665. GlobOpt::NewGenericValue(const ValueType valueType)
  3666. {
  3667. return NewGenericValue(valueType, static_cast<IR::Opnd *>(nullptr));
  3668. }
  3669. Value *
  3670. GlobOpt::NewGenericValue(const ValueType valueType, IR::Opnd *const opnd)
  3671. {
  3672. // Shouldn't assign a likely-int value to something that is definitely not an int
  3673. Assert(!(valueType.IsLikelyInt() && opnd && opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->m_isNotInt));
  3674. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  3675. Value *val = NewValue(valueInfo);
  3676. TrackNewValueForKills(val);
  3677. CurrentBlockData()->InsertNewValue(val, opnd);
  3678. return val;
  3679. }
  3680. Value *
  3681. GlobOpt::NewGenericValue(const ValueType valueType, Sym *const sym)
  3682. {
  3683. ValueInfo *valueInfo = ValueInfo::New(this->alloc, valueType);
  3684. Value *val = NewValue(valueInfo);
  3685. TrackNewValueForKills(val);
  3686. CurrentBlockData()->SetValue(val, sym);
  3687. return val;
  3688. }
  3689. Value *
  3690. GlobOpt::GetIntConstantValue(const int32 intConst, IR::Instr * instr, IR::Opnd *const opnd)
  3691. {
  3692. Value *value = nullptr;
  3693. Value *const cachedValue = this->intConstantToValueMap->Lookup(intConst, nullptr);
  3694. if(cachedValue)
  3695. {
  3696. // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
  3697. // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
  3698. // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
  3699. // way to determine if a value with the same value number exists for this block. So the best we can do with a global
  3700. // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
  3701. // Otherwise, we have to create a new value with a new value number.
  3702. Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
  3703. if (symStore && CurrentBlockData()->IsLive(symStore))
  3704. {
  3705. Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
  3706. int32 symStoreIntConstantValue;
  3707. if (symStoreValue &&
  3708. symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
  3709. symStoreValue->GetValueInfo()->TryGetIntConstantValue(&symStoreIntConstantValue) &&
  3710. symStoreIntConstantValue == intConst)
  3711. {
  3712. value = symStoreValue;
  3713. }
  3714. }
  3715. }
  3716. if (!value)
  3717. {
  3718. value = NewIntConstantValue(intConst, instr, !Js::TaggedInt::IsOverflow(intConst));
  3719. }
  3720. return CurrentBlockData()->InsertNewValue(value, opnd);
  3721. }
  3722. Value *
  3723. GlobOpt::GetIntConstantValue(const int64 intConst, IR::Instr * instr, IR::Opnd *const opnd)
  3724. {
  3725. Assert(instr->m_func->GetJITFunctionBody()->IsWasmFunction());
  3726. Value *value = nullptr;
  3727. Value *const cachedValue = this->int64ConstantToValueMap->Lookup(intConst, nullptr);
  3728. if (cachedValue)
  3729. {
  3730. // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
  3731. // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
  3732. // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
  3733. // way to determine if a value with the same value number exists for this block. So the best we can do with a global
  3734. // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
  3735. // Otherwise, we have to create a new value with a new value number.
  3736. Sym *const symStore = cachedValue->GetValueInfo()->GetSymStore();
  3737. if (symStore && this->currentBlock->globOptData.IsLive(symStore))
  3738. {
  3739. Value *const symStoreValue = this->currentBlock->globOptData.FindValue(symStore);
  3740. int64 symStoreIntConstantValue;
  3741. if (symStoreValue &&
  3742. symStoreValue->GetValueNumber() == cachedValue->GetValueNumber() &&
  3743. symStoreValue->GetValueInfo()->TryGetInt64ConstantValue(&symStoreIntConstantValue, false) &&
  3744. symStoreIntConstantValue == intConst)
  3745. {
  3746. value = symStoreValue;
  3747. }
  3748. }
  3749. }
  3750. if (!value)
  3751. {
  3752. value = NewInt64ConstantValue(intConst, instr);
  3753. }
  3754. return this->currentBlock->globOptData.InsertNewValue(value, opnd);
  3755. }
  3756. Value *
  3757. GlobOpt::NewInt64ConstantValue(const int64 intConst, IR::Instr* instr)
  3758. {
  3759. Value * value = NewValue(Int64ConstantValueInfo::New(this->alloc, intConst));
  3760. this->int64ConstantToValueMap->Item(intConst, value);
  3761. if (!value->GetValueInfo()->GetSymStore() &&
  3762. (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
  3763. {
  3764. StackSym * sym = instr->GetDst()->GetStackSym();
  3765. Assert(sym && !sym->IsTypeSpec());
  3766. this->currentBlock->globOptData.SetValue(value, sym);
  3767. this->currentBlock->globOptData.liveVarSyms->Set(sym->m_id);
  3768. }
  3769. return value;
  3770. }
// Creates a new value for 'intConst', caches it in the global int-constant map,
// and — for taggable ints with the HoistConstInt phase enabled — tries to hoist
// the constant load and attach a sym-store so downstream uses can share the
// value (enabling CSE).
Value *
GlobOpt::NewIntConstantValue(const int32 intConst, IR::Instr * instr, bool isTaggable)
{
    Value * value = NewValue(IntConstantValueInfo::New(this->alloc, intConst));
    this->intConstantToValueMap->Item(intConst, value);
    if (isTaggable &&
        !PHASE_OFF(Js::HoistConstIntPhase, this->func))
    {
        // When creating a new int constant value, make sure it gets a symstore. If the int const doesn't have a symstore,
        // any downstream instruction using the same int will have to create a new value (object) for the int.
        // This gets in the way of CSE.
        value = HoistConstantLoadAndPropagateValueBackward(Js::TaggedInt::ToVarUnchecked(intConst), instr, value);
        if (!value->GetValueInfo()->GetSymStore() &&
            (instr->m_opcode == Js::OpCode::LdC_A_I4 || instr->m_opcode == Js::OpCode::Ld_I4))
        {
            // Hoisting didn't provide a sym-store; fall back to the constant
            // load's own destination sym.
            StackSym * sym = instr->GetDst()->GetStackSym();
            Assert(sym);
            if (sym->IsTypeSpec())
            {
                Assert(sym->IsInt32());
                // Record the value on the var-equivalent sym, and mark the
                // int32-specialized sym live.
                StackSym * varSym = sym->GetVarEquivSym(instr->m_func);
                CurrentBlockData()->SetValue(value, varSym);
                CurrentBlockData()->liveInt32Syms->Set(varSym->m_id);
            }
            else
            {
                CurrentBlockData()->SetValue(value, sym);
                CurrentBlockData()->liveVarSyms->Set(sym->m_id);
            }
        }
    }
    return value;
}
  3804. ValueInfo *
  3805. GlobOpt::NewIntRangeValueInfo(const int32 min, const int32 max, const bool wasNegativeZeroPreventedByBailout)
  3806. {
  3807. return ValueInfo::NewIntRangeValueInfo(this->alloc, min, max, wasNegativeZeroPreventedByBailout);
  3808. }
  3809. ValueInfo *GlobOpt::NewIntRangeValueInfo(
  3810. const ValueInfo *const originalValueInfo,
  3811. const int32 min,
  3812. const int32 max) const
  3813. {
  3814. Assert(originalValueInfo);
  3815. ValueInfo *valueInfo;
  3816. if(min == max)
  3817. {
  3818. // Since int constant values are const-propped, negative zero tracking does not track them, and so it's okay to ignore
  3819. // 'wasNegativeZeroPreventedByBailout'
  3820. valueInfo = IntConstantValueInfo::New(alloc, min);
  3821. }
  3822. else
  3823. {
  3824. valueInfo =
  3825. IntRangeValueInfo::New(
  3826. alloc,
  3827. min,
  3828. max,
  3829. min <= 0 && max >= 0 && originalValueInfo->WasNegativeZeroPreventedByBailout());
  3830. }
  3831. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  3832. return valueInfo;
  3833. }
  3834. Value *
  3835. GlobOpt::NewIntRangeValue(
  3836. const int32 min,
  3837. const int32 max,
  3838. const bool wasNegativeZeroPreventedByBailout,
  3839. IR::Opnd *const opnd)
  3840. {
  3841. ValueInfo *valueInfo = this->NewIntRangeValueInfo(min, max, wasNegativeZeroPreventedByBailout);
  3842. Value *val = NewValue(valueInfo);
  3843. if (opnd)
  3844. {
  3845. GOPT_TRACE_OPND(opnd, _u("Range %d (0x%X) to %d (0x%X)\n"), min, min, max, max);
  3846. }
  3847. CurrentBlockData()->InsertNewValue(val, opnd);
  3848. return val;
  3849. }
  3850. IntBoundedValueInfo *GlobOpt::NewIntBoundedValueInfo(
  3851. const ValueInfo *const originalValueInfo,
  3852. const IntBounds *const bounds) const
  3853. {
  3854. Assert(originalValueInfo);
  3855. bounds->Verify();
  3856. IntBoundedValueInfo *const valueInfo =
  3857. IntBoundedValueInfo::New(
  3858. originalValueInfo->Type(),
  3859. bounds,
  3860. (
  3861. bounds->ConstantLowerBound() <= 0 &&
  3862. bounds->ConstantUpperBound() >= 0 &&
  3863. originalValueInfo->WasNegativeZeroPreventedByBailout()
  3864. ),
  3865. alloc);
  3866. valueInfo->SetSymStore(originalValueInfo->GetSymStore());
  3867. return valueInfo;
  3868. }
  3869. Value *GlobOpt::NewIntBoundedValue(
  3870. const ValueType valueType,
  3871. const IntBounds *const bounds,
  3872. const bool wasNegativeZeroPreventedByBailout,
  3873. IR::Opnd *const opnd)
  3874. {
  3875. Value *const value = NewValue(IntBoundedValueInfo::New(valueType, bounds, wasNegativeZeroPreventedByBailout, alloc));
  3876. CurrentBlockData()->InsertNewValue(value, opnd);
  3877. return value;
  3878. }
  3879. Value *
  3880. GlobOpt::NewFloatConstantValue(const FloatConstType floatValue, IR::Opnd *const opnd)
  3881. {
  3882. FloatConstantValueInfo *valueInfo = FloatConstantValueInfo::New(this->alloc, floatValue);
  3883. Value *val = NewValue(valueInfo);
  3884. CurrentBlockData()->InsertNewValue(val, opnd);
  3885. return val;
  3886. }
// Returns a value for the given address constant. Tries the global
// address-keyed cache first; for string constants that miss by address, also
// tries the content-keyed string cache so equal strings at different addresses
// can share a value. Each cache hit is validated against the current block
// before reuse. Falls back to creating a new var-constant value, and stamps
// the operand with the resulting value type.
Value *
GlobOpt::GetVarConstantValue(IR::AddrOpnd *addrOpnd)
{
    bool isVar = addrOpnd->IsVar();
    // Only a local (in-JIT-process) address can be inspected as a string here.
    bool isString = isVar && addrOpnd->m_localAddress && JITJavascriptString::Is(addrOpnd->m_localAddress);
    Value *val = nullptr;
    Value *cachedValue = nullptr;
    if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
    {
        // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
        // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
        // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
        // way to determine if a value with the same value number exists for this block. So the best we can do with a global
        // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
        // Otherwise, we have to create a new value with a new value number.
        Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
        if(symStore && CurrentBlockData()->IsLive(symStore))
        {
            Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
            if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
            {
                ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                if(symStoreValueInfo->IsVarConstant() && symStoreValueInfo->AsVarConstant()->VarValue() == addrOpnd->m_address)
                {
                    val = symStoreValue;
                }
            }
        }
    }
    else if (isString)
    {
        // Address miss: for strings, look up by character content instead.
        JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
        Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
        if (this->stringConstantToValueMap->TryGetValue(internalString, &cachedValue))
        {
            // Same per-block validation as the address-cache path above.
            Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
            if (symStore && CurrentBlockData()->IsLive(symStore))
            {
                Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
                if (symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
                {
                    ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
                    if (symStoreValueInfo->IsVarConstant())
                    {
                        // Compare by characters, not by address.
                        JITJavascriptString * cachedString = JITJavascriptString::FromVar(symStoreValue->GetValueInfo()->AsVarConstant()->VarValue(true));
                        Js::InternalString cachedInternalString(cachedString->GetString(), cachedString->GetLength());
                        if (Js::InternalStringComparer::Equals(internalString, cachedInternalString))
                        {
                            val = symStoreValue;
                        }
                    }
                }
            }
        }
    }
    if(!val)
    {
        val = NewVarConstantValue(addrOpnd, isString);
    }

    addrOpnd->SetValueType(val->GetValueInfo()->Type());
    return val;
}
  3949. Value *
  3950. GlobOpt::NewVarConstantValue(IR::AddrOpnd *addrOpnd, bool isString)
  3951. {
  3952. VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, addrOpnd->m_address, addrOpnd->GetValueType(), false, addrOpnd->m_localAddress);
  3953. Value * value = NewValue(valueInfo);
  3954. this->addrConstantToValueMap->Item(addrOpnd->m_address, value);
  3955. if (isString)
  3956. {
  3957. JITJavascriptString* jsString = JITJavascriptString::FromVar(addrOpnd->m_localAddress);
  3958. Js::InternalString internalString(jsString->GetString(), jsString->GetLength());
  3959. this->stringConstantToValueMap->Item(internalString, value);
  3960. }
  3961. return value;
  3962. }
// Hoists a load of the given taggable-int constant to the top of the function
// and propagates 'value' backward to every block between the entry block and
// the current block, so the constant's value is available on merges. Returns
// the (re-inserted) value. Does nothing during the loop prepass, or when we're
// already in the entry block on a value-transferring instruction.
Value *
GlobOpt::HoistConstantLoadAndPropagateValueBackward(Js::Var varConst, IR::Instr * origInstr, Value * value)
{
    if (this->IsLoopPrePass() ||
        ((this->currentBlock == this->func->m_fg->blockList) &&
        origInstr->TransfersSrcValue()))
    {
        return value;
    }

    // Only hoisting taggable int const loads for now. Could be extended to other constants (floats, strings, addr opnds) if we see some benefit.
    Assert(Js::TaggedInt::Is(varConst));

    // Insert a load of the constant at the top of the function
    StackSym * dstSym = StackSym::New(this->func);
    IR::RegOpnd * constRegOpnd = IR::RegOpnd::New(dstSym, TyVar, this->func);
    IR::Instr * loadInstr = IR::Instr::NewConstantLoad(constRegOpnd, (intptr_t)varConst, ValueType::GetInt(true), this->func);
    this->func->m_fg->blockList->GetFirstInstr()->InsertAfter(loadInstr);

    // Type-spec the load (Support for floats needs to be added when we start hoisting float constants).
    bool typeSpecedToInt = false;
    if (Js::TaggedInt::Is(varConst) && !IsTypeSpecPhaseOff(this->func))
    {
        typeSpecedToInt = true;
        loadInstr->m_opcode = Js::OpCode::Ld_I4;
        ToInt32Dst(loadInstr, loadInstr->GetDst()->AsRegOpnd(), this->currentBlock);
        loadInstr->GetDst()->GetStackSym()->SetIsConst();
    }
    else
    {
        CurrentBlockData()->liveVarSyms->Set(dstSym->m_id);
    }

    // Add the value (object) to the current block's symToValueMap and propagate the value backward to all relevant blocks so it is available on merges.
    value = CurrentBlockData()->InsertNewValue(value, constRegOpnd);

    // Pointer-to-member selecting which liveness bit-vector to update in each
    // block, depending on whether the hoisted load was int-specialized.
    BVSparse<JitArenaAllocator>* GlobOptBlockData::*bv;
    bv = typeSpecedToInt ? &GlobOptBlockData::liveInt32Syms : &GlobOptBlockData::liveVarSyms; // Will need to be expanded when we start hoisting float constants.

    if (this->currentBlock != this->func->m_fg->blockList)
    {
        // Walk backward from the current block toward the entry block, marking
        // the new sym live and installing a per-block copy of the value.
        for (InvariantBlockBackwardIterator it(this, this->currentBlock, this->func->m_fg->blockList, nullptr);
            it.IsValid();
            it.MoveNext())
        {
            BasicBlock * block = it.Block();
            (block->globOptData.*bv)->Set(dstSym->m_id);
            Assert(!block->globOptData.FindValue(dstSym));
            // Each block gets its own Value object, but they all share one value number.
            Value *const valueCopy = CopyValue(value, value->GetValueNumber());
            block->globOptData.SetValue(valueCopy, dstSym);
        }
    }

    return value;
}
  4011. Value *
  4012. GlobOpt::NewFixedFunctionValue(Js::JavascriptFunction *function, IR::AddrOpnd *addrOpnd)
  4013. {
  4014. Assert(function != nullptr);
  4015. Value *val = nullptr;
  4016. Value *cachedValue = nullptr;
  4017. if(this->addrConstantToValueMap->TryGetValue(addrOpnd->m_address, &cachedValue))
  4018. {
  4019. // The cached value could be from a different block since this is a global (as opposed to a per-block) cache. Since
  4020. // values are cloned for each block, we can't use the same value object. We also can't have two values with the same
  4021. // number in one block, so we can't simply copy the cached value either. And finally, there is no deterministic and fast
  4022. // way to determine if a value with the same value number exists for this block. So the best we can do with a global
  4023. // cache is to check the sym-store's value in the current block to see if it has a value with the same number.
  4024. // Otherwise, we have to create a new value with a new value number.
  4025. Sym *symStore = cachedValue->GetValueInfo()->GetSymStore();
  4026. if(symStore && CurrentBlockData()->IsLive(symStore))
  4027. {
  4028. Value *const symStoreValue = CurrentBlockData()->FindValue(symStore);
  4029. if(symStoreValue && symStoreValue->GetValueNumber() == cachedValue->GetValueNumber())
  4030. {
  4031. ValueInfo *const symStoreValueInfo = symStoreValue->GetValueInfo();
  4032. if(symStoreValueInfo->IsVarConstant())
  4033. {
  4034. VarConstantValueInfo *const symStoreVarConstantValueInfo = symStoreValueInfo->AsVarConstant();
  4035. if(symStoreVarConstantValueInfo->VarValue() == addrOpnd->m_address &&
  4036. symStoreVarConstantValueInfo->IsFunction())
  4037. {
  4038. val = symStoreValue;
  4039. }
  4040. }
  4041. }
  4042. }
  4043. }
  4044. if(!val)
  4045. {
  4046. VarConstantValueInfo *valueInfo = VarConstantValueInfo::New(this->alloc, function, addrOpnd->GetValueType(), true, addrOpnd->m_localAddress);
  4047. val = NewValue(valueInfo);
  4048. this->addrConstantToValueMap->AddNew(addrOpnd->m_address, val);
  4049. }
  4050. CurrentBlockData()->InsertNewValue(val, addrOpnd);
  4051. return val;
  4052. }
  4053. StackSym *GlobOpt::GetTaggedIntConstantStackSym(const int32 intConstantValue) const
  4054. {
  4055. Assert(!Js::TaggedInt::IsOverflow(intConstantValue));
  4056. return intConstantToStackSymMap->Lookup(intConstantValue, nullptr);
  4057. }
  4058. StackSym *GlobOpt::GetOrCreateTaggedIntConstantStackSym(const int32 intConstantValue) const
  4059. {
  4060. StackSym *stackSym = GetTaggedIntConstantStackSym(intConstantValue);
  4061. if(stackSym)
  4062. {
  4063. return stackSym;
  4064. }
  4065. stackSym = StackSym::New(TyVar,func);
  4066. intConstantToStackSymMap->Add(intConstantValue, stackSym);
  4067. return stackSym;
  4068. }
  4069. Sym *
  4070. GlobOpt::SetSymStore(ValueInfo *valueInfo, Sym *sym)
  4071. {
  4072. if (sym->IsStackSym())
  4073. {
  4074. StackSym *stackSym = sym->AsStackSym();
  4075. if (stackSym->IsTypeSpec())
  4076. {
  4077. stackSym = stackSym->GetVarEquivSym(this->func);
  4078. sym = stackSym;
  4079. }
  4080. }
  4081. if (valueInfo->GetSymStore() == nullptr || valueInfo->GetSymStore()->IsPropertySym())
  4082. {
  4083. SetSymStoreDirect(valueInfo, sym);
  4084. }
  4085. return sym;
  4086. }
  4087. void
  4088. GlobOpt::SetSymStoreDirect(ValueInfo * valueInfo, Sym * sym)
  4089. {
  4090. Sym * prevSymStore = valueInfo->GetSymStore();
  4091. if (prevSymStore && prevSymStore->IsStackSym() &&
  4092. prevSymStore->AsStackSym()->HasByteCodeRegSlot())
  4093. {
  4094. CurrentBlockData()->SetChangedSym(prevSymStore->m_id);
  4095. }
  4096. valueInfo->SetSymStore(sym);
  4097. }
// Figure out the Value of this dst.
// Dispatches on the instruction opcode to compute (or transfer) a Value for the
// destination operand, updating per-block GlobOpt state (temp-src tracking, live
// fields, sym stores) along the way. Returns nullptr when the dst cannot be
// value-numbered (setter calls, no dst, indirect dst, unsupported opnd kinds).
Value *
GlobOpt::ValueNumberDst(IR::Instr **pInstr, Value *src1Val, Value *src2Val)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *dst = instr->GetDst();
    Value *dstVal = nullptr;
    Sym *sym;

    // Setter calls can have arbitrary side effects; don't value-number the dst.
    if (instr->CallsSetter())
    {
        return nullptr;
    }

    if (dst == nullptr)
    {
        return nullptr;
    }

    switch (dst->GetKind())
    {
    case IR::OpndKindSym:
        sym = dst->AsSymOpnd()->m_sym;
        break;

    case IR::OpndKindReg:
        sym = dst->AsRegOpnd()->m_sym;

        // Maintain the isTempSrc bit-vector used for temp-number tracking.
        if (OpCodeAttr::TempNumberProducing(instr->m_opcode))
        {
            CurrentBlockData()->isTempSrc->Set(sym->m_id);
        }
        else if (OpCodeAttr::TempNumberTransfer(instr->m_opcode))
        {
            IR::Opnd *src1 = instr->GetSrc1();

            if (src1->IsRegOpnd() && CurrentBlockData()->isTempSrc->Test(src1->AsRegOpnd()->m_sym->m_id))
            {
                StackSym *src1Sym = src1->AsRegOpnd()->m_sym;
                // isTempSrc is used for marking isTempLastUse, which is used to generate AddLeftDead()
                // calls instead of the normal Add helpers. It tells the runtime that concats can use string
                // builders.
                // We need to be careful in the case where src1 points to a string builder and is getting aliased.
                // Clear the bit on src and dst of the transfer instr in this case, unless we can prove src1
                // isn't pointing at a string builder, like if it is single def and the def instr is not an Add,
                // but TempProducing.
                if (src1Sym->IsSingleDef() && src1Sym->m_instrDef->m_opcode != Js::OpCode::Add_A
                    && OpCodeAttr::TempNumberProducing(src1Sym->m_instrDef->m_opcode))
                {
                    CurrentBlockData()->isTempSrc->Set(sym->m_id);
                }
                else
                {
                    CurrentBlockData()->isTempSrc->Clear(src1->AsRegOpnd()->m_sym->m_id);
                    CurrentBlockData()->isTempSrc->Clear(sym->m_id);
                }
            }
            else
            {
                CurrentBlockData()->isTempSrc->Clear(sym->m_id);
            }
        }
        else
        {
            CurrentBlockData()->isTempSrc->Clear(sym->m_id);
        }
        break;

    case IR::OpndKindIndir:
        return nullptr;

    default:
        return nullptr;
    }

    int32 min1, max1, min2, max2, newMin, newMax;
    ValueInfo *src1ValueInfo = (src1Val ? src1Val->GetValueInfo() : nullptr);
    ValueInfo *src2ValueInfo = (src2Val ? src2Val->GetValueInfo() : nullptr);

    switch (instr->m_opcode)
    {
    case Js::OpCode::Conv_PrimStr:
        AssertMsg(instr->GetDst()->GetValueType().IsString(),
            "Creator of this instruction should have set the type");
        if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsPrimitive())
        {
            break;
        }
        // src is known primitive: the conversion cannot observe side effects,
        // so downgrade the opcode and fall into the Conv_Str handling.
        instr->m_opcode = Js::OpCode::Conv_Str;
        // fall-through

    case Js::OpCode::Conv_Str:
    // This opcode is commented out since we don't track regex information in GlobOpt now.
    //case Js::OpCode::Coerce_Regex:
    case Js::OpCode::Coerce_Str:
        AssertMsg(instr->GetDst()->GetValueType().IsString(),
            "Creator of this instruction should have set the type");
        // fall-through

    case Js::OpCode::Coerce_StrOrRegex:
        // We don't set the ValueType of src1 for Coerce_StrOrRegex, hence skip the ASSERT
        if (this->IsLoopPrePass() || src1ValueInfo == nullptr || !src1ValueInfo->IsString())
        {
            break;
        }
        // src is already a string: the coercion is a plain transfer; rewrite to
        // Ld_A and fall into the load/transfer handling below.
        instr->m_opcode = Js::OpCode::Ld_A;
        // fall-through

    case Js::OpCode::BytecodeArgOutCapture:
    case Js::OpCode::InitConst:
    case Js::OpCode::LdAsmJsFunc:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
        // Propagate sym attributes across the reg copy.
        if (!this->IsLoopPrePass() && instr->GetSrc1()->IsRegOpnd())
        {
            if (dst->AsRegOpnd()->m_sym->IsSingleDef())
            {
                dst->AsRegOpnd()->m_sym->CopySymAttrs(instr->GetSrc1()->AsRegOpnd()->m_sym);
            }
        }

        if (instr->IsProfiledInstr())
        {
            // Use the profiled value type unless the profile says "likely int" but
            // either sym is already known not to be an int.
            const ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
            if(!(
                profiledValueType.IsLikelyInt() &&
                (
                    (dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt) ||
                    (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
                )
            ))
            {
                if(!src1ValueInfo)
                {
                    dstVal = this->NewGenericValue(profiledValueType, dst);
                }
                else if(src1ValueInfo->IsUninitialized())
                {
                    if(IsLoopPrePass())
                    {
                        dstVal = this->NewGenericValue(profiledValueType, dst);
                    }
                    else
                    {
                        // Assuming the profile data gives more precise value types based on the path it took at runtime, we
                        // can improve the original value type.
                        src1ValueInfo->Type() = profiledValueType;
                        instr->GetSrc1()->SetValueType(profiledValueType);
                    }
                }
            }
        }
        if (dstVal == nullptr)
        {
            // Ld_A is just transferring the value
            dstVal = this->ValueNumberTransferDst(instr, src1Val);
        }
        break;

    case Js::OpCode::ExtendArg_A:
    {
        // SIMD_JS
        // We avoid transforming EAs to Lds to keep the IR shape consistent and avoid CSEing of EAs.
        // CSEOptimize only assigns a Value to the EA dst, and doesn't turn it to a Ld. If this happened, we shouldn't assign a new Value here.
        if (DoCSE())
        {
            IR::Opnd * currDst = instr->GetDst();
            Value * currDstVal = CurrentBlockData()->FindValue(currDst->GetStackSym());
            if (currDstVal != nullptr)
            {
                return currDstVal;
            }
        }
        break;
    }

    case Js::OpCode::CheckFixedFld:
        AssertMsg(false, "CheckFixedFld doesn't have a dst, so we should never get here");
        break;

    case Js::OpCode::LdSlot:
    case Js::OpCode::LdSlotArr:
    case Js::OpCode::LdFld:
    case Js::OpCode::LdFldForTypeOf:
    case Js::OpCode::LdFldForCallApplyTarget:
    // Do not transfer value type on ldFldForTypeOf to prevent copy-prop to LdRootFld in case the field doesn't exist since LdRootFldForTypeOf does not throw
    //case Js::OpCode::LdRootFldForTypeOf:
    case Js::OpCode::LdRootFld:
    case Js::OpCode::LdMethodFld:
    case Js::OpCode::LdRootMethodFld:
    case Js::OpCode::ScopedLdMethodFld:
    case Js::OpCode::LdMethodFromFlags:
        if (instr->IsProfiledInstr())
        {
            ValueType profiledValueType(instr->AsProfiledInstr()->u.FldInfo().valueType);
            if(!(profiledValueType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt))
            {
                if(!src1ValueInfo)
                {
                    dstVal = this->NewGenericValue(profiledValueType, dst);
                }
                else if(src1ValueInfo->IsUninitialized())
                {
                    if(IsLoopPrePass() && (!dst->IsRegOpnd() || !dst->AsRegOpnd()->m_sym->IsSingleDef() || DoFieldHoisting()))
                    {
                        dstVal = this->NewGenericValue(profiledValueType, dst);
                    }
                    else
                    {
                        // Assuming the profile data gives more precise value types based on the path it took at runtime, we
                        // can improve the original value type.
                        src1ValueInfo->Type() = profiledValueType;
                        instr->GetSrc1()->SetValueType(profiledValueType);
                    }
                }
            }
        }

        if (dstVal == nullptr)
        {
            dstVal = this->ValueNumberTransferDst(instr, src1Val);
        }

        if(!this->IsLoopPrePass())
        {
            // We cannot transfer value if the field hasn't been copy prop'd because we don't generate
            // an implicit call bailout between those values if we don't have "live fields" unless, we are hoisting the field.
            PropertySym *propertySym = instr->GetSrc1()->AsSymOpnd()->m_sym->AsPropertySym();
            StackSym * fieldHoistSym;
            Loop * loop = this->FindFieldHoistStackSym(this->currentBlock->loop, propertySym->m_id, &fieldHoistSym, instr);
            ValueInfo *dstValueInfo = (dstVal ? dstVal->GetValueInfo() : nullptr);

            // Update symStore for field hoisting
            if (loop != nullptr && (dstValueInfo != nullptr))
            {
                this->SetSymStoreDirect(dstValueInfo, fieldHoistSym);
            }
            // Update symStore if it isn't a stackSym
            if (dstVal && (!dstValueInfo->GetSymStore() || !dstValueInfo->GetSymStore()->IsStackSym()))
            {
                Assert(dst->IsRegOpnd());
                this->SetSymStoreDirect(dstValueInfo, dst->AsRegOpnd()->m_sym);
            }
            if (src1Val != dstVal)
            {
                CurrentBlockData()->SetValue(dstVal, instr->GetSrc1());
            }
        }
        break;

    case Js::OpCode::LdC_A_R8:
    case Js::OpCode::LdC_A_I4:
    case Js::OpCode::ArgIn_A:
        dstVal = src1Val;
        break;

    case Js::OpCode::LdStr:
        if (src1Val == nullptr)
        {
            src1Val = NewGenericValue(ValueType::String, dst);
        }
        dstVal = src1Val;
        break;

    // LdElemUndef only assign undef if the field doesn't exist.
    // So we don't actually know what the value is, so we can't really copy prop it.
    //case Js::OpCode::LdElemUndef:

    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StFldStrict:
    case Js::OpCode::StRootFldStrict:
        if (DoFieldCopyProp())
        {
            if (src1Val == nullptr)
            {
                // src1 may have no value if it's not a valid var, e.g., NULL for let/const initialization.
                // Consider creating generic values for such things.
                return nullptr;
            }
            AssertMsg(!src2Val, "Bad src Values...");
            Assert(sym->IsPropertySym());
            SymID symId = sym->m_id;
            Assert(instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl || !CurrentBlockData()->liveFields->Test(symId));
            if (IsHoistablePropertySym(symId))
            {
                // We have changed the value of a hoistable field, load afterwards shouldn't get hoisted,
                // but we will still copy prop the pre-assign sym to it if we have a live value.
                Assert((instr->m_opcode == Js::OpCode::StSlot || instr->m_opcode == Js::OpCode::StSlotChkUndecl) && CurrentBlockData()->liveFields->Test(symId));
                CurrentBlockData()->hoistableFields->Clear(symId);
            }
            CurrentBlockData()->liveFields->Set(symId);
            if (!this->IsLoopPrePass() && dst->GetIsDead())
            {
                // Take the property sym out of the live fields set (with special handling for loops).
                this->EndFieldLifetime(dst->AsSymOpnd());
            }

            dstVal = this->ValueNumberTransferDst(instr, src1Val);
        }
        else
        {
            return nullptr;
        }
        break;

    case Js::OpCode::Conv_Num:
        // NOTE(review): src1ValueInfo is dereferenced without a null check here,
        // unlike most other cases in this switch — presumably Conv_Num always
        // arrives with a valued src1; confirm against callers.
        if(src1ValueInfo->IsNumber())
        {
            // Already a number: the conversion is a plain value transfer.
            dstVal = ValueNumberTransferDst(instr, src1Val);
        }
        else
        {
            return NewGenericValue(src1ValueInfo->Type().ToDefiniteAnyNumber(), dst);
        }
        break;

    case Js::OpCode::Not_A:
    {
        if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
        {
            min1 = INT32_MIN;
            max1 = INT32_MAX;
        }

        this->PropagateIntRangeForNot(min1, max1, &newMin, &newMax);
        return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
    }

    case Js::OpCode::Xor_A:
    case Js::OpCode::Or_A:
    case Js::OpCode::And_A:
    case Js::OpCode::Shl_A:
    case Js::OpCode::Shr_A:
    case Js::OpCode::ShrU_A:
    {
        // Unknown ranges default to the full int32 range.
        if (!src1Val || !src1ValueInfo->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec()))
        {
            min1 = INT32_MIN;
            max1 = INT32_MAX;
        }
        if (!src2Val || !src2ValueInfo->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec()))
        {
            min2 = INT32_MIN;
            max2 = INT32_MAX;
        }

        if (instr->m_opcode == Js::OpCode::ShrU_A &&
            min1 < 0 &&
            IntConstantBounds(min2, max2).And_0x1f().Contains(0))
        {
            // Src1 may be too large to represent as a signed int32, and src2 may be zero.
            // Since the result can therefore be too large to represent as a signed int32,
            // include Number in the value type.
            return CreateDstUntransferredValue(
                ValueType::AnyNumber.SetCanBeTaggedValue(true), instr, src1Val, src2Val);
        }

        this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
        return CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
    }

    case Js::OpCode::Incr_A:
    case Js::OpCode::Decr_A:
    {
        ValueType valueType;
        if(src1Val)
        {
            valueType = src1Val->GetValueInfo()->Type().ToDefiniteAnyNumber();
        }
        else
        {
            valueType = ValueType::Number;
        }
        return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
    }

    case Js::OpCode::Add_A:
    {
        // Infer the result type of '+': number + number => number (int/float
        // refinements below); any string operand => string; otherwise unknown.
        ValueType valueType;
        if (src1Val && src1ValueInfo->IsLikelyNumber() && src2Val && src2ValueInfo->IsLikelyNumber())
        {
            if(src1ValueInfo->IsLikelyInt() && src2ValueInfo->IsLikelyInt())
            {
                // When doing aggressiveIntType, just assume the result is likely going to be int
                // if both input is int.
                const bool isLikelyTagged = src1ValueInfo->IsLikelyTaggedInt() && src2ValueInfo->IsLikelyTaggedInt();
                if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
                {
                    // If both of them are numbers then we can definitely say that the result is a number.
                    valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
                }
                else
                {
                    // This is only likely going to be int but can be a string as well.
                    valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
                }
            }
            else
            {
                // We can only be certain of any thing if both of them are numbers.
                // Otherwise, the result could be string.
                if (src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
                {
                    if (src1ValueInfo->IsFloat() || src2ValueInfo->IsFloat())
                    {
                        // If one of them is a float, the result probably is a float instead of just int
                        // but should always be a number.
                        valueType = ValueType::Float;
                    }
                    else
                    {
                        // Could be int, could be number
                        valueType = ValueType::Number;
                    }
                }
                else if (src1ValueInfo->IsLikelyFloat() || src2ValueInfo->IsLikelyFloat())
                {
                    // Result is likely a float (but can be anything)
                    valueType = ValueType::Float.ToLikely();
                }
                else
                {
                    // Otherwise it is a likely int or float (but can be anything)
                    valueType = ValueType::Number.ToLikely();
                }
            }
        }
        else if((src1Val && src1ValueInfo->IsString()) || (src2Val && src2ValueInfo->IsString()))
        {
            // String + anything should always result in a string
            valueType = ValueType::String;
        }
        else if((src1Val && src1ValueInfo->IsNotString() && src1ValueInfo->IsPrimitive())
            && (src2Val && src2ValueInfo->IsNotString() && src2ValueInfo->IsPrimitive()))
        {
            // If src1 and src2 are not strings and primitive, add should yield a number.
            valueType = ValueType::Number;
        }
        else if((src1Val && src1ValueInfo->IsLikelyString()) || (src2Val && src2ValueInfo->IsLikelyString()))
        {
            // likelystring + anything should always result in a likelystring
            valueType = ValueType::String.ToLikely();
        }
        else
        {
            // Number or string. Could make the value a merge of Number and String, but Uninitialized is more useful at the moment.
            Assert(valueType.IsUninitialized());
        }

        return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
    }

    case Js::OpCode::Div_A:
    {
        ValueType divValueType = GetDivValueType(instr, src1Val, src2Val, false);
        if (divValueType.IsLikelyInt() || divValueType.IsFloat())
        {
            return CreateDstUntransferredValue(divValueType, instr, src1Val, src2Val);
        }
    }
    // fall-through

    case Js::OpCode::Sub_A:
    case Js::OpCode::Mul_A:
    case Js::OpCode::Rem_A:
    {
        ValueType valueType;
        if( src1Val &&
            src1ValueInfo->IsLikelyInt() &&
            src2Val &&
            src2ValueInfo->IsLikelyInt() &&
            instr->m_opcode != Js::OpCode::Div_A)
        {
            // Rem with a tagged-int dividend stays tagged regardless of the divisor.
            const bool isLikelyTagged =
                src1ValueInfo->IsLikelyTaggedInt() && (src2ValueInfo->IsLikelyTaggedInt() || instr->m_opcode == Js::OpCode::Rem_A);
            if(src1ValueInfo->IsNumber() && src2ValueInfo->IsNumber())
            {
                valueType = ValueType::GetNumberAndLikelyInt(isLikelyTagged);
            }
            else
            {
                valueType = ValueType::GetInt(isLikelyTagged).ToLikely();
            }
        }
        else if ((src1Val && src1ValueInfo->IsLikelyFloat()) || (src2Val && src2ValueInfo->IsLikelyFloat()))
        {
            // This should ideally be NewNumberAndLikelyFloatValue since we know the result is a number but not sure if it will
            // be a float value. However, that Number/LikelyFloat value type doesn't exist currently and all the necessary
            // checks are done for float values (tagged int checks, etc.) so it's sufficient to just create a float value here.
            valueType = ValueType::Float;
        }
        else
        {
            valueType = ValueType::Number;
        }

        return CreateDstUntransferredValue(valueType, instr, src1Val, src2Val);
    }

    case Js::OpCode::CallI:
        Assert(dst->IsRegOpnd());
        return NewGenericValue(dst->AsRegOpnd()->GetValueType(), dst);

    case Js::OpCode::LdElemI_A:
    {
        dstVal = ValueNumberLdElemDst(pInstr, src1Val);
        const ValueType baseValueType(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType());
        if( (
                baseValueType.IsLikelyNativeArray() ||
            #ifdef _M_IX86
                (
                    !AutoSystemInfo::Data.SSE2Available() &&
                    baseValueType.IsLikelyObject() &&
                    (
                        baseValueType.GetObjectType() == ObjectType::Float32Array ||
                        baseValueType.GetObjectType() == ObjectType::Float64Array
                    )
                )
            #else
                false
            #endif
            ) &&
            instr->GetDst()->IsVar() &&
            instr->HasBailOutInfo())
        {
            // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
            // path. Note that the removed bailouts should not be necessary for correctness.
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
            {
                bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
            }
            if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
            {
                bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
            }
            if(bailOutKind)
            {
                instr->SetBailOutKind(bailOutKind);
            }
            else
            {
                instr->ClearBailOutInfo();
            }
        }
        return dstVal;
    }

    case Js::OpCode::LdMethodElem:
        // Not worth profiling this, just assume it's likely object (should be likely function but ValueType does not track
        // functions currently, so using ObjectType::Object instead)
        dstVal = NewGenericValue(ValueType::GetObject(ObjectType::Object).ToLikely(), dst);
        if(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyNativeArray() && instr->HasBailOutInfo())
        {
            // The lowerer is not going to generate a fast path for this case. Remove any bailouts that require the fast
            // path. Note that the removed bailouts should not be necessary for correctness.
            IR::BailOutKind bailOutKind = instr->GetBailOutKind();
            if(bailOutKind & IR::BailOutOnArrayAccessHelperCall)
            {
                bailOutKind -= IR::BailOutOnArrayAccessHelperCall;
            }
            if(bailOutKind == IR::BailOutOnImplicitCallsPreOp)
            {
                bailOutKind -= IR::BailOutOnImplicitCallsPreOp;
            }
            if(bailOutKind)
            {
                instr->SetBailOutKind(bailOutKind);
            }
            else
            {
                instr->ClearBailOutInfo();
            }
        }
        return dstVal;

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
        dstVal = this->ValueNumberTransferDst(instr, src1Val);
        break;

    case Js::OpCode::LdLen_A:
        if (instr->IsProfiledInstr())
        {
            const ValueType profiledValueType(instr->AsProfiledInstr()->u.ldElemInfo->GetElementType());
            if(!(profiledValueType.IsLikelyInt() && dst->AsRegOpnd()->m_sym->m_isNotInt))
            {
                return this->NewGenericValue(profiledValueType, dst);
            }
        }
        break;

    case Js::OpCode::BrOnEmpty:
    case Js::OpCode::BrOnNotEmpty:
        Assert(dst->IsRegOpnd());
        Assert(dst->GetValueType().IsString());
        return this->NewGenericValue(ValueType::String, dst);

    case Js::OpCode::IsInst:
    case Js::OpCode::LdTrue:
    case Js::OpCode::LdFalse:
        return this->NewGenericValue(ValueType::Boolean, dst);

    case Js::OpCode::LdUndef:
        return this->NewGenericValue(ValueType::Undefined, dst);

    case Js::OpCode::LdC_A_Null:
        return this->NewGenericValue(ValueType::Null, dst);

    case Js::OpCode::LdThis:
        if (!PHASE_OFF(Js::OptTagChecksPhase, this->func) &&
            (src1ValueInfo == nullptr || src1ValueInfo->IsUninitialized()))
        {
            return this->NewGenericValue(ValueType::GetObject(ObjectType::Object), dst);
        }
        break;

    case Js::OpCode::Typeof:
        return this->NewGenericValue(ValueType::String, dst);

    case Js::OpCode::InitLocalClosure:
        Assert(instr->GetDst());
        Assert(instr->GetDst()->IsRegOpnd());
        IR::RegOpnd *regOpnd = instr->GetDst()->AsRegOpnd();
        StackSym *opndStackSym = regOpnd->m_sym;
        Assert(opndStackSym != nullptr);
        ObjectSymInfo *objectSymInfo = opndStackSym->m_objectInfo;
        Assert(objectSymInfo != nullptr);
        // Record every property sym hanging off the closure object as a slot sym.
        for (PropertySym *localVarSlotList = objectSymInfo->m_propertySymList; localVarSlotList; localVarSlotList = localVarSlotList->m_nextInStackSymList)
        {
            this->slotSyms->Set(localVarSlotList->m_id);
        }
        break;
    }

#ifdef ENABLE_SIMDJS
    // SIMD_JS
    if (Js::IsSimd128Opcode(instr->m_opcode) && !func->GetJITFunctionBody()->IsAsmJsMode())
    {
        ThreadContext::SimdFuncSignature simdFuncSignature;
        instr->m_func->GetScriptContext()->GetThreadContext()->GetSimdFuncSignatureFromOpcode(instr->m_opcode, simdFuncSignature);
        return this->NewGenericValue(simdFuncSignature.returnType, dst);
    }
#endif

    if (dstVal == nullptr)
    {
        // No case produced a value: fall back to a generic value based on the
        // dst operand's declared value type.
        return this->NewGenericValue(dst->GetValueType(), dst);
    }

    return CurrentBlockData()->SetValue(dstVal, dst);
}
  4702. Value *
  4703. GlobOpt::ValueNumberLdElemDst(IR::Instr **pInstr, Value *srcVal)
  4704. {
  4705. IR::Instr *&instr = *pInstr;
  4706. IR::Opnd *dst = instr->GetDst();
  4707. Value *dstVal = nullptr;
  4708. int32 newMin, newMax;
  4709. ValueInfo *srcValueInfo = (srcVal ? srcVal->GetValueInfo() : nullptr);
  4710. ValueType profiledElementType;
  4711. if (instr->IsProfiledInstr())
  4712. {
  4713. profiledElementType = instr->AsProfiledInstr()->u.ldElemInfo->GetElementType();
  4714. if(!(profiledElementType.IsLikelyInt() && dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->m_isNotInt) &&
  4715. srcVal &&
  4716. srcValueInfo->IsUninitialized())
  4717. {
  4718. if(IsLoopPrePass())
  4719. {
  4720. dstVal = NewGenericValue(profiledElementType, dst);
  4721. }
  4722. else
  4723. {
  4724. // Assuming the profile data gives more precise value types based on the path it took at runtime, we
  4725. // can improve the original value type.
  4726. srcValueInfo->Type() = profiledElementType;
  4727. instr->GetSrc1()->SetValueType(profiledElementType);
  4728. }
  4729. }
  4730. }
  4731. IR::IndirOpnd *src = instr->GetSrc1()->AsIndirOpnd();
  4732. const ValueType baseValueType(src->GetBaseOpnd()->GetValueType());
  4733. if (instr->DoStackArgsOpt(this->func) ||
  4734. !(
  4735. baseValueType.IsLikelyOptimizedTypedArray() ||
  4736. (baseValueType.IsLikelyNativeArray() && instr->IsProfiledInstr()) // Specialized native array lowering for LdElem requires that it is profiled.
  4737. ) ||
  4738. (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
  4739. // Don't do type spec on native array with a history of accessing gaps, as this is a bailout
  4740. (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
  4741. !ShouldExpectConventionalArrayIndexValue(src))
  4742. {
  4743. if(DoTypedArrayTypeSpec() && !IsLoopPrePass())
  4744. {
  4745. GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access.\n"));
  4746. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
  4747. {
  4748. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  4749. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  4750. baseValueType.ToString(baseValueTypeStr);
  4751. Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because %s.\n"),
  4752. this->func->GetJITFunctionBody()->GetDisplayName(),
  4753. this->func->GetDebugNumberSet(debugStringBuffer),
  4754. Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
  4755. baseValueTypeStr,
  4756. instr->DoStackArgsOpt(this->func) ? _u("instruction uses the arguments object") :
  4757. baseValueType.IsLikelyOptimizedTypedArray() ? _u("index is negative or likely not int") : _u("of array type"));
  4758. Output::Flush();
  4759. }
  4760. }
  4761. if(!dstVal)
  4762. {
  4763. if(srcVal)
  4764. {
  4765. dstVal = this->ValueNumberTransferDst(instr, srcVal);
  4766. }
  4767. else
  4768. {
  4769. dstVal = NewGenericValue(profiledElementType, dst);
  4770. }
  4771. }
  4772. return dstVal;
  4773. }
  4774. Assert(instr->GetSrc1()->IsIndirOpnd());
  4775. IRType toType = TyVar;
  4776. IR::BailOutKind bailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;
  4777. switch(baseValueType.GetObjectType())
  4778. {
  4779. case ObjectType::Int8Array:
  4780. case ObjectType::Int8VirtualArray:
  4781. case ObjectType::Int8MixedArray:
  4782. newMin = Int8ConstMin;
  4783. newMax = Int8ConstMax;
  4784. goto IntArrayCommon;
  4785. case ObjectType::Uint8Array:
  4786. case ObjectType::Uint8VirtualArray:
  4787. case ObjectType::Uint8MixedArray:
  4788. case ObjectType::Uint8ClampedArray:
  4789. case ObjectType::Uint8ClampedVirtualArray:
  4790. case ObjectType::Uint8ClampedMixedArray:
  4791. newMin = Uint8ConstMin;
  4792. newMax = Uint8ConstMax;
  4793. goto IntArrayCommon;
  4794. case ObjectType::Int16Array:
  4795. case ObjectType::Int16VirtualArray:
  4796. case ObjectType::Int16MixedArray:
  4797. newMin = Int16ConstMin;
  4798. newMax = Int16ConstMax;
  4799. goto IntArrayCommon;
  4800. case ObjectType::Uint16Array:
  4801. case ObjectType::Uint16VirtualArray:
  4802. case ObjectType::Uint16MixedArray:
  4803. newMin = Uint16ConstMin;
  4804. newMax = Uint16ConstMax;
  4805. goto IntArrayCommon;
  4806. case ObjectType::Int32Array:
  4807. case ObjectType::Int32VirtualArray:
  4808. case ObjectType::Int32MixedArray:
  4809. case ObjectType::Uint32Array: // int-specialized loads from uint32 arrays will bail out on values that don't fit in an int32
  4810. case ObjectType::Uint32VirtualArray:
  4811. case ObjectType::Uint32MixedArray:
  4812. Int32Array:
  4813. newMin = Int32ConstMin;
  4814. newMax = Int32ConstMax;
  4815. goto IntArrayCommon;
  4816. IntArrayCommon:
  4817. Assert(dst->IsRegOpnd());
  4818. // If int type spec is disabled, it is ok to load int values as they can help float type spec, and merging int32 with float64 => float64.
  4819. // But if float type spec is also disabled, we'll have problems because float64 merged with var => float64...
  4820. if (!this->DoAggressiveIntTypeSpec() && !this->DoFloatTypeSpec())
  4821. {
  4822. if (!dstVal)
  4823. {
  4824. if (srcVal)
  4825. {
  4826. dstVal = this->ValueNumberTransferDst(instr, srcVal);
  4827. }
  4828. else
  4829. {
  4830. dstVal = NewGenericValue(profiledElementType, dst);
  4831. }
  4832. }
  4833. return dstVal;
  4834. }
  4835. if (!this->IsLoopPrePass())
  4836. {
  4837. if (instr->HasBailOutInfo())
  4838. {
  4839. const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
  4840. Assert(
  4841. (
  4842. !(oldBailOutKind & ~IR::BailOutKindBits) ||
  4843. (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
  4844. ) &&
  4845. !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
  4846. if (bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
  4847. {
  4848. // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
  4849. // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
  4850. // bails out for the right reason.
  4851. instr->SetBailOutKind(
  4852. bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
  4853. }
  4854. else
  4855. {
  4856. // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
  4857. // calls to occur, so it must be merged in to eliminate generating the helper call
  4858. Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
  4859. instr->SetBailOutKind(oldBailOutKind | bailOutKind);
  4860. }
  4861. }
  4862. else
  4863. {
  4864. GenerateBailAtOperation(&instr, bailOutKind);
  4865. }
  4866. }
  4867. TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, bailOutKind, newMin, newMax, &dstVal);
  4868. toType = TyInt32;
  4869. break;
  4870. case ObjectType::Float32Array:
  4871. case ObjectType::Float32VirtualArray:
  4872. case ObjectType::Float32MixedArray:
  4873. case ObjectType::Float64Array:
  4874. case ObjectType::Float64VirtualArray:
  4875. case ObjectType::Float64MixedArray:
  4876. Float64Array:
  4877. Assert(dst->IsRegOpnd());
  4878. // If float type spec is disabled, don't load float64 values
  4879. if (!this->DoFloatTypeSpec())
  4880. {
  4881. if (!dstVal)
  4882. {
  4883. if (srcVal)
  4884. {
  4885. dstVal = this->ValueNumberTransferDst(instr, srcVal);
  4886. }
  4887. else
  4888. {
  4889. dstVal = NewGenericValue(profiledElementType, dst);
  4890. }
  4891. }
  4892. return dstVal;
  4893. }
  4894. if (!this->IsLoopPrePass())
  4895. {
  4896. if (instr->HasBailOutInfo())
  4897. {
  4898. const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
  4899. Assert(
  4900. (
  4901. !(oldBailOutKind & ~IR::BailOutKindBits) ||
  4902. (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
  4903. ) &&
  4904. !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
  4905. if (bailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
  4906. {
  4907. // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
  4908. // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
  4909. // bails out for the right reason.
  4910. instr->SetBailOutKind(
  4911. bailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
  4912. }
  4913. else
  4914. {
  4915. // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
  4916. // calls to occur, so it must be merged in to eliminate generating the helper call
  4917. Assert(bailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
  4918. instr->SetBailOutKind(oldBailOutKind | bailOutKind);
  4919. }
  4920. }
  4921. else
  4922. {
  4923. GenerateBailAtOperation(&instr, bailOutKind);
  4924. }
  4925. }
  4926. TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, &dstVal);
  4927. toType = TyFloat64;
  4928. break;
  4929. default:
  4930. Assert(baseValueType.IsLikelyNativeArray());
  4931. bailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
  4932. if(baseValueType.HasIntElements())
  4933. {
  4934. goto Int32Array;
  4935. }
  4936. Assert(baseValueType.HasFloatElements());
  4937. goto Float64Array;
  4938. }
  4939. if(!dstVal)
  4940. {
  4941. dstVal = NewGenericValue(profiledElementType, dst);
  4942. }
  4943. Assert(toType != TyVar);
  4944. GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
  4945. if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
  4946. {
  4947. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  4948. char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  4949. baseValueType.ToString(baseValueTypeStr);
  4950. char dstValTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
  4951. dstVal->GetValueInfo()->Type().ToString(dstValTypeStr);
  4952. Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s producing %S"),
  4953. this->func->GetJITFunctionBody()->GetDisplayName(),
  4954. this->func->GetDebugNumberSet(debugStringBuffer),
  4955. Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
  4956. baseValueTypeStr,
  4957. toType == TyInt32 ? _u("int32") : _u("float64"),
  4958. dstValTypeStr);
  4959. #if DBG_DUMP
  4960. Output::Print(_u(" ("));
  4961. dstVal->Dump();
  4962. Output::Print(_u(").\n"));
  4963. #else
  4964. Output::Print(_u(".\n"));
  4965. #endif
  4966. Output::Flush();
  4967. }
  4968. return dstVal;
  4969. }
  4970. ValueType
  4971. GlobOpt::GetPrepassValueTypeForDst(
  4972. const ValueType desiredValueType,
  4973. IR::Instr *const instr,
  4974. Value *const src1Value,
  4975. Value *const src2Value,
  4976. bool *const isValueInfoPreciseRef) const
  4977. {
  4978. // Values with definite types can be created in the loop prepass only when it is guaranteed that the value type will be the
  4979. // same on any iteration of the loop. The heuristics currently used are:
  4980. // - If the source sym is not live on the back-edge, then it acquires a new value for each iteration of the loop, so
  4981. // that value type can be definite
  4982. // - Consider: A better solution for this is to track values that originate in this loop, which can have definite value
  4983. // types. That catches more cases, should look into that in the future.
  4984. // - If the source sym has a constant value that doesn't change for the duration of the function
  4985. // - The operation always results in a definite value type. For instance, signed bitwise operations always result in an
  4986. // int32, conv_num and ++ always result in a number, etc.
  4987. // - For operations that always result in an int32, the resulting int range is precise only if the source syms pass
  4988. // the above heuristics. Otherwise, the range must be expanded to the full int32 range.
  4989. Assert(IsLoopPrePass());
  4990. Assert(instr);
  4991. if(isValueInfoPreciseRef)
  4992. {
  4993. *isValueInfoPreciseRef = false;
  4994. }
  4995. if(!desiredValueType.IsDefinite())
  4996. {
  4997. return desiredValueType;
  4998. }
  4999. if((instr->GetSrc1() && !IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Value)) ||
  5000. (instr->GetSrc2() && !IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Value)))
  5001. {
  5002. // If the desired value type is not precise, the value type of the destination is derived from the value types of the
  5003. // sources. Since the value type of a source sym is not definite, the destination value type also cannot be definite.
  5004. if(desiredValueType.IsInt() && OpCodeAttr::IsInt32(instr->m_opcode))
  5005. {
  5006. // The op always produces an int32, but not always a tagged int
  5007. return ValueType::GetInt(desiredValueType.IsLikelyTaggedInt());
  5008. }
  5009. if(desiredValueType.IsNumber() && OpCodeAttr::ProducesNumber(instr->m_opcode))
  5010. {
  5011. // The op always produces a number, but not always an int
  5012. return desiredValueType.ToDefiniteAnyNumber();
  5013. }
  5014. return desiredValueType.ToLikely();
  5015. }
  5016. if(isValueInfoPreciseRef)
  5017. {
  5018. // The produced value info is derived from the sources, which have precise value infos
  5019. *isValueInfoPreciseRef = true;
  5020. }
  5021. return desiredValueType;
  5022. }
  5023. bool
  5024. GlobOpt::IsPrepassSrcValueInfoPrecise(IR::Opnd *const src, Value *const srcValue) const
  5025. {
  5026. Assert(IsLoopPrePass());
  5027. Assert(src);
  5028. if(!src->IsRegOpnd() || !srcValue)
  5029. {
  5030. return false;
  5031. }
  5032. ValueInfo *const srcValueInfo = srcValue->GetValueInfo();
  5033. if(!srcValueInfo->IsDefinite())
  5034. {
  5035. return false;
  5036. }
  5037. StackSym *srcSym = src->AsRegOpnd()->m_sym;
  5038. Assert(!srcSym->IsTypeSpec());
  5039. int32 intConstantValue;
  5040. return
  5041. srcSym->IsFromByteCodeConstantTable() ||
  5042. (
  5043. srcValueInfo->TryGetIntConstantValue(&intConstantValue) &&
  5044. !Js::TaggedInt::IsOverflow(intConstantValue) &&
  5045. GetTaggedIntConstantStackSym(intConstantValue) == srcSym
  5046. ) ||
  5047. !currentBlock->loop->regAlloc.liveOnBackEdgeSyms->Test(srcSym->m_id);
  5048. }
  5049. Value *GlobOpt::CreateDstUntransferredIntValue(
  5050. const int32 min,
  5051. const int32 max,
  5052. IR::Instr *const instr,
  5053. Value *const src1Value,
  5054. Value *const src2Value)
  5055. {
  5056. Assert(instr);
  5057. Assert(instr->GetDst());
  5058. Assert(OpCodeAttr::ProducesNumber(instr->m_opcode)
  5059. || (instr->m_opcode == Js::OpCode::Add_A && src1Value->GetValueInfo()->IsNumber()
  5060. && src2Value->GetValueInfo()->IsNumber()));
  5061. ValueType valueType(ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  5062. Assert(valueType.IsInt());
  5063. bool isValueInfoPrecise;
  5064. if(IsLoopPrePass())
  5065. {
  5066. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, &isValueInfoPrecise);
  5067. }
  5068. else
  5069. {
  5070. isValueInfoPrecise = true;
  5071. }
  5072. IR::Opnd *const dst = instr->GetDst();
  5073. if(isValueInfoPrecise)
  5074. {
  5075. Assert(valueType == ValueType::GetInt(IntConstantBounds(min, max).IsLikelyTaggable()));
  5076. Assert(!(dst->IsRegOpnd() && dst->AsRegOpnd()->m_sym->IsTypeSpec()));
  5077. return NewIntRangeValue(min, max, false, dst);
  5078. }
  5079. return NewGenericValue(valueType, dst);
  5080. }
  5081. Value *
  5082. GlobOpt::CreateDstUntransferredValue(
  5083. const ValueType desiredValueType,
  5084. IR::Instr *const instr,
  5085. Value *const src1Value,
  5086. Value *const src2Value)
  5087. {
  5088. Assert(instr);
  5089. Assert(instr->GetDst());
  5090. Assert(!desiredValueType.IsInt()); // use CreateDstUntransferredIntValue instead
  5091. ValueType valueType(desiredValueType);
  5092. if(IsLoopPrePass())
  5093. {
  5094. valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value);
  5095. }
  5096. return NewGenericValue(valueType, instr->GetDst());
  5097. }
// Transfers src1's value to the instruction's dst. Outside the loop prepass the source value is
// reused directly; inside the prepass it is routed through ValueNumberTransferDstInPrepass, which
// may clone the value or widen its type for values live on the back-edge.
Value *
GlobOpt::ValueNumberTransferDst(IR::Instr *const instr, Value * src1Val)
{
    Value *dstVal = this->IsLoopPrePass() ? this->ValueNumberTransferDstInPrepass(instr, src1Val) : src1Val;

    // Don't copy-prop a temp over a user symbol. This is likely to extend the temp's lifetime, as the user symbol
    // is more likely to already have later references.
    // REVIEW: Enabling this does cause perf issues...
    // NOTE(review): the disabled block below references `dst`, which is not declared in this function;
    // it would need fixing before being re-enabled.
#if 0
    if (dstVal != src1Val)
    {
        return dstVal;
    }

    Sym *dstSym = dst->GetStackSym();

    if (dstVal && dstSym && dstSym->IsStackSym() && !dstSym->AsStackSym()->m_isBytecodeTmp)
    {
        Sym *dstValSym = dstVal->GetValueInfo()->GetSymStore();
        if (dstValSym && dstValSym->AsStackSym()->m_isBytecodeTmp /* src->GetIsDead()*/)
        {
            dstVal->GetValueInfo()->SetSymStore(dstSym);
        }
    }
#endif

    return dstVal;
}
  5122. bool
  5123. GlobOpt::IsSafeToTransferInPrePass(IR::Opnd *src, Value *srcValue)
  5124. {
  5125. if (this->DoFieldHoisting())
  5126. {
  5127. return false;
  5128. }
  5129. if (src->IsRegOpnd())
  5130. {
  5131. StackSym *srcSym = src->AsRegOpnd()->m_sym;
  5132. if (srcSym->IsFromByteCodeConstantTable())
  5133. {
  5134. return true;
  5135. }
  5136. ValueInfo *srcValueInfo = srcValue->GetValueInfo();
  5137. int32 srcIntConstantValue;
  5138. if (srcValueInfo->TryGetIntConstantValue(&srcIntConstantValue) && !Js::TaggedInt::IsOverflow(srcIntConstantValue)
  5139. && GetTaggedIntConstantStackSym(srcIntConstantValue) == srcSym)
  5140. {
  5141. return true;
  5142. }
  5143. }
  5144. return false;
  5145. }
// Prepass variant of ValueNumberTransferDst: produces the dst value for a transfer during the loop
// prepass. Because a sym's value can change on later loop iterations, the value is either copied
// (with a new value number) when its type info is precise, or replaced with a new generic value of a
// conservatively widened type.
Value *
GlobOpt::ValueNumberTransferDstInPrepass(IR::Instr *const instr, Value *const src1Val)
{
    Value *dstVal = nullptr;

    if (!src1Val)
    {
        return nullptr;
    }

    bool isValueInfoPrecise;
    ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();

    // TODO: This conflicts with new values created by the type specialization code
    // We should re-enable if we change that code to avoid the new values.
#if 0
    if (this->IsSafeToTransferInPrePass(instr->GetSrc1(), src1Val))
    {
        return src1Val;
    }

    if (this->IsPREInstrCandidateLoad(instr->m_opcode) && instr->GetDst())
    {
        StackSym *dstSym = instr->GetDst()->AsRegOpnd()->m_sym;

        for (Loop *curLoop = this->currentBlock->loop; curLoop; curLoop = curLoop->parent)
        {
            if (curLoop->fieldPRESymStore->Test(dstSym->m_id))
            {
                return src1Val;
            }
        }
    }

    if (!this->DoFieldHoisting())
    {
        if (instr->GetDst()->IsRegOpnd())
        {
            StackSym *stackSym = instr->GetDst()->AsRegOpnd()->m_sym;

            if (stackSym->IsSingleDef() || this->IsLive(stackSym, this->prePassLoop->landingPad))
            {
                IntConstantBounds src1IntConstantBounds;
                if (src1ValueInfo->TryGetIntConstantBounds(&src1IntConstantBounds) &&
                    !(
                        src1IntConstantBounds.LowerBound() == INT32_MIN &&
                        src1IntConstantBounds.UpperBound() == INT32_MAX
                    ))
                {
                    const ValueType valueType(
                        GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
                    if (isValueInfoPrecise)
                    {
                        return src1Val;
                    }
                }
                else
                {
                    return src1Val;
                }
            }
        }
    }
#endif

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // first pass when transferring a value that is live on the back-edge.

    // In prepass we are going to copy the value but with a different value number
    // for aggressive int type spec.
    const ValueType valueType(GetPrepassValueTypeForDst(src1ValueInfo->Type(), instr, src1Val, nullptr, &isValueInfoPrecise));
    if(isValueInfoPrecise || (valueType == src1ValueInfo->Type() && src1ValueInfo->IsGeneric()))
    {
        // The type survived unchanged: copy the value (new value number) and keep tracking it for kills.
        Assert(valueType == src1ValueInfo->Type());
        dstVal = CopyValue(src1Val);
        TrackCopiedValueForKills(dstVal);
    }
    else
    {
        // The type was widened: make a fresh generic value but keep the original's sym store so
        // copy-prop can still find it.
        dstVal = NewGenericValue(valueType);
        dstVal->GetValueInfo()->SetSymStore(src1ValueInfo->GetSymStore());
    }

    return dstVal;
}
  5222. void
  5223. GlobOpt::PropagateIntRangeForNot(int32 minimum, int32 maximum, int32 *pNewMin, int32* pNewMax)
  5224. {
  5225. int32 tmp;
  5226. Int32Math::Not(minimum, pNewMin);
  5227. *pNewMax = *pNewMin;
  5228. Int32Math::Not(maximum, &tmp);
  5229. *pNewMin = min(*pNewMin, tmp);
  5230. *pNewMax = max(*pNewMax, tmp);
  5231. }
// Propagates int32 value ranges through a bitwise/shift binary op. [min1, max1] bounds the first
// source, [min2, max2] the second; the conservative result range is written to *pNewMin/*pNewMax.
// Opcodes not handled by the switch fall through to the full int32 range.
void
GlobOpt::PropagateIntRangeBinary(IR::Instr *instr, int32 min1, int32 max1,
    int32 min2, int32 max2, int32 *pNewMin, int32* pNewMax)
{
    int32 min, max, tmp, tmp2;

    // Default: full int32 range (used for any case that can't tighten the bounds).
    min = INT32_MIN;
    max = INT32_MAX;

    switch (instr->m_opcode)
    {
    case Js::OpCode::Xor_A:
    case Js::OpCode::Or_A:
        // Find range with highest high order bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp > (uint32)tmp2)
        {
            max = tmp;
        }
        else
        {
            max = tmp2;
        }

        if (max < 0)
        {
            min = INT32_MIN;  // REVIEW: conservative...
            max = INT32_MAX;
        }
        else
        {
            // Turn values like 0x1010 into 0x1111
            max = 1 << Math::Log2(max);
            max = (uint32)(max << 1) - 1;
            min = 0;
        }

        break;

    case Js::OpCode::And_A:

        if (min1 == INT32_MIN && min2 == INT32_MIN)
        {
            // Shortcut
            break;
        }

        // Find range with lowest higher bit
        tmp = ::max((uint32)min1, (uint32)max1);
        tmp2 = ::max((uint32)min2, (uint32)max2);

        if ((uint32)tmp < (uint32)tmp2)
        {
            min = min1;
            max = max1;
        }
        else
        {
            min = min2;
            max = max2;
        }

        // To compute max, look if min has higher high bit
        if ((uint32)min > (uint32)max)
        {
            max = min;
        }

        // If max is negative, max let's assume it could be -1, so result in MAX_INT
        if (max < 0)
        {
            max = INT32_MAX;
        }

        // If min is positive, the resulting min is zero
        if (min >= 0)
        {
            min = 0;
        }
        else
        {
            min = INT32_MIN;
        }
        break;

    case Js::OpCode::Shl_A:
        {
            // Shift count
            if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
            {
                // Variable count that may exceed 31: assume any count 0..31 (JS masks shifts to 5 bits).
                min2 = 0;
                max2 = 0x1F;
            }
            else
            {
                min2 &= 0x1F;
                max2 &= 0x1F;
            }

            // Number of zero bits above the highest set bit of each bound; a shift larger than this
            // would push a set bit into (or past) the sign bit.
            int32 min1FreeTopBitCount = min1 ? (sizeof(int32) * 8) - (Math::Log2(min1) + 1) : (sizeof(int32) * 8);
            int32 max1FreeTopBitCount = max1 ? (sizeof(int32) * 8) - (Math::Log2(max1) + 1) : (sizeof(int32) * 8);
            if (min1FreeTopBitCount <= max2 || max1FreeTopBitCount <= max2)
            {
                // If the shift is going to touch the sign bit return the max range
                min = INT32_MIN;
                max = INT32_MAX;
            }
            else
            {
                // Compute max
                // Turn values like 0x1010 into 0x1111
                if (min1)
                {
                    min1 = 1 << Math::Log2(min1);
                    min1 = (min1 << 1) - 1;
                }
                if (max1)
                {
                    max1 = 1 << Math::Log2(max1);
                    max1 = (uint32)(max1 << 1) - 1;
                }

                if (max1 > 0)
                {
                    int32 nrTopBits = (sizeof(int32) * 8) - Math::Log2(max1);
                    if (nrTopBits < ::min(max2, 30))
                        max = INT32_MAX;
                    else
                        max = ::max((max1 << ::min(max2, 30)) & ~0x80000000, (min1 << min2) & ~0x80000000);
                }
                else
                {
                    max = (max1 << min2) & ~0x80000000;
                }

                // Compute min

                if (min1 < 0)
                {
                    min = ::min(min1 << max2, max1 << max2);
                }
                else
                {
                    min = ::min(min1 << min2, max1 << max2);
                }

                // Turn values like 0x1110 into 0x1000
                if (min)
                {
                    min = 1 << Math::Log2(min);
                }
            }
        }
        break;

    case Js::OpCode::Shr_A:
        // Shift count
        if (min2 != max2 && ((uint32)min2 > 0x1F || (uint32)max2 > 0x1F))
        {
            min2 = 0;
            max2 = 0x1F;
        }
        else
        {
            min2 &= 0x1F;
            max2 &= 0x1F;
        }

        // Compute max
        // Arithmetic shift moves values toward zero: a negative max shrinks least with the largest
        // count; a non-negative max shrinks least with the smallest count.
        if (max1 < 0)
        {
            max = max1 >> max2;
        }
        else
        {
            max = max1 >> min2;
        }

        // Compute min
        if (min1 < 0)
        {
            min = min1 >> min2;
        }
        else
        {
            min = min1 >> max2;
        }
        break;

    case Js::OpCode::ShrU_A:

        // shift count is constant zero
        if ((min2 == max2) && (max2 & 0x1f) == 0)
        {
            // We can't encode uint32 result, so it has to be used as int32 only or the original value is positive.
            Assert(instr->ignoreIntOverflow || min1 >= 0);
            // We can transfer the signed int32 range.
            min = min1;
            max = max1;
            break;
        }

        const IntConstantBounds src2NewBounds = IntConstantBounds(min2, max2).And_0x1f();
        // Zero is only allowed if result is always a signed int32 or always used as a signed int32
        Assert(min1 >= 0 || instr->ignoreIntOverflow || !src2NewBounds.Contains(0));
        min2 = src2NewBounds.LowerBound();
        max2 = src2NewBounds.UpperBound();

        Assert(min2 <= max2);
        // zero shift count is only allowed if result is used as int32 and/or value is positive
        Assert(min2 > 0 || instr->ignoreIntOverflow || min1 >= 0);

        // Work in the unsigned domain, since >>> treats its input as uint32.
        uint32 umin1 = (uint32)min1;
        uint32 umax1 = (uint32)max1;

        if (umin1 > umax1)
        {
            uint32 temp = umax1;
            umax1 = umin1;
            umin1 = temp;
        }

        Assert(min2 >= 0 && max2 < 32);

        // Compute max

        if (min1 < 0)
        {
            // Signed range straddles the sign bit, so the unsigned input can be anything up to UINT32_MAX.
            umax1 = UINT32_MAX;
        }
        max = umax1 >> min2;

        // Compute min

        if (min1 <= 0 && max1 >=0)
        {
            min = 0;
        }
        else
        {
            min = umin1 >> max2;
        }

        // We should be able to fit uint32 range as int32
        Assert(instr->ignoreIntOverflow || (min >= 0 && max >= 0) );

        if (min > max)
        {
            // can only happen if shift count can be zero
            Assert(min2 == 0 && (instr->ignoreIntOverflow || min1 >= 0));
            min = Int32ConstMin;
            max = Int32ConstMax;
        }

        break;
    }

    *pNewMin = min;
    *pNewMax = max;
}
// Main per-instruction type-specialization driver. Attempts, in order: SIMD specialization,
// recovery of int constants from float-constant values (when overflow is ignorable), unary const
// folding / unary type spec, binary const folding (asm.js/wasm or JS), binary type spec with const
// peeps, and branch const folding. If nothing specializes, the srcs and dst are forced back to var.
// Returns the (possibly replaced) instruction; *redoTypeSpecRef / *forceInvariantHoistingRef report
// follow-up actions to the caller.
IR::Instr *
GlobOpt::TypeSpecialization(
    IR::Instr *instr,
    Value **pSrc1Val,
    Value **pSrc2Val,
    Value **pDstVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Value *&src1Val = *pSrc1Val;
    Value *&src2Val = *pSrc2Val;
    *redoTypeSpecRef = false;
    Assert(!*forceInvariantHoistingRef);

    this->ignoredIntOverflowForCurrentInstr = false;
    this->ignoredNegativeZeroForCurrentInstr = false;

    // - Int32 values that can't be tagged are created as float constant values instead because a JavascriptNumber var is needed
    //   for that value at runtime. For the purposes of type specialization, recover the int32 values so that they will be
    //   treated as ints.
    // - If int overflow does not matter for the instruction, we can additionally treat uint32 values as int32 values because
    //   the value resulting from the operation will eventually be converted to int32 anyway
    Value *const src1OriginalVal = src1Val;
    Value *const src2OriginalVal = src2Val;

#ifdef ENABLE_SIMDJS
    // SIMD_JS
    if (TypeSpecializeSimd128(instr, pSrc1Val, pSrc2Val, pDstVal))
    {
        return instr;
    }
#endif

    if(!instr->ShouldCheckForIntOverflow())
    {
        // Recover int32/uint32 constants hidden inside float-constant values (see comment above).
        if(src1Val && src1Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src1Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src1Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    // The constant was a uint32; remember that overflow was ignored for this instr.
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
        if(src2Val && src2Val->GetValueInfo()->IsFloatConstant())
        {
            int32 int32Value;
            bool isInt32;
            if(Js::JavascriptNumber::TryGetInt32OrUInt32Value(
                    src2Val->GetValueInfo()->AsFloatConstant()->FloatValue(),
                    &int32Value,
                    &isInt32))
            {
                src2Val = GetIntConstantValue(int32Value, instr);
                if(!isInt32)
                {
                    this->ignoredIntOverflowForCurrentInstr = true;
                }
            }
        }
    }

    // Restore the original (pre-recovery) values on every exit path from this function.
    const AutoRestoreVal autoRestoreSrc1Val(src1OriginalVal, &src1Val);
    const AutoRestoreVal autoRestoreSrc2Val(src2OriginalVal, &src2Val);

    if (src1Val && instr->GetSrc2() == nullptr)
    {
        // Unary
        // Note make sure that native array StElemI gets to TypeSpecializeStElem. Do this for typed arrays, too?
        int32 intConstantValue;
        if (!this->IsLoopPrePass() &&
            !instr->IsBranchInstr() &&
            src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) &&
            !(
                // Nothing to fold for element stores. Go into type specialization to see if they can at least be specialized.
                instr->m_opcode == Js::OpCode::StElemI_A ||
                instr->m_opcode == Js::OpCode::StElemI_A_Strict ||
                instr->m_opcode == Js::OpCode::StElemC ||
                instr->m_opcode == Js::OpCode::MultiBr ||
                instr->m_opcode == Js::OpCode::InlineArrayPop
            ))
        {
            if (OptConstFoldUnary(&instr, intConstantValue, src1Val == src1OriginalVal, pDstVal))
            {
                return instr;
            }
        }
        else if (this->TypeSpecializeUnary(
                    &instr,
                    &src1Val,
                    pDstVal,
                    src1OriginalVal,
                    redoTypeSpecRef,
                    forceInvariantHoistingRef))
        {
            return instr;
        }
        else if(*redoTypeSpecRef)
        {
            return instr;
        }
    }
    else if (instr->GetSrc2() && !instr->IsBranchInstr())
    {
        // Binary
        if (!this->IsLoopPrePass())
        {
            if (GetIsAsmJSFunc())
            {
                if (CONFIG_FLAG(WasmFold))
                {
                    bool success = instr->GetSrc1()->IsInt64() ?
                        this->OptConstFoldBinaryWasm<int64>(&instr, src1Val, src2Val, pDstVal) :
                        this->OptConstFoldBinaryWasm<int>(&instr, src1Val, src2Val, pDstVal);
                    if (success)
                    {
                        return instr;
                    }
                }
            }
            else
            {
                // OptConstFoldBinary doesn't do type spec, so only deal with things we are sure are int (IntConstant and IntRange)
                // and not just likely ints TypeSpecializeBinary will deal with type specializing them and fold them again
                IntConstantBounds src1IntConstantBounds, src2IntConstantBounds;
                if (src1Val && src1Val->GetValueInfo()->TryGetIntConstantBounds(&src1IntConstantBounds))
                {
                    if (src2Val && src2Val->GetValueInfo()->TryGetIntConstantBounds(&src2IntConstantBounds))
                    {
                        if (this->OptConstFoldBinary(&instr, src1IntConstantBounds, src2IntConstantBounds, pDstVal))
                        {
                            return instr;
                        }
                    }
                }
            }
        }
    }
    if (instr->GetSrc2() && this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2OriginalVal, redoTypeSpecRef))
    {
        if (!this->IsLoopPrePass() &&
            instr->m_opcode != Js::OpCode::Nop &&
            instr->m_opcode != Js::OpCode::Br && // We may have const fold a branch
            // Cannot const-peep if the result of the operation is required for a bailout check
            !(instr->HasBailOutInfo() && instr->GetBailOutKind() & IR::BailOutOnResultConditions))
        {
            // Try identity peeps (x+0, x*1, x|0, ...) on the constant source.
            if (src1Val && src1Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc1(), pDstVal, src1Val->GetValueInfo()))
                {
                    return instr;
                }
            }
            else if (src2Val && src2Val->GetValueInfo()->HasIntConstantValue())
            {
                if (this->OptConstPeep(instr, instr->GetSrc2(), pDstVal, src2Val->GetValueInfo()))
                {
                    return instr;
                }
            }
        }
        return instr;
    }
    else if(*redoTypeSpecRef)
    {
        return instr;
    }

    if (instr->IsBranchInstr() && !this->IsLoopPrePass())
    {
        if (this->OptConstFoldBranch(instr, src1Val, src2Val, pDstVal))
        {
            return instr;
        }
    }

    // We didn't type specialize, make sure the srcs are unspecialized
    IR::Opnd *src1 = instr->GetSrc1();
    if (src1)
    {
        instr = this->ToVarUses(instr, src1, false, src1Val);

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            instr = this->ToVarUses(instr, src2, false, src2Val);
        }
    }

    IR::Opnd *dst = instr->GetDst();
    if (dst)
    {
        instr = this->ToVarUses(instr, dst, true, nullptr);

        // Handling for instructions other than built-ins that may require only dst type specialization
        // should be added here.
        if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode) && !GetIsAsmJSFunc()) // don't need to do typespec for asmjs
        {
            this->TypeSpecializeInlineBuiltInDst(&instr, pDstVal);
            return instr;
        }

        // Clear the int specialized bit on the dst.
        if (dst->IsRegOpnd())
        {
            IR::RegOpnd *dstRegOpnd = dst->AsRegOpnd();
            if (!dstRegOpnd->m_sym->IsTypeSpec())
            {
                this->ToVarRegOpnd(dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsInt32())
            {
                this->ToInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsUInt32() && GetIsAsmJSFunc())
            {
                this->ToUInt32Dst(instr, dstRegOpnd, this->currentBlock);
            }
            else if (dstRegOpnd->m_sym->IsFloat64())
            {
                this->ToFloat64Dst(instr, dstRegOpnd, this->currentBlock);
            }
        }
        else if (dst->IsSymOpnd() && dst->AsSymOpnd()->m_sym->IsStackSym())
        {
            this->ToVarStackSym(dst->AsSymOpnd()->m_sym->AsStackSym(), this->currentBlock);
        }
    }

    return instr;
}
// Identity-constant peephole: when one source of a binary op is a known int constant that makes the
// op an identity (x+0, x*1, x|0, x|-1, x&-1, x&0, x>>0, ...), rewrites the instruction into Ld_A of
// the surviving operand. Returns true if the rewrite was performed.
bool
GlobOpt::OptConstPeep(IR::Instr *instr, IR::Opnd *constSrc, Value **pDstVal, ValueInfo *valuInfo)
{
    int32 value;
    IR::Opnd *src;
    IR::Opnd *nonConstSrc = (constSrc == instr->GetSrc1() ? instr->GetSrc2() : instr->GetSrc1());

    // Try to find the value from value info first
    if (valuInfo->TryGetIntConstantValue(&value))
    {
        // value obtained from the value info; nothing more to do.
    }
    else if (constSrc->IsAddrOpnd())
    {
        IR::AddrOpnd *addrOpnd = constSrc->AsAddrOpnd();
#ifdef _M_X64
        Assert(addrOpnd->IsVar() || Math::FitsInDWord((size_t)addrOpnd->m_address));
#else
        Assert(sizeof(value) == sizeof(addrOpnd->m_address));
#endif

        if (addrOpnd->IsVar())
        {
            // Tagged-int var: extract the int32 payload.
            value = Js::TaggedInt::ToInt32(addrOpnd->m_address);
        }
        else
        {
            // We asserted that the address will fit in a DWORD above
            value = ::Math::PointerCastToIntegral<int32>(constSrc->AsAddrOpnd()->m_address);
        }
    }
    else if (constSrc->IsIntConstOpnd())
    {
        value = constSrc->AsIntConstOpnd()->AsInt32();
    }
    else
    {
        return false;
    }

    // Each case selects `src`, the operand whose value is the result, or bails out with false.
    switch(instr->m_opcode)
    {
        // Can't do all Add_A because of string concats.
        // Sub_A cannot be transformed to a NEG_A because 0 - 0 != -0
    case Js::OpCode::Add_A:
        src = nonConstSrc;

        if (!src->GetValueType().IsInt())
        {
            // 0 + -0  != -0
            // "Foo" + 0 != "Foo
            return false;
        }
        // fall-through

    case Js::OpCode::Add_I4:
        if (value != 0)
        {
            return false;
        }
        if (constSrc == instr->GetSrc1())
        {
            src = instr->GetSrc2();
        }
        else
        {
            src = instr->GetSrc1();
        }
        break;

    case Js::OpCode::Mul_A:
    case Js::OpCode::Mul_I4:
        if (value == 0)
        {
            // -0 * 0 != 0
            return false;
        }
        else if (value == 1)
        {
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Div_A:
        // Only x/1 with the constant as the divisor is an identity.
        if (value == 1 && constSrc == instr->GetSrc2())
        {
            src = instr->GetSrc1();
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Or_I4:
        if (value == -1)
        {
            // x | -1 == -1: the constant is the result.
            src = constSrc;
        }
        else if (value == 0)
        {
            // x | 0 == x
            src = nonConstSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::And_I4:
        if (value == -1)
        {
            // x & -1 == x
            src = nonConstSrc;
        }
        else if (value == 0)
        {
            // x & 0 == 0: the constant is the result.
            src = constSrc;
        }
        else
        {
            return false;
        }
        break;

    case Js::OpCode::Shl_I4:
    case Js::OpCode::ShrU_I4:
    case Js::OpCode::Shr_I4:
        // Only x shift-by 0 (constant as the count) is an identity.
        if (value != 0 || constSrc != instr->GetSrc2())
        {
            return false;
        }
        src = instr->GetSrc1();
        break;

    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    // Rewrite into Ld_A of the surviving operand.
    if (src == instr->GetSrc1())
    {
        instr->FreeSrc2();
    }
    else
    {
        Assert(src == instr->GetSrc2());
        instr->ReplaceSrc1(instr->UnlinkSrc2());
    }

    instr->m_opcode = Js::OpCode::Ld_A;

    return true;
}
  5825. Js::Var // TODO: michhol OOP JIT, shouldn't play with Vars
  5826. GlobOpt::GetConstantVar(IR::Opnd *opnd, Value *val)
  5827. {
  5828. ValueInfo *valueInfo = val->GetValueInfo();
  5829. if (valueInfo->IsVarConstant() && valueInfo->IsPrimitive())
  5830. {
  5831. return valueInfo->AsVarConstant()->VarValue();
  5832. }
  5833. if (opnd->IsAddrOpnd())
  5834. {
  5835. IR::AddrOpnd *addrOpnd = opnd->AsAddrOpnd();
  5836. if (addrOpnd->IsVar())
  5837. {
  5838. return addrOpnd->m_address;
  5839. }
  5840. }
  5841. else if (opnd->IsIntConstOpnd())
  5842. {
  5843. if (!Js::TaggedInt::IsOverflow(opnd->AsIntConstOpnd()->AsInt32()))
  5844. {
  5845. return Js::TaggedInt::ToVarUnchecked(opnd->AsIntConstOpnd()->AsInt32());
  5846. }
  5847. }
  5848. else if (opnd->IsRegOpnd() && opnd->AsRegOpnd()->m_sym->IsSingleDef())
  5849. {
  5850. if (valueInfo->IsBoolean())
  5851. {
  5852. IR::Instr * defInstr = opnd->AsRegOpnd()->m_sym->GetInstrDef();
  5853. if (defInstr->m_opcode != Js::OpCode::Ld_A || !defInstr->GetSrc1()->IsAddrOpnd())
  5854. {
  5855. return nullptr;
  5856. }
  5857. Assert(defInstr->GetSrc1()->AsAddrOpnd()->IsVar());
  5858. return defInstr->GetSrc1()->AsAddrOpnd()->m_address;
  5859. }
  5860. else if (valueInfo->IsUndefined())
  5861. {
  5862. return (Js::Var)this->func->GetScriptContextInfo()->GetUndefinedAddr();
  5863. }
  5864. else if (valueInfo->IsNull())
  5865. {
  5866. return (Js::Var)this->func->GetScriptContextInfo()->GetNullAddr();
  5867. }
  5868. }
  5869. return nullptr;
  5870. }
  5871. bool BoolAndIntStaticAndTypeMismatch(Value* src1Val, Value* src2Val, Js::Var src1Var, Js::Var src2Var)
  5872. {
  5873. ValueInfo *src1ValInfo = src1Val->GetValueInfo();
  5874. ValueInfo *src2ValInfo = src2Val->GetValueInfo();
  5875. return (src1ValInfo->IsNumber() && src1Var && src2ValInfo->IsBoolean() && src1Var != Js::TaggedInt::ToVarUnchecked(0) && src1Var != Js::TaggedInt::ToVarUnchecked(1)) ||
  5876. (src2ValInfo->IsNumber() && src2Var && src1ValInfo->IsBoolean() && src2Var != Js::TaggedInt::ToVarUnchecked(0) && src2Var != Js::TaggedInt::ToVarUnchecked(1));
  5877. }
// Attempts to constant-fold a conditional branch whose outcome is statically
// known from the sources' value infos / constant vars. On success the branch
// is resolved via OptConstFoldBr (which rewrites the control flow) and true
// is returned; otherwise the instruction is left untouched and false is
// returned.
bool
GlobOpt::OptConstFoldBranch(IR::Instr *instr, Value *src1Val, Value*src2Val, Value **pDstVal)
{
    if (!src1Val)
    {
        return false;
    }

    int64 left64, right64;
    Js::Var src1Var = this->GetConstantVar(instr->GetSrc1(), src1Val);

    Js::Var src2Var = nullptr;
    if (instr->GetSrc2())
    {
        if (!src2Val)
        {
            return false;
        }
        src2Var = this->GetConstantVar(instr->GetSrc2(), src2Val);
    }

    // Equal value numbers are only conclusive for primitive, non-float values:
    // NaN !== NaN, and objects can have valueOf/toString
    auto AreSourcesEqual = [&](Value * val1, Value * val2) -> bool
    {
        return val1->IsEqualTo(val2) &&
            val1->GetValueInfo()->IsPrimitive() && val1->GetValueInfo()->IsNotFloat();
    };

    // Make sure GetConstantVar only returns primitives.
    // TODO: OOP JIT, enabled these asserts
    //Assert(!src1Var || !Js::JavascriptOperators::IsObject(src1Var));
    //Assert(!src2Var || !Js::JavascriptOperators::IsObject(src2Var));

    BOOL result;
    int32 constVal;
    switch (instr->m_opcode)
    {
        // Typed compare branches: fold when both sides are int64 constants
        // (read signed or unsigned per UNSIGNEDNESS) or when the two sources
        // are provably the same value. NOTE(review): the macro dereferences
        // src2Val without a null check; these opcodes appear to always carry
        // two sources, so src2Val was validated above — confirm.
#define BRANCH(OPCODE,CMP,TYPE,UNSIGNEDNESS) \
    case Js::OpCode::##OPCODE: \
        if (src1Val->GetValueInfo()->TryGetInt64ConstantValue(&left64, UNSIGNEDNESS) && \
            src2Val->GetValueInfo()->TryGetInt64ConstantValue(&right64, UNSIGNEDNESS)) \
        { \
            result = (TYPE)left64 CMP (TYPE)right64; \
        } \
        else if (AreSourcesEqual(src1Val, src2Val)) \
        { \
            result = 0 CMP 0; \
        } \
        else \
        { \
            return false; \
        } \
        break;

    BRANCH(BrEq_I4, == , int64, false)
    BRANCH(BrGe_I4, >= , int64, false)
    BRANCH(BrGt_I4, >, int64, false)
    BRANCH(BrLt_I4, <, int64, false)
    BRANCH(BrLe_I4, <= , int64, false)
    BRANCH(BrNeq_I4, != , int64, false)
    BRANCH(BrUnGe_I4, >= , uint64, true)
    BRANCH(BrUnGt_I4, >, uint64, true)
    BRANCH(BrUnLt_I4, <, uint64, true)
    BRANCH(BrUnLe_I4, <= , uint64, true)
    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
        if (!src1Var || !src2Var)
        {
            // No constant vars on both sides: fold only on a provable static
            // type mismatch (number vs boolean that can never coerce equal)
            // or provably identical sources.
            if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
            {
                result = false;
            }
            else if (AreSourcesEqual(src1Val, src2Val))
            {
                result = true;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            // In-proc JIT: evaluate loose equality on the constant vars.
            result = Js::JavascriptOperators::Equal(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrNotEq_A:
        if (!src1Var || !src2Var)
        {
            // Mirror of the BrEq_A case with the result inverted.
            if (BoolAndIntStaticAndTypeMismatch(src1Val, src2Val, src1Var, src2Var))
            {
                result = true;
            }
            else if (AreSourcesEqual(src1Val, src2Val))
            {
                result = false;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::NotEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
        if (!src1Var || !src2Var)
        {
            // Strict equality: if one side definitely has a type the other
            // side has never had, the comparison is statically false.
            ValueInfo *src1ValInfo = src1Val->GetValueInfo();
            ValueInfo *src2ValInfo = src2Val->GetValueInfo();
            if (
                (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
                (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
                (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
                (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
                (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||
                (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
                (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
                (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
                (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
                (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
                )
            {
                result = false;
            }
            else if (AreSourcesEqual(src1Val, src2Val))
            {
                result = true;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::StrictEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
        if (!src1Var || !src2Var)
        {
            // Mirror of the BrSrEq_A case with the result inverted.
            ValueInfo *src1ValInfo = src1Val->GetValueInfo();
            ValueInfo *src2ValInfo = src2Val->GetValueInfo();
            if (
                (src1ValInfo->IsUndefined() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenUndefined()) ||
                (src1ValInfo->IsNull() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNull()) ||
                (src1ValInfo->IsBoolean() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenBoolean()) ||
                (src1ValInfo->IsNumber() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenNumber()) ||
                (src1ValInfo->IsString() && src2ValInfo->IsDefinite() && !src2ValInfo->HasBeenString()) ||
                (src2ValInfo->IsUndefined() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenUndefined()) ||
                (src2ValInfo->IsNull() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNull()) ||
                (src2ValInfo->IsBoolean() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenBoolean()) ||
                (src2ValInfo->IsNumber() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenNumber()) ||
                (src2ValInfo->IsString() && src1ValInfo->IsDefinite() && !src1ValInfo->HasBeenString())
                )
            {
                result = true;
            }
            else if (AreSourcesEqual(src1Val, src2Val))
            {
                result = false;
            }
            else
            {
                return false;
            }
        }
        else
        {
            if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
            {
                // TODO: OOP JIT, const folding
                return false;
            }
            result = Js::JavascriptOperators::NotStrictEqual(src1Var, src2Var, this->func->GetScriptContext());
        }
        break;
    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
        if(src1ValueInfo->IsNull() || src1ValueInfo->IsUndefined())
        {
            // null/undefined are falsy.
            result = instr->m_opcode == Js::OpCode::BrFalse_A;
            break;
        }
        if(src1ValueInfo->IsObject() && src1ValueInfo->GetObjectType() > ObjectType::Object)
        {
            // Specific object types that are tracked are equivalent to 'true'
            result = instr->m_opcode == Js::OpCode::BrTrue_A;
            break;
        }
        if (func->IsOOPJIT() || !CONFIG_FLAG(OOPJITMissingOpts))
        {
            // TODO: OOP JIT, const folding
            return false;
        }
        if (!src1Var)
        {
            return false;
        }
        result = Js::JavascriptConversion::ToBoolean(src1Var, this->func->GetScriptContext());
        if(instr->m_opcode == Js::OpCode::BrFalse_A)
        {
            result = !result;
        }
        break;
    }
    case Js::OpCode::BrFalse_I4:
        // this path would probably work outside of asm.js, but we should verify that if we ever hit this scenario
        Assert(GetIsAsmJSFunc());
        constVal = 0;
        if (!src1Val->GetValueInfo()->TryGetIntConstantValue(&constVal))
        {
            return false;
        }
        result = constVal == 0;
        break;
    default:
        return false;
#undef BRANCH
    }

    // The branch outcome is known: rewrite the control flow accordingly.
    this->OptConstFoldBr(!!result, instr);
    return true;
}
// Attempts to constant-fold a unary operation whose source is the int32
// constant intConstantValue. On success, rewrites the instruction into a load
// of the folded constant (int Ld_I4/LdC_A_I4 or float LdC_F8_R8/LdC_A_R8
// depending on whether type spec is on), updates *pDstVal (unless OptDst is
// left to copy src1Val), and returns true.
bool
GlobOpt::OptConstFoldUnary(
    IR::Instr * *pInstr,
    const int32 intConstantValue,
    const bool isUsingOriginalSrc1Value,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value = 0;
    IR::Opnd *constOpnd;
    bool isInt = true;
    bool doSetDstVal = true;
    FloatConstType fValue = 0.0;

    if (!DoConstFold())
    {
        return false;
    }

    if (instr->GetDst() && !instr->GetDst()->IsRegOpnd())
    {
        return false;
    }

    switch(instr->m_opcode)
    {
    case Js::OpCode::Neg_A:
        if (intConstantValue == 0)
        {
            // Could fold to -0.0
            return false;
        }

        if (Int32Math::Neg(intConstantValue, &value))
        {
            // Negation overflowed int32 (-INT32_MIN); don't fold.
            return false;
        }
        break;
    case Js::OpCode::Not_A:
        Int32Math::Not(intConstantValue, &value);
        break;
    case Js::OpCode::Ld_A:
        if (instr->HasBailOutInfo())
        {
            //The profile data for switch expr can be string and in GlobOpt we realize it is an int.
            if(instr->GetBailOutKind() == IR::BailOutExpectingString)
            {
                throw Js::RejitException(RejitReason::DisableSwitchOptExpectingString);
            }
            Assert(instr->GetBailOutKind() == IR::BailOutExpectingInteger);
            instr->ClearBailOutInfo();
        }
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;
    case Js::OpCode::Conv_Num:
    case Js::OpCode::LdC_A_I4:
        value = intConstantValue;
        if(isUsingOriginalSrc1Value)
        {
            doSetDstVal = false;  // Let OptDst do it by copying src1Val
        }
        break;
    case Js::OpCode::Incr_A:
        if (Int32Math::Inc(intConstantValue, &value))
        {
            // Increment overflowed int32; don't fold.
            return false;
        }
        break;
    case Js::OpCode::Decr_A:
        if (Int32Math::Dec(intConstantValue, &value))
        {
            // Decrement overflowed int32; don't fold.
            return false;
        }
        break;
    // Math.* built-ins with float results: compute now, fold to float
    // (demoted back to int below if the result is exactly integral).
    case Js::OpCode::InlineMathAcos:
        fValue = Js::Math::Acos((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathAsin:
        fValue = Js::Math::Asin((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathAtan:
        fValue = Js::Math::Atan((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathCos:
        fValue = Js::Math::Cos((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathExp:
        fValue = Js::Math::Exp((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathLog:
        fValue = Js::Math::Log((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathSin:
        fValue = Js::Math::Sin((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathSqrt:
        fValue = ::sqrt((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathTan:
        fValue = ::tan((double)intConstantValue);
        isInt = false;
        break;
    case Js::OpCode::InlineMathFround:
        // Math.fround: round-trip through float32.
        fValue = (double) (float) intConstantValue;
        isInt = false;
        break;
    case Js::OpCode::InlineMathAbs:
        if (intConstantValue == INT32_MIN)
        {
            if (instr->GetDst()->IsInt32())
            {
                // if dst is an int (e.g. in asm.js), we should coerce it, not convert to float
                value = static_cast<int32>(2147483648U);
            }
            else
            {
                // Rejit with AggressiveIntTypeSpecDisabled for Math.abs(INT32_MIN) because it causes dst
                // to be float type which could be different with previous type spec result in LoopPrePass
                throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
            }
        }
        else
        {
            value = ::abs(intConstantValue);
        }
        break;
    case Js::OpCode::InlineMathClz:
        // Math.clz32: count of leading zero bits; 32 when the input is 0
        // (_BitScanReverse reports no set bit).
        DWORD clz;
        if (_BitScanReverse(&clz, intConstantValue))
        {
            value = 31 - clz;
        }
        else
        {
            value = 32;
        }
        instr->ClearBailOutInfo();
        break;
    case Js::OpCode::Ctz:
        // Count trailing zeros (wasm only); 32 when the input is 0.
        Assert(func->GetJITFunctionBody()->IsWasmFunction());
        Assert(!instr->HasBailOutInfo());
        DWORD ctz;
        if (_BitScanForward(&ctz, intConstantValue))
        {
            value = ctz;
        }
        else
        {
            value = 32;
        }
        break;
    // floor/ceil/round of an int constant is the constant itself.
    case Js::OpCode::InlineMathFloor:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;
    case Js::OpCode::InlineMathCeil:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;
    case Js::OpCode::InlineMathRound:
        value = intConstantValue;
        instr->ClearBailOutInfo();
        break;
    case Js::OpCode::ToVar:
        if (Js::TaggedInt::IsOverflow(intConstantValue))
        {
            // Can't represent as a tagged int; don't fold.
            return false;
        }
        else
        {
            value = intConstantValue;
            instr->ClearBailOutInfo();
            break;
        }
    default:
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    Assert(!instr->HasBailOutInfo()); // If we are, in fact, successful in constant folding the instruction, there is no point in having the bailoutinfo around anymore.
                                      // Make sure that it is cleared if it was initially present.

    // Demote an exactly-integral float result back to int.
    if (!isInt)
    {
        value = (int32)fValue;
        if (fValue == (double)value)
        {
            isInt = true;
        }
    }
    if (isInt)
    {
        constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %d\n"), value);
    }
    else
    {
        constOpnd = IR::FloatConstOpnd::New(fValue, TyFloat64, instr->m_func);
        GOPT_TRACE(_u("Constant folding to %f\n"), fValue);
    }
    instr->ReplaceSrc1(constOpnd);
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (isInt)
    {
        if (dstSym->IsSingleDef())
        {
            dstSym->SetIsIntConst(value);
        }

        if (doSetDstVal)
        {
            *pDstVal = GetIntConstantValue(value, instr, dst);
        }

        if (IsTypeSpecPhaseOff(this->func))
        {
            instr->m_opcode = Js::OpCode::LdC_A_I4;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            instr->m_opcode = Js::OpCode::Ld_I4;
            this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);

            // ToInt32Dst may have replaced the dst sym; keep the const flag in sync.
            StackSym * currDstSym = instr->GetDst()->AsRegOpnd()->m_sym;
            if (currDstSym->IsSingleDef())
            {
                currDstSym->SetIsIntConst(value);
            }
        }
    }
    else
    {
        *pDstVal = NewFloatConstantValue(fValue, dst);

        if (IsTypeSpecPhaseOff(this->func))
        {
            instr->m_opcode = Js::OpCode::LdC_A_R8;
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        else
        {
            instr->m_opcode = Js::OpCode::LdC_F8_R8;
            this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
        }
    }
    return true;
}
  6371. //------------------------------------------------------------------------------------------------------
  6372. // Type specialization
  6373. //------------------------------------------------------------------------------------------------------
  6374. bool
  6375. GlobOpt::IsWorthSpecializingToInt32DueToSrc(IR::Opnd *const src, Value *const val)
  6376. {
  6377. Assert(src);
  6378. Assert(val);
  6379. ValueInfo *valueInfo = val->GetValueInfo();
  6380. Assert(valueInfo->IsLikelyInt());
  6381. // If it is not known that the operand is definitely an int, the operand is not already type-specialized, and it's not live
  6382. // in the loop landing pad (if we're in a loop), it's probably not worth type-specializing this instruction. The common case
  6383. // where type-specializing this would be bad is where the operations are entirely on properties or array elements, where the
  6384. // ratio of FromVars and ToVars to the number of actual operations is high, and the conversions would dominate the time
  6385. // spent. On the other hand, if we're using a function formal parameter more than once, it would probably be worth
  6386. // type-specializing it, hence the IsDead check on the operands.
  6387. return
  6388. valueInfo->IsInt() ||
  6389. valueInfo->HasIntConstantValue(true) ||
  6390. !src->GetIsDead() ||
  6391. !src->IsRegOpnd() ||
  6392. CurrentBlockData()->IsInt32TypeSpecialized(src->AsRegOpnd()->m_sym) ||
  6393. (this->currentBlock->loop && this->currentBlock->loop->landingPad->globOptData.IsLive(src->AsRegOpnd()->m_sym));
  6394. }
  6395. bool
  6396. GlobOpt::IsWorthSpecializingToInt32DueToDst(IR::Opnd *const dst)
  6397. {
  6398. Assert(dst);
  6399. const auto sym = dst->AsRegOpnd()->m_sym;
  6400. return
  6401. CurrentBlockData()->IsInt32TypeSpecialized(sym) ||
  6402. (this->currentBlock->loop && this->currentBlock->loop->landingPad->globOptData.IsLive(sym));
  6403. }
  6404. bool
  6405. GlobOpt::IsWorthSpecializingToInt32(IR::Instr *const instr, Value *const src1Val, Value *const src2Val)
  6406. {
  6407. Assert(instr);
  6408. const auto src1 = instr->GetSrc1();
  6409. const auto src2 = instr->GetSrc2();
  6410. // In addition to checking each operand and the destination, if for any reason we only have to do a maximum of two
  6411. // conversions instead of the worst-case 3 conversions, it's probably worth specializing.
  6412. if (IsWorthSpecializingToInt32DueToSrc(src1, src1Val) ||
  6413. (src2Val && IsWorthSpecializingToInt32DueToSrc(src2, src2Val)))
  6414. {
  6415. return true;
  6416. }
  6417. IR::Opnd *dst = instr->GetDst();
  6418. if (!dst || IsWorthSpecializingToInt32DueToDst(dst))
  6419. {
  6420. return true;
  6421. }
  6422. if (dst->IsEqual(src1) || (src2Val && (dst->IsEqual(src2) || src1->IsEqual(src2))))
  6423. {
  6424. return true;
  6425. }
  6426. IR::Instr *instrNext = instr->GetNextRealInstrOrLabel();
  6427. // Skip useless Ld_A's
  6428. do
  6429. {
  6430. switch (instrNext->m_opcode)
  6431. {
  6432. case Js::OpCode::Ld_A:
  6433. if (!dst->IsEqual(instrNext->GetSrc1()))
  6434. {
  6435. goto done;
  6436. }
  6437. dst = instrNext->GetDst();
  6438. break;
  6439. case Js::OpCode::LdFld:
  6440. case Js::OpCode::LdRootFld:
  6441. case Js::OpCode::LdRootFldForTypeOf:
  6442. case Js::OpCode::LdFldForTypeOf:
  6443. case Js::OpCode::LdElemI_A:
  6444. case Js::OpCode::ByteCodeUses:
  6445. break;
  6446. default:
  6447. goto done;
  6448. }
  6449. instrNext = instrNext->GetNextRealInstrOrLabel();
  6450. } while (true);
  6451. done:
  6452. // If the next instr could also be type specialized, then it is probably worth it.
  6453. if ((instrNext->GetSrc1() && dst->IsEqual(instrNext->GetSrc1())) || (instrNext->GetSrc2() && dst->IsEqual(instrNext->GetSrc2())))
  6454. {
  6455. switch (instrNext->m_opcode)
  6456. {
  6457. case Js::OpCode::Add_A:
  6458. case Js::OpCode::Sub_A:
  6459. case Js::OpCode::Mul_A:
  6460. case Js::OpCode::Div_A:
  6461. case Js::OpCode::Rem_A:
  6462. case Js::OpCode::Xor_A:
  6463. case Js::OpCode::And_A:
  6464. case Js::OpCode::Or_A:
  6465. case Js::OpCode::Shl_A:
  6466. case Js::OpCode::Shr_A:
  6467. case Js::OpCode::Incr_A:
  6468. case Js::OpCode::Decr_A:
  6469. case Js::OpCode::Neg_A:
  6470. case Js::OpCode::Not_A:
  6471. case Js::OpCode::Conv_Num:
  6472. case Js::OpCode::BrEq_I4:
  6473. case Js::OpCode::BrTrue_I4:
  6474. case Js::OpCode::BrFalse_I4:
  6475. case Js::OpCode::BrGe_I4:
  6476. case Js::OpCode::BrGt_I4:
  6477. case Js::OpCode::BrLt_I4:
  6478. case Js::OpCode::BrLe_I4:
  6479. case Js::OpCode::BrNeq_I4:
  6480. return true;
  6481. }
  6482. }
  6483. return false;
  6484. }
  6485. bool
  6486. GlobOpt::TypeSpecializeNumberUnary(IR::Instr *instr, Value *src1Val, Value **pDstVal)
  6487. {
  6488. Assert(src1Val->GetValueInfo()->IsNumber());
  6489. if (this->IsLoopPrePass())
  6490. {
  6491. return false;
  6492. }
  6493. switch (instr->m_opcode)
  6494. {
  6495. case Js::OpCode::Conv_Num:
  6496. // Optimize Conv_Num away since we know this is a number
  6497. instr->m_opcode = Js::OpCode::Ld_A;
  6498. return false;
  6499. }
  6500. return false;
  6501. }
// Attempts to type-specialize a unary instruction, trying in order: inline
// built-in specific specialization, LdLen specialization, int specialization
// (when an int range is available), then float/number fallbacks. May redirect
// *pSrc1Val back to the original (pre-conversion) value for the fallbacks.
// Returns true if specialization was performed or delegated.
bool
GlobOpt::TypeSpecializeUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;
    Assert(src1Val);

    // We don't need to do typespec for asmjs
    if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
    {
        return false;
    }

    IR::Instr *&instr = *pInstr;
    int32 min, max;

    // Inline built-ins explicitly specify how srcs/dst must be specialized.
    if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        TypeSpecializeInlineBuiltInUnary(pInstr, &src1Val, pDstVal, src1OriginalVal, redoTypeSpecRef);
        return true;
    }

    // Consider: If type spec wasn't completely done, make sure that we don't type-spec the dst 2nd time.
    if(instr->m_opcode == Js::OpCode::LdLen_A && TypeSpecializeLdLen(&instr, &src1Val, pDstVal, forceInvariantHoistingRef))
    {
        return true;
    }

    if (!src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, this->DoAggressiveIntTypeSpec()))
    {
        // No usable int range: fall back to float/number specialization, using
        // the original (unconverted) source value.
        src1Val = src1OriginalVal;
        if (src1Val->GetValueInfo()->IsLikelyFloat())
        {
            // Try to type specialize to float
            return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
        }
        else if (src1Val->GetValueInfo()->IsNumber())
        {
            return TypeSpecializeNumberUnary(instr, src1Val, pDstVal);
        }
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    return this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, min, max, src1OriginalVal, redoTypeSpecRef);
}
// Type specializes the src/dst of an inline built-in unary instruction as
// dictated by the built-in's flags; the caller treats the instruction as fully handled.
// Type specializes the src/dst of an inline built-in unary instruction based
// on the built-in's flags: always-float Math.* built-ins, Math.abs (int with
// an INT_MIN bailout, else float), floor/ceil/round (float src, int dst),
// Array.prototype.pop (dst from array profile), and clz32 (int).
void
GlobOpt::TypeSpecializeInlineBuiltInUnary(IR::Instr **pInstr, Value **pSrc1Val, Value **pDstVal, Value *const src1OriginalVal, bool *redoTypeSpecRef)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode);   // From actual instr, not profile based.
    Assert(builtInId != Js::BuiltinFunction::None);

    // Consider using different bailout for float/int FromVars, so that when the arg cannot be converted to number we don't disable
    // type spec for other parts of the big function but rather just don't inline that built-in instr.
    // E.g. could do that if the value is not likelyInt/likelyFloat.

    Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
    bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_Args) == Js::BuiltInFlags::BIF_TypeSpecUnaryToFloat;
    if (areAllArgsAlwaysFloat)
    {
        // InlineMathAcos, InlineMathAsin, InlineMathAtan, InlineMathCos, InlineMathExp, InlineMathLog, InlineMathSin, InlineMathSqrt, InlineMathTan.
        Assert(this->DoFloatTypeSpec());

        // Type-spec the src.
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline built-ins the args have to be type-specialized to float, but something failed during the process.");

        // Type-spec the dst.
        this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathAbs)
    {
        // Consider the case when the value is unknown - because of bailout in abs we may disable type spec for the whole function which is too much.
        // First, try int.
        int minVal, maxVal;
        bool shouldTypeSpecToInt = src1Val->GetValueInfo()->GetIntValMinMax(&minVal, &maxVal, /* doAggressiveIntTypeSpec = */ true);
        if (shouldTypeSpecToInt)
        {
            Assert(this->DoAggressiveIntTypeSpec());
            bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, minVal, maxVal, src1OriginalVal, redoTypeSpecRef, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (int), but something failed during the process.");

            if (!this->IsLoopPrePass())
            {
                // Create bailout for INT_MIN which does not have corresponding int value on the positive side.
                // Check int range: if we know the range is out of overflow, we do not need the bail out at all.
                if (minVal == INT32_MIN)
                {
                    GenerateBailAtOperation(&instr, IR::BailOnIntMin);
                }
            }

            // Account for ::abs(INT_MIN) == INT_MIN (which is less than 0).
            maxVal = ::max(
                ::abs(Int32Math::NearestInRangeTo(minVal, INT_MIN + 1, INT_MAX)),
                ::abs(Int32Math::NearestInRangeTo(maxVal, INT_MIN + 1, INT_MAX)));
            minVal = minVal >= 0 ? minVal : 0;
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, nullptr, IR::BailOutInvalid, minVal, maxVal, pDstVal);
        }
        else
        {
            // If we couldn't do int, do float.
            Assert(this->DoFloatTypeSpec());
            src1Val = src1OriginalVal;
            bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, true);
            AssertMsg(retVal, "For inline built-ins the args have to be type-specialized (float), but something failed during the process.");

            this->TypeSpecializeFloatDst(instr, nullptr, src1Val, nullptr, pDstVal);
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathFloor || instr->m_opcode == Js::OpCode::InlineMathCeil || instr->m_opcode == Js::OpCode::InlineMathRound)
    {
        // Type specialize src to float
        src1Val = src1OriginalVal;
        bool retVal = this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal, /* skipDst = */ true);
        AssertMsg(retVal, "For inline Math.floor and Math.ceil the src has to be type-specialized to float, but something failed during the process.");

        // Type specialize dst to int
        this->TypeSpecializeIntDst(
            instr,
            instr->m_opcode,
            nullptr,
            src1Val,
            nullptr,
            IR::BailOutInvalid,
            INT32_MIN,
            INT32_MAX,
            pDstVal);
    }
    else if(instr->m_opcode == Js::OpCode::InlineArrayPop)
    {
        IR::Opnd *const thisOpnd = instr->GetSrc1();

        Assert(thisOpnd);

        // Ensure src1 (Array) is a var
        this->ToVarUses(instr, thisOpnd, false, src1Val);

        if(!this->IsLoopPrePass() && thisOpnd->GetValueType().IsLikelyNativeArray())
        {
            // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during the run time.
            GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
        }

        if(!instr->GetDst())
        {
            // Result unused: nothing to specialize.
            return;
        }

        // Try Type Specializing the element (return item from Pop) based on the array's profile data.
        if(thisOpnd->GetValueType().IsLikelyNativeIntArray())
        {
            this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, nullptr, nullptr, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
        }
        else if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
        {
            this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
        }
        else
        {
            // We reached here so the Element is not yet type specialized. Ensure element is a var
            if(instr->GetDst()->IsRegOpnd())
            {
                this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
            }
        }
    }
    else if (instr->m_opcode == Js::OpCode::InlineMathClz)
    {
        Assert(this->DoAggressiveIntTypeSpec());
        Assert(this->DoLossyIntTypeSpec());
        //Type specialize to int
        bool retVal = this->TypeSpecializeIntUnary(pInstr, &src1Val, pDstVal, INT32_MIN, INT32_MAX, src1OriginalVal, redoTypeSpecRef);
        AssertMsg(retVal, "For clz32, the arg has to be type-specialized to int.");
    }
    else
    {
        AssertMsg(FALSE, "Unsupported built-in!");
    }
}
  6676. void
  6677. GlobOpt::TypeSpecializeInlineBuiltInBinary(IR::Instr **pInstr, Value *src1Val, Value* src2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal)
  6678. {
  6679. IR::Instr *&instr = *pInstr;
  6680. Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
  6681. switch(instr->m_opcode)
  6682. {
  6683. case Js::OpCode::InlineMathAtan2:
  6684. {
  6685. Js::BuiltinFunction builtInId = Js::JavascriptLibrary::GetBuiltInInlineCandidateId(instr->m_opcode); // From actual instr, not profile based.
  6686. Js::BuiltInFlags builtInFlags = Js::JavascriptLibrary::GetFlagsForBuiltIn(builtInId);
  6687. bool areAllArgsAlwaysFloat = (builtInFlags & Js::BuiltInFlags::BIF_TypeSpecAllToFloat) != 0;
  6688. Assert(areAllArgsAlwaysFloat);
  6689. Assert(this->DoFloatTypeSpec());
  6690. // Type-spec the src1, src2 and dst.
  6691. src1Val = src1OriginalVal;
  6692. src2Val = src2OriginalVal;
  6693. bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  6694. AssertMsg(retVal, "For pow and atnan2 the args have to be type-specialized to float, but something failed during the process.");
  6695. break;
  6696. }
  6697. case Js::OpCode::InlineMathPow:
  6698. {
  6699. #ifndef _M_ARM32_OR_ARM64
  6700. if (src2Val->GetValueInfo()->IsLikelyInt())
  6701. {
  6702. bool lossy = false;
  6703. this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, lossy);
  6704. IR::Opnd* src1 = instr->GetSrc1();
  6705. int32 valueMin, valueMax;
  6706. if (src1Val->GetValueInfo()->IsLikelyInt() &&
  6707. this->DoPowIntIntTypeSpec() &&
  6708. src2Val->GetValueInfo()->GetIntValMinMax(&valueMin, &valueMax, this->DoAggressiveIntTypeSpec()) &&
  6709. valueMin >= 0)
  6710. {
  6711. this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);
  6712. this->TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, INT32_MIN, INT32_MAX, pDstVal);
  6713. if(!this->IsLoopPrePass())
  6714. {
  6715. GenerateBailAtOperation(&instr, IR::BailOutOnPowIntIntOverflow);
  6716. }
  6717. }
  6718. else
  6719. {
  6720. this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);
  6721. TypeSpecializeFloatDst(instr, nullptr, src1Val, src2Val, pDstVal);
  6722. }
  6723. }
  6724. else
  6725. {
  6726. #endif
  6727. this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  6728. #ifndef _M_ARM32_OR_ARM64
  6729. }
  6730. #endif
  6731. break;
  6732. }
  6733. case Js::OpCode::InlineMathImul:
  6734. {
  6735. Assert(this->DoAggressiveIntTypeSpec());
  6736. Assert(this->DoLossyIntTypeSpec());
  6737. //Type specialize to int
  6738. bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, false /* skipDst */);
  6739. AssertMsg(retVal, "For imul, the args have to be type-specialized to int but something failed during the process.");
  6740. break;
  6741. }
  6742. case Js::OpCode::InlineMathMin:
  6743. case Js::OpCode::InlineMathMax:
  6744. {
  6745. if(src1Val->GetValueInfo()->IsLikelyInt() && src2Val->GetValueInfo()->IsLikelyInt())
  6746. {
  6747. // Compute resulting range info
  6748. int32 min1 = INT32_MIN;
  6749. int32 max1 = INT32_MAX;
  6750. int32 min2 = INT32_MIN;
  6751. int32 max2 = INT32_MAX;
  6752. int32 newMin, newMax;
  6753. Assert(this->DoAggressiveIntTypeSpec());
  6754. src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
  6755. src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
  6756. if (instr->m_opcode == Js::OpCode::InlineMathMin)
  6757. {
  6758. newMin = min(min1, min2);
  6759. newMax = min(max1, max2);
  6760. }
  6761. else
  6762. {
  6763. Assert(instr->m_opcode == Js::OpCode::InlineMathMax);
  6764. newMin = max(min1, min2);
  6765. newMax = max(max1, max2);
  6766. }
  6767. // Type specialize to int
  6768. bool retVal = this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, newMin, newMax, false /* skipDst */);
  6769. AssertMsg(retVal, "For min and max, the args have to be type-specialized to int if any one of the sources is an int, but something failed during the process.");
  6770. }
  6771. // Couldn't type specialize to int, type specialize to float
  6772. else
  6773. {
  6774. Assert(this->DoFloatTypeSpec());
  6775. src1Val = src1OriginalVal;
  6776. src2Val = src2OriginalVal;
  6777. bool retVal = this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  6778. AssertMsg(retVal, "For min and max, the args have to be type-specialized to float if any one of the sources is a float, but something failed during the process.");
  6779. }
  6780. break;
  6781. }
  6782. case Js::OpCode::InlineArrayPush:
  6783. {
  6784. IR::Opnd *const thisOpnd = instr->GetSrc1();
  6785. Assert(thisOpnd);
  6786. if(instr->GetDst() && instr->GetDst()->IsRegOpnd())
  6787. {
  6788. // Set the dst as live here, as the built-ins return early from the TypeSpecialization functions - before the dst is marked as live.
  6789. // Also, we are not specializing the dst separately and we are skipping the dst to be handled when we specialize the instruction above.
  6790. this->ToVarRegOpnd(instr->GetDst()->AsRegOpnd(), currentBlock);
  6791. }
  6792. // Ensure src1 (Array) is a var
  6793. this->ToVarUses(instr, thisOpnd, false, src1Val);
  6794. if(!this->IsLoopPrePass())
  6795. {
  6796. if(thisOpnd->GetValueType().IsLikelyNativeArray())
  6797. {
  6798. // We bail out, if there is illegal access or a mismatch in the Native array type that is optimized for, during run time.
  6799. GenerateBailAtOperation(&instr, IR::BailOutConventionalNativeArrayAccessOnly);
  6800. }
  6801. else
  6802. {
  6803. GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
  6804. }
  6805. }
  6806. // Try Type Specializing the element based on the array's profile data.
  6807. if(thisOpnd->GetValueType().IsLikelyNativeFloatArray())
  6808. {
  6809. src1Val = src1OriginalVal;
  6810. src2Val = src2OriginalVal;
  6811. }
  6812. if((thisOpnd->GetValueType().IsLikelyNativeIntArray() && this->TypeSpecializeIntBinary(pInstr, src1Val, src2Val, pDstVal, INT32_MIN, INT32_MAX, true))
  6813. || (thisOpnd->GetValueType().IsLikelyNativeFloatArray() && this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal)))
  6814. {
  6815. break;
  6816. }
  6817. // The Element is not yet type specialized. Ensure element is a var
  6818. this->ToVarUses(instr, instr->GetSrc2(), false, src2Val);
  6819. break;
  6820. }
  6821. }
  6822. }
  6823. void
  6824. GlobOpt::TypeSpecializeInlineBuiltInDst(IR::Instr **pInstr, Value **pDstVal)
  6825. {
  6826. IR::Instr *&instr = *pInstr;
  6827. Assert(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));
  6828. if (instr->m_opcode == Js::OpCode::InlineMathRandom)
  6829. {
  6830. Assert(this->DoFloatTypeSpec());
  6831. // Type specialize dst to float
  6832. this->TypeSpecializeFloatDst(instr, nullptr, nullptr, nullptr, pDstVal);
  6833. }
  6834. }
  6835. bool
  6836. GlobOpt::TryTypeSpecializeUnaryToFloatHelper(IR::Instr** pInstr, Value** pSrc1Val, Value* const src1OriginalVal, Value **pDstVal)
  6837. {
  6838. // It has been determined that this instruction cannot be int-specialized. We need to determine whether to attempt to
  6839. // float-specialize the instruction, or leave it unspecialized.
  6840. #if !INT32VAR
  6841. Value*& src1Val = *pSrc1Val;
  6842. if(src1Val->GetValueInfo()->IsLikelyUntaggedInt())
  6843. {
  6844. // An input range is completely outside the range of an int31. Even if the operation may overflow, it is
  6845. // unlikely to overflow on these operations, so we leave it unspecialized on 64-bit platforms. However, on
  6846. // 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is significantly slower to
  6847. // use in an unspecialized operation compared to a tagged int. So, try to float-specialize the instruction.
  6848. src1Val = src1OriginalVal;
  6849. return this->TypeSpecializeFloatUnary(pInstr, src1Val, pDstVal);
  6850. }
  6851. #endif
  6852. return false;
  6853. }
// Int-specializes the sources (and optionally the dst) of an inlined binary
// built-in. Currently only reached for inline built-in opcodes; InlineArrayPush
// gets special handling (only its element operand is specialized).
// [min, max] is the int range to record for the dst. Returns true if the
// instruction was int-specialized, false otherwise.
bool
GlobOpt::TypeSpecializeIntBinary(IR::Instr **pInstr, Value *src1Val, Value *src2Val, Value **pDstVal, int32 min, int32 max, bool skipDst /* = false */)
{
    // Consider moving the code for int type spec-ing binary functions here.
    IR::Instr *&instr = *pInstr;
    bool lossy = false;

    if(OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        if(instr->m_opcode == Js::OpCode::InlineArrayPush)
        {
            // For push, src2 is the element being pushed; src1 is the array and
            // must remain a var.
            int32 intConstantValue;
            bool isIntConstMissingItem = src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue);

            if(isIntConstMissingItem)
            {
                // A constant equal to the native-int-array "missing item" sentinel
                // cannot be stored as a raw int32.
                isIntConstMissingItem = Js::SparseArraySegment<int>::IsMissingItem(&intConstantValue);
            }
            // Don't specialize if the element is not likelyInt or an IntConst which is a missing item value.
            if(!(src2Val->GetValueInfo()->IsLikelyInt()) || isIntConstMissingItem)
            {
                return false;
            }
            // We don't want to specialize both the source operands, though it is a binary instr.
            IR::Opnd * elementOpnd = instr->GetSrc2();
            this->ToInt32(instr, elementOpnd, this->currentBlock, src2Val, nullptr, lossy);
        }
        else
        {
            // General case: losslessly convert both sources to int32.
            IR::Opnd *src1 = instr->GetSrc1();
            this->ToInt32(instr, src1, this->currentBlock, src1Val, nullptr, lossy);

            IR::Opnd *src2 = instr->GetSrc2();
            this->ToInt32(instr, src2, this->currentBlock, src2Val, nullptr, lossy);
        }

        if(!skipDst)
        {
            IR::Opnd *dst = instr->GetDst();
            if (dst)
            {
                // Record the [min, max] range for the dst value; no bailout is
                // attached here (conversion bailouts were handled by ToInt32).
                TypeSpecializeIntDst(instr, instr->m_opcode, nullptr, src1Val, src2Val, IR::BailOutInvalid, min, max, pDstVal);
            }
        }
        return true;
    }
    else
    {
        AssertMsg(false, "Yet to move code for other binary functions here");
        return false;
    }
}
// Attempts to int-specialize a unary (or implicitly unary, e.g. Incr_A/Decr_A)
// instruction, given the [min, max] int range of its source value.
//
// - pSrc1Val: in/out source value; may be redirected to src1OriginalVal when a
//   lossy conversion must be based on the original (pre-overflow-adjusted) value.
// - src1OriginalVal: the source value before any prior specialization.
// - redoTypeSpecRef: set to true when CSE rewrote the expression, meaning type
//   specialization must be redone on the resulting instruction.
// - skipDst: when true, the dst is left for the caller to specialize.
//
// Returns true if the instruction was int-specialized (or const-folded), false
// if it was left unspecialized (possibly float-specialized via the helper).
bool
GlobOpt::TypeSpecializeIntUnary(
    IR::Instr **pInstr,
    Value **pSrc1Val,
    Value **pDstVal,
    int32 min,
    int32 max,
    Value *const src1OriginalVal,
    bool *redoTypeSpecRef,
    bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;

    Assert(pSrc1Val);
    Value *&src1Val = *pSrc1Val;

    bool isTransfer = false;
    Js::OpCode opcode;
    int32 newMin, newMax;
    bool lossy = false;
    IR::BailOutKind bailOutKind = IR::BailOutInvalid;
    bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
    bool ignoredNegativeZero = false;
    bool checkTypeSpecWorth = false;

    // If the sym is already known not to be an int, don't bother with int spec;
    // try float instead.
    if(instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt)
    {
        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
    }

    AddSubConstantInfo addSubConstantInfo;
    switch(instr->m_opcode)
    {
    case Js::OpCode::Ld_A:
        if (instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (CurrentBlockData()->IsInt32TypeSpecialized(sym) == false)
            {
                // Type specializing an Ld_A isn't worth it, unless the src
                // is already type specialized.
                return false;
            }
        }
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::Conv_Num:
        // Conv_Num on an int is a no-op; treat as a transfer.
        newMin = min;
        newMax = max;
        opcode = Js::OpCode::Ld_I4;
        isTransfer = true;
        break;

    case Js::OpCode::LdC_A_I4:
        // Loading an int constant: the range collapses to the constant itself.
        newMin = newMax = instr->GetSrc1()->AsIntConstOpnd()->AsInt32();
        opcode = Js::OpCode::Ld_I4;
        break;

    case Js::OpCode::Neg_A:
        if (min <= 0 && max >= 0)
        {
            // The source range includes zero, so negation may produce -0.
            if(instr->ShouldCheckForNegativeZero())
            {
                // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
                if(!DoAggressiveIntTypeSpec())
                {
                    // May result in -0
                    // Consider adding a dynamic check for src1 == 0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == 0 && max == 0)
                {
                    // Always results in -0
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnNegativeZero;
            }
            else
            {
                ignoredNegativeZero = true;
            }
        }
        // Negation flips the range: -min becomes the new max, -max the new min.
        // Int32Math::Neg returns true on overflow (only for INT32_MIN).
        if (Int32Math::Neg(min, &newMax))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                if(min == max)
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMax = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if (Int32Math::Neg(max, &newMin))
        {
            if(instr->ShouldCheckForIntOverflow())
            {
                if(!DoAggressiveIntTypeSpec())
                {
                    // May overflow
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                bailOutKind |= IR::BailOutOnOverflow;
                newMin = INT32_MAX;
            }
            else
            {
                ignoredIntOverflow = true;
            }
        }
        if(!instr->ShouldCheckForIntOverflow() && newMin > newMax)
        {
            // When ignoring overflow, the range needs to account for overflow. Since MIN_INT is the only int32 value that
            // overflows on Neg, and the value resulting from overflow is also MIN_INT, if calculating only the new min or new
            // max overflowed but not both, then the new min will be greater than the new max. In that case we need to consider
            // the full range of int32s as possible resulting values.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        opcode = Js::OpCode::Neg_I4;
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Not_A:
        if(!DoLossyIntTypeSpec())
        {
            return false;
        }
        // Bitwise-not is a lossy (ToInt32) conversion of the source.
        this->PropagateIntRangeForNot(min, max, &newMin, &newMax);
        opcode = Js::OpCode::Not_I4;
        lossy = true;
        break;

    case Js::OpCode::Incr_A:
        do // while(false)
        {
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(1);
            };

            // Int32Math::Inc returns true on overflow.
            if (Int32Math::Inc(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
                    // we use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Inc(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MAX;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMax = INT32_MAX;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Record (src + 1) so the dst value can track relative int bounds.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, 1);
        }

        // Incr_A is lowered as (src + 1).
        opcode = Js::OpCode::Add_I4;
        if (!this->IsLoopPrePass())
        {
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::Decr_A:
        do // while(false)
        {
            const auto CannotOverflowBasedOnRelativeBounds = [&]()
            {
                const ValueInfo *const src1ValueInfo = src1Val->GetValueInfo();
                return
                    (src1ValueInfo->IsInt() || DoAggressiveIntTypeSpec()) &&
                    src1ValueInfo->IsIntBounded() &&
                    src1ValueInfo->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(1);
            };

            // Int32Math::Dec returns true on overflow (underflow past INT32_MIN).
            if (Int32Math::Dec(max, &newMax))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMax = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    // Always overflows
                    return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                }
                else
                {
                    // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
                    // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints, we
                    // use the full range of int32s.
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
            if (Int32Math::Dec(min, &newMin))
            {
                if(CannotOverflowBasedOnRelativeBounds())
                {
                    newMin = INT32_MIN;
                }
                else if(instr->ShouldCheckForIntOverflow())
                {
                    if(!DoAggressiveIntTypeSpec())
                    {
                        // May overflow
                        return TryTypeSpecializeUnaryToFloatHelper(pInstr, &src1Val, src1OriginalVal, pDstVal);
                    }
                    bailOutKind |= IR::BailOutOnOverflow;
                    newMin = INT32_MIN;
                }
                else
                {
                    // See comment about ignoring overflow above
                    ignoredIntOverflow = true;
                    newMin = INT32_MIN;
                    newMax = INT32_MAX;
                    break;
                }
            }
        } while(false);

        if(!ignoredIntOverflow && instr->GetSrc1()->IsRegOpnd())
        {
            // Record (src - 1) so the dst value can track relative int bounds.
            addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min == max, -1);
        }

        // Decr_A is lowered as (src - 1).
        opcode = Js::OpCode::Sub_I4;
        if (!this->IsLoopPrePass())
        {
            instr->SetSrc2(IR::IntConstOpnd::New(1, TyInt32, instr->m_func));
        }
        checkTypeSpecWorth = true;
        break;

    case Js::OpCode::BrFalse_A:
    case Js::OpCode::BrTrue_A:
    {
        // Try folding the branch outright when the source range is known.
        if(DoConstFold() && !IsLoopPrePass() && TryOptConstFoldBrFalse(instr, src1Val, min, max))
        {
            return true;
        }

        bool specialize = true;
        if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
        {
            StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
            if (CurrentBlockData()->IsInt32TypeSpecialized(sym) == false)
            {
                // Type specializing a BrTrue_A/BrFalse_A isn't worth it, unless the src
                // is already type specialized
                specialize = false;
            }
        }
        // Update value bounds along the taken/not-taken paths (branch on zero/non-zero)
        // even when we end up not specializing the branch itself.
        if(instr->m_opcode == Js::OpCode::BrTrue_A)
        {
            UpdateIntBoundsForNotEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrTrue_I4;
        }
        else
        {
            UpdateIntBoundsForEqualBranch(src1Val, nullptr, 0);
            opcode = Js::OpCode::BrFalse_I4;
        }
        if(!specialize)
        {
            return false;
        }

        newMin = 2; newMax = 1; // We'll assert if we make a range where min > max
        break;
    }

    case Js::OpCode::MultiBr:
        newMin = min;
        newMax = max;
        opcode = instr->m_opcode;
        break;

    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::StElemC:
        // Element stores have their own specialization path; for float arrays the
        // original (pre-specialization) source value must be used.
        if(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsLikelyAnyArrayWithNativeFloatValues())
        {
            src1Val = src1OriginalVal;
        }
        return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
#if !FLOATVAR
    case Js::OpCode::StSlotBoxTemp:
#endif
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StFldStrict:
    case Js::OpCode::StRootFldStrict:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_FixupForStackArgs:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_SpreadArg:
    // For this one we need to implement type specialization
    //case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::Ret:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
        // These consume/produce vars; never int-specialize them here.
        return false;

    default:
        if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
        {
            newMin = min;
            newMax = max;
            opcode = instr->m_opcode;
            break; // Note: we must keep checkTypeSpecWorth = false to make sure we never return false from this function.
        }
        return false;
    }

    // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it (won't
    // leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value needs to be
    // guaranteed to be an int
    if(checkTypeSpecWorth &&
        !ignoredIntOverflow &&
        !ignoredNegativeZero &&
        instr->ShouldCheckForIntOverflow() &&
        !IsWorthSpecializingToInt32(instr, src1Val))
    {
        // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
        // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
        // the following cases.
        // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to be
        //   an int, but since we're not going to specialize this instruction, there won't be a bailout check.
        // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
        //   that case, especially if the dst sym is live on the back-edge.
        if(bailOutKind == IR::BailOutInvalid &&
            instr->GetDst() &&
            (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
        {
            *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, nullptr);
        }

        // Undo the src2 added for Incr_A/Decr_A above, since the instruction is
        // staying unspecialized.
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        return false;
    }

    this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
    this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;

    {
        // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
        Value *src1IndirIndexVal = nullptr;
        Value *src2Val = nullptr;
        if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
        {
            *redoTypeSpecRef = true;
            return false;
        }
    }

    const Js::OpCode originalOpCode = instr->m_opcode;
    if (!this->IsLoopPrePass())
    {
        // No re-write on prepass
        instr->m_opcode = opcode;
    }

    Value *src1ValueToSpecialize = src1Val;
    if(lossy)
    {
        // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
        // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
        // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
        // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
        // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
        // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
        src1ValueToSpecialize = src1OriginalVal;
    }

    // Make sure the srcs are specialized
    IR::Opnd *src1 = instr->GetSrc1();
    this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, lossy);

    if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
    {
        GenerateBailAtOperation(&instr, bailOutKind);
    }

    if (!skipDst)
    {
        IR::Opnd *dst = instr->GetDst();
        if (dst)
        {
            AssertMsg(!(isTransfer && !this->IsLoopPrePass()) || min == newMin && max == newMax, "If this is just a copy, old/new min/max should be the same");
            TypeSpecializeIntDst(
                instr,
                originalOpCode,
                isTransfer ? src1Val : nullptr,
                src1Val,
                nullptr,
                bailOutKind,
                newMin,
                newMax,
                pDstVal,
                addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
        }
    }

    if(bailOutKind == IR::BailOutInvalid)
    {
        GOPT_TRACE(_u("Type specialized to INT\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
        if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
        {
            Output::Print(_u("Type specialized to INT: "));
            Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
        }
#endif
    }
    else
    {
        GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
        if(bailOutKind & IR::BailOutOnOverflow)
        {
            GOPT_TRACE(_u("    Overflow\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
        if(bailOutKind & IR::BailOutOnNegativeZero)
        {
            GOPT_TRACE(_u("    Zero\n"));
#if ENABLE_DEBUG_CONFIG_OPTIONS
            if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
            {
                Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
                Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
            }
#endif
        }
    }

    return true;
}
  7386. void
  7387. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  7388. {
  7389. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, ValueType::GetInt(IntConstantBounds(newMin, newMax).IsLikelyTaggable()), newMin, newMax, pDstVal, addSubConstantInfo);
  7390. }
  7391. void
  7392. GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
  7393. {
  7394. this->TypeSpecializeIntDst(instr, originalOpCode, valToTransfer, src1Value, src2Value, bailOutKind, valueType, 0, 0, pDstVal, addSubConstantInfo);
  7395. }
// Type-specializes the destination of 'instr' to int32 and installs a value for it in the
// current block's value table.
//
// Parameters:
//   instr             - the instruction whose dst is being specialized.
//   originalOpCode    - the pre-lowering opcode (e.g. Add_A); used to decide how the
//                       "negative zero prevented by bailout" flag propagates to the dst value.
//   valToTransfer     - non-null when the instruction is effectively a copy; must equal
//                       src1Value (asserted below), and the dst simply inherits that value.
//   src1Value/src2Value - source values; consulted for prepass precision and -0 tracking.
//   bailOutKind       - bailout kind attached to the specialized instruction; checked for
//                       IR::BailOutOnNegativeZero for Neg/Mul/Div/Rem.
//   valueType         - intended dst value type; either definitely Int, or Number-and-likely-Int
//                       with newMin == newMax == 0 (asserted below).
//   newMin/newMax     - int range for the dst when the value info is precise.
//   pDstVal           - out: receives the Value created/transferred for the dst.
//   addSubConstantInfo - when non-null (and HasInfo()), describes an add/sub with a constant so
//                        relative int bounds can be tracked for the dst.
void
GlobOpt::TypeSpecializeIntDst(IR::Instr* instr, Js::OpCode originalOpCode, Value* valToTransfer, Value *const src1Value, Value *const src2Value, const IR::BailOutKind bailOutKind, ValueType valueType, int32 newMin, int32 newMax, Value** pDstVal, const AddSubConstantInfo *const addSubConstantInfo)
{
    Assert(valueType.IsInt() || (valueType.IsNumber() && valueType.IsLikelyInt() && newMin == 0 && newMax == 0));
    Assert(!valToTransfer || valToTransfer == src1Value);
    Assert(!addSubConstantInfo || addSubConstantInfo->HasInfo());

    IR::Opnd *dst = instr->GetDst();
    Assert(dst);

    // On the loop prepass the dst's value may differ between iterations, so derive a possibly
    // widened value type and find out whether the range info can be trusted as precise.
    bool isValueInfoPrecise;
    if(IsLoopPrePass())
    {
        valueType = GetPrepassValueTypeForDst(valueType, instr, src1Value, src2Value, &isValueInfoPrecise);
    }
    else
    {
        isValueInfoPrecise = true;
    }

    // If dst has a circular reference in a loop, it probably won't get specialized. Don't mark the dst as type-specialized on
    // the pre-pass. With aggressive int spec though, it will take care of bailing out if necessary so there's no need to assume
    // that the dst will be a var even if it's live on the back-edge. Also if the op always produces an int32, then there's no
    // ambiguity in the dst's value type even in the prepass.
    if (!DoAggressiveIntTypeSpec() && this->IsLoopPrePass() && !valueType.IsInt())
    {
        if (dst->IsRegOpnd())
        {
            this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
        }
        return;
    }

    const IntBounds *dstBounds = nullptr;
    if(addSubConstantInfo && !addSubConstantInfo->SrcValueIsLikelyConstant() && DoTrackRelativeIntBounds())
    {
        Assert(!ignoredIntOverflowForCurrentInstr);

        // Track bounds for add or sub with a constant. For instance, consider (b = a + 2). The value of 'b' should track that
        // it is equal to (the value of 'a') + 2. Additionally, the value of 'b' should inherit the bounds of 'a', offset by
        // the constant value.
        if(!valueType.IsInt() || !isValueInfoPrecise)
        {
            // Range can't be trusted; fall back to the full int32 range for the constant bounds.
            newMin = INT32_MIN;
            newMax = INT32_MAX;
        }
        dstBounds =
            IntBounds::Add(
                addSubConstantInfo->SrcValue(),
                addSubConstantInfo->Offset(),
                isValueInfoPrecise,
                IntConstantBounds(newMin, newMax),
                alloc);
    }

    // Src1's value could change later in the loop, so the value wouldn't be the same for each
    // iteration. Since we don't iterate over loops "while (!changed)", go conservative on the
    // pre-pass.
    if (valToTransfer)
    {
        // If this is just a copy, no need for creating a new value.
        Assert(!addSubConstantInfo);
        *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
        CurrentBlockData()->InsertNewValue(*pDstVal, dst);
    }
    else if (valueType.IsInt() && isValueInfoPrecise)
    {
        // Definitely-int dst with a trustworthy range: create a bounded or ranged int value.
        // If zero is in the range, work out whether a -0 result was already prevented by a
        // bailout, so downstream code need not re-check for negative zero.
        bool wasNegativeZeroPreventedByBailout = false;
        if(newMin <= 0 && newMax >= 0)
        {
            switch(originalOpCode)
            {
                case Js::OpCode::Add_A:
                    // -0 + -0 == -0; both sources must have had -0 prevented for the sum to be safe.
                    Assert(src1Value);
                    Assert(src2Value);
                    wasNegativeZeroPreventedByBailout =
                        src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout() &&
                        src2Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Sub_A:
                    // -0 - 0 == -0; only src1's -0 matters for the difference.
                    Assert(src1Value);
                    wasNegativeZeroPreventedByBailout = src1Value->GetValueInfo()->WasNegativeZeroPreventedByBailout();
                    break;

                case Js::OpCode::Neg_A:
                case Js::OpCode::Mul_A:
                case Js::OpCode::Div_A:
                case Js::OpCode::Rem_A:
                    // These ops carry their own negative-zero bailout; the flag on the
                    // instruction tells us whether -0 was prevented.
                    wasNegativeZeroPreventedByBailout = !!(bailOutKind & IR::BailOutOnNegativeZero);
                    break;
            }
        }

        *pDstVal =
            dstBounds
                ? NewIntBoundedValue(valueType, dstBounds, wasNegativeZeroPreventedByBailout, nullptr)
                : NewIntRangeValue(newMin, newMax, wasNegativeZeroPreventedByBailout, nullptr);
    }
    else
    {
        // Not a precise definite int (e.g. prepass widened it): keep the bounds if we built
        // them, otherwise fall back to a generic value of the computed type.
        *pDstVal = dstBounds ? NewIntBoundedValue(valueType, dstBounds, false, nullptr) : NewGenericValue(valueType);
    }

    if(addSubConstantInfo || updateInductionVariableValueNumber)
    {
        // Record the add/sub-with-constant relationship for induction-variable tracking.
        TrackIntSpecializedAddSubConstant(instr, addSubConstantInfo, *pDstVal, !!dstBounds);
    }

    CurrentBlockData()->SetValue(*pDstVal, dst);

    AssertMsg(dst->IsRegOpnd(), "What else?");
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
}
  7500. bool
  7501. GlobOpt::TypeSpecializeBinary(IR::Instr **pInstr, Value **pSrc1Val, Value **pSrc2Val, Value **pDstVal, Value *const src1OriginalVal, Value *const src2OriginalVal, bool *redoTypeSpecRef)
  7502. {
  7503. IR::Instr *&instr = *pInstr;
  7504. int32 min1 = INT32_MIN, max1 = INT32_MAX, min2 = INT32_MIN, max2 = INT32_MAX, newMin, newMax, tmp;
  7505. Js::OpCode opcode;
  7506. Value *&src1Val = *pSrc1Val;
  7507. Value *&src2Val = *pSrc2Val;
  7508. // We don't need to do typespec for asmjs
  7509. if (IsTypeSpecPhaseOff(this->func) || GetIsAsmJSFunc())
  7510. {
  7511. return false;
  7512. }
  7513. if (OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
  7514. {
  7515. this->TypeSpecializeInlineBuiltInBinary(pInstr, src1Val, src2Val, pDstVal, src1OriginalVal, src2OriginalVal);
  7516. return true;
  7517. }
  7518. if (src1Val)
  7519. {
  7520. src1Val->GetValueInfo()->GetIntValMinMax(&min1, &max1, this->DoAggressiveIntTypeSpec());
  7521. }
  7522. if (src2Val)
  7523. {
  7524. src2Val->GetValueInfo()->GetIntValMinMax(&min2, &max2, this->DoAggressiveIntTypeSpec());
  7525. }
  7526. // Type specialize binary operators to int32
  7527. bool src1Lossy = true;
  7528. bool src2Lossy = true;
  7529. IR::BailOutKind bailOutKind = IR::BailOutInvalid;
  7530. bool ignoredIntOverflow = this->ignoredIntOverflowForCurrentInstr;
  7531. bool ignoredNegativeZero = false;
  7532. bool skipSrc2 = false;
  7533. bool skipDst = false;
  7534. bool needsBoolConv = false;
  7535. AddSubConstantInfo addSubConstantInfo;
  7536. switch (instr->m_opcode)
  7537. {
  7538. case Js::OpCode::Or_A:
  7539. if (!DoLossyIntTypeSpec())
  7540. {
  7541. return false;
  7542. }
  7543. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7544. opcode = Js::OpCode::Or_I4;
  7545. break;
  7546. case Js::OpCode::And_A:
  7547. if (!DoLossyIntTypeSpec())
  7548. {
  7549. return false;
  7550. }
  7551. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7552. opcode = Js::OpCode::And_I4;
  7553. break;
  7554. case Js::OpCode::Xor_A:
  7555. if (!DoLossyIntTypeSpec())
  7556. {
  7557. return false;
  7558. }
  7559. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7560. opcode = Js::OpCode::Xor_I4;
  7561. break;
  7562. case Js::OpCode::Shl_A:
  7563. if (!DoLossyIntTypeSpec())
  7564. {
  7565. return false;
  7566. }
  7567. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7568. opcode = Js::OpCode::Shl_I4;
  7569. break;
  7570. case Js::OpCode::Shr_A:
  7571. if (!DoLossyIntTypeSpec())
  7572. {
  7573. return false;
  7574. }
  7575. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7576. opcode = Js::OpCode::Shr_I4;
  7577. break;
  7578. case Js::OpCode::ShrU_A:
  7579. if (!DoLossyIntTypeSpec())
  7580. {
  7581. return false;
  7582. }
  7583. if (min1 < 0 && IntConstantBounds(min2, max2).And_0x1f().Contains(0))
  7584. {
  7585. // Src1 may be too large to represent as a signed int32, and src2 may be zero. Unless the resulting value is only
  7586. // used as a signed int32 (hence allowing us to ignore the result's sign), don't specialize the instruction.
  7587. if (!instr->ignoreIntOverflow)
  7588. return false;
  7589. ignoredIntOverflow = true;
  7590. }
  7591. this->PropagateIntRangeBinary(instr, min1, max1, min2, max2, &newMin, &newMax);
  7592. opcode = Js::OpCode::ShrU_I4;
  7593. break;
  7594. case Js::OpCode::BrUnLe_A:
  7595. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7596. // int already, so require that both sources are likely int for folding.
  7597. if (DoConstFold() &&
  7598. !IsLoopPrePass() &&
  7599. TryOptConstFoldBrUnsignedGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  7600. {
  7601. return true;
  7602. }
  7603. if (min1 >= 0 && min2 >= 0)
  7604. {
  7605. // Only handle positive values since this is unsigned...
  7606. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7607. // (INT32_MIN, INT32_MAX), so we're good.
  7608. Assert(src1Val);
  7609. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7610. Assert(src2Val);
  7611. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7612. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  7613. }
  7614. if (!DoLossyIntTypeSpec())
  7615. {
  7616. return false;
  7617. }
  7618. newMin = newMax = 0;
  7619. opcode = Js::OpCode::BrUnLe_I4;
  7620. break;
  7621. case Js::OpCode::BrUnLt_A:
  7622. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7623. // int already, so require that both sources are likely int for folding.
  7624. if (DoConstFold() &&
  7625. !IsLoopPrePass() &&
  7626. TryOptConstFoldBrUnsignedLessThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  7627. {
  7628. return true;
  7629. }
  7630. if (min1 >= 0 && min2 >= 0)
  7631. {
  7632. // Only handle positive values since this is unsigned...
  7633. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7634. // (INT32_MIN, INT32_MAX), so we're good.
  7635. Assert(src1Val);
  7636. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7637. Assert(src2Val);
  7638. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7639. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  7640. }
  7641. if (!DoLossyIntTypeSpec())
  7642. {
  7643. return false;
  7644. }
  7645. newMin = newMax = 0;
  7646. opcode = Js::OpCode::BrUnLt_I4;
  7647. break;
  7648. case Js::OpCode::BrUnGe_A:
  7649. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7650. // int already, so require that both sources are likely int for folding.
  7651. if (DoConstFold() &&
  7652. !IsLoopPrePass() &&
  7653. TryOptConstFoldBrUnsignedLessThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  7654. {
  7655. return true;
  7656. }
  7657. if (min1 >= 0 && min2 >= 0)
  7658. {
  7659. // Only handle positive values since this is unsigned...
  7660. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7661. // (INT32_MIN, INT32_MAX), so we're good.
  7662. Assert(src1Val);
  7663. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7664. Assert(src2Val);
  7665. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7666. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  7667. }
  7668. if (!DoLossyIntTypeSpec())
  7669. {
  7670. return false;
  7671. }
  7672. newMin = newMax = 0;
  7673. opcode = Js::OpCode::BrUnGe_I4;
  7674. break;
  7675. case Js::OpCode::BrUnGt_A:
  7676. // Folding the branch based on bounds will attempt a lossless int32 conversion of the sources if they are not definitely
  7677. // int already, so require that both sources are likely int for folding.
  7678. if (DoConstFold() &&
  7679. !IsLoopPrePass() &&
  7680. TryOptConstFoldBrUnsignedGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  7681. {
  7682. return true;
  7683. }
  7684. if (min1 >= 0 && min2 >= 0)
  7685. {
  7686. // Only handle positive values since this is unsigned...
  7687. // Bounds are tracked only for likely int values. Only likely int values may have bounds that are not the defaults
  7688. // (INT32_MIN, INT32_MAX), so we're good.
  7689. Assert(src1Val);
  7690. Assert(src1Val->GetValueInfo()->IsLikelyInt());
  7691. Assert(src2Val);
  7692. Assert(src2Val->GetValueInfo()->IsLikelyInt());
  7693. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  7694. }
  7695. if (!DoLossyIntTypeSpec())
  7696. {
  7697. return false;
  7698. }
  7699. newMin = newMax = 0;
  7700. opcode = Js::OpCode::BrUnGt_I4;
  7701. break;
  7702. case Js::OpCode::CmUnLe_A:
  7703. if (!DoLossyIntTypeSpec())
  7704. {
  7705. return false;
  7706. }
  7707. newMin = 0;
  7708. newMax = 1;
  7709. opcode = Js::OpCode::CmUnLe_I4;
  7710. needsBoolConv = true;
  7711. break;
  7712. case Js::OpCode::CmUnLt_A:
  7713. if (!DoLossyIntTypeSpec())
  7714. {
  7715. return false;
  7716. }
  7717. newMin = 0;
  7718. newMax = 1;
  7719. opcode = Js::OpCode::CmUnLt_I4;
  7720. needsBoolConv = true;
  7721. break;
  7722. case Js::OpCode::CmUnGe_A:
  7723. if (!DoLossyIntTypeSpec())
  7724. {
  7725. return false;
  7726. }
  7727. newMin = 0;
  7728. newMax = 1;
  7729. opcode = Js::OpCode::CmUnGe_I4;
  7730. needsBoolConv = true;
  7731. break;
  7732. case Js::OpCode::CmUnGt_A:
  7733. if (!DoLossyIntTypeSpec())
  7734. {
  7735. return false;
  7736. }
  7737. newMin = 0;
  7738. newMax = 1;
  7739. opcode = Js::OpCode::CmUnGt_I4;
  7740. needsBoolConv = true;
  7741. break;
  7742. case Js::OpCode::Expo_A:
  7743. {
  7744. src1Val = src1OriginalVal;
  7745. src2Val = src2OriginalVal;
  7746. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  7747. }
  7748. case Js::OpCode::Div_A:
  7749. {
  7750. ValueType specializedValueType = GetDivValueType(instr, src1Val, src2Val, true);
  7751. if (specializedValueType.IsFloat())
  7752. {
  7753. // Either result is float or 1/x or cst1/cst2 where cst1%cst2 != 0
  7754. // Note: We should really constant fold cst1%cst2...
  7755. src1Val = src1OriginalVal;
  7756. src2Val = src2OriginalVal;
  7757. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  7758. }
  7759. #ifdef _M_ARM
  7760. if (!AutoSystemInfo::Data.ArmDivAvailable())
  7761. {
  7762. return false;
  7763. }
  7764. #endif
  7765. if (specializedValueType.IsInt())
  7766. {
  7767. if (max2 == 0x80000000 || (min2 == 0 && max2 == 00))
  7768. {
  7769. return false;
  7770. }
  7771. if (min1 == 0x80000000 && min2 <= -1 && max2 >= -1)
  7772. {
  7773. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  7774. // Or we know we are dividing by zero (which is weird to have because the profile data
  7775. // say we got an int)
  7776. bailOutKind = IR::BailOutOnDivOfMinInt;
  7777. }
  7778. src1Lossy = false; // Detect -0 on the sources
  7779. src2Lossy = false;
  7780. opcode = Js::OpCode::Div_I4;
  7781. Assert(!instr->GetSrc1()->IsUnsigned());
  7782. bailOutKind |= IR::BailOnDivResultNotInt;
  7783. if (max2 >= 0 && min2 <= 0)
  7784. {
  7785. // Need to check for divide by zero if the denominator range includes 0
  7786. bailOutKind |= IR::BailOutOnDivByZero;
  7787. }
  7788. if (max1 >= 0 && min1 <= 0)
  7789. {
  7790. // Numerator contains 0 so the result contains 0
  7791. newMin = 0;
  7792. newMax = 0;
  7793. if (min2 < 0)
  7794. {
  7795. // Denominator may be negative, so the result could be negative 0
  7796. if (instr->ShouldCheckForNegativeZero())
  7797. {
  7798. bailOutKind |= IR::BailOutOnNegativeZero;
  7799. }
  7800. else
  7801. {
  7802. ignoredNegativeZero = true;
  7803. }
  7804. }
  7805. }
  7806. else
  7807. {
  7808. // Initialize to invalid value, one of the condition below will update it correctly
  7809. newMin = INT_MAX;
  7810. newMax = INT_MIN;
  7811. }
  7812. // Deal with the positive and negative range separately for both the numerator and the denominator,
  7813. // and integrate to the overall min and max.
  7814. // If the result is positive (positive/positive or negative/negative):
  7815. // The min should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  7816. // divided by ---------------------------------------------------------------
  7817. // largest magnitude denominator (positive_Max2 | negative_Min2)
  7818. //
  7819. // The max should be the largest magnitude numerator (positive_Max1 | negative_Max1)
  7820. // divided by ---------------------------------------------------------------
  7821. // smallest magnitude denominator (positive_Min2 | negative_Max2)
  7822. // If the result is negative (positive/negative or positive/negative):
  7823. // The min should be the largest magnitude numerator (positive_Max1 | negative_Min1)
  7824. // divided by ---------------------------------------------------------------
  7825. // smallest magnitude denominator (negative_Max2 | positive_Min2)
  7826. //
  7827. // The max should be the smallest magnitude numerator (positive_Min1 | negative_Max1)
  7828. // divided by ---------------------------------------------------------------
  7829. // largest magnitude denominator (negative_Min2 | positive_Max2)
  7830. // Consider: The range can be slightly more precise if we take care of the rounding
  7831. if (max1 > 0)
  7832. {
  7833. // Take only the positive numerator range
  7834. int32 positive_Min1 = max(1, min1);
  7835. int32 positive_Max1 = max1;
  7836. if (max2 > 0)
  7837. {
  7838. // Take only the positive denominator range
  7839. int32 positive_Min2 = max(1, min2);
  7840. int32 positive_Max2 = max2;
  7841. // Positive / Positive
  7842. int32 quadrant1_Min = positive_Min1 <= positive_Max2? 1 : positive_Min1 / positive_Max2;
  7843. int32 quadrant1_Max = positive_Max1 <= positive_Min2? 1 : positive_Max1 / positive_Min2;
  7844. Assert(1 <= quadrant1_Min && quadrant1_Min <= quadrant1_Max);
  7845. // The result should positive
  7846. newMin = min(newMin, quadrant1_Min);
  7847. newMax = max(newMax, quadrant1_Max);
  7848. }
  7849. if (min2 < 0)
  7850. {
  7851. // Take only the negative denominator range
  7852. int32 negative_Min2 = min2;
  7853. int32 negative_Max2 = min(-1, max2);
  7854. // Positive / Negative
  7855. int32 quadrant2_Min = -positive_Max1 >= negative_Max2? -1 : positive_Max1 / negative_Max2;
  7856. int32 quadrant2_Max = -positive_Min1 >= negative_Min2? -1 : positive_Min1 / negative_Min2;
  7857. // The result should negative
  7858. Assert(quadrant2_Min <= quadrant2_Max && quadrant2_Max <= -1);
  7859. newMin = min(newMin, quadrant2_Min);
  7860. newMax = max(newMax, quadrant2_Max);
  7861. }
  7862. }
  7863. if (min1 < 0)
  7864. {
  7865. // Take only the native numerator range
  7866. int32 negative_Min1 = min1;
  7867. int32 negative_Max1 = min(-1, max1);
  7868. if (max2 > 0)
  7869. {
  7870. // Take only the positive denominator range
  7871. int32 positive_Min2 = max(1, min2);
  7872. int32 positive_Max2 = max2;
  7873. // Negative / Positive
  7874. int32 quadrant4_Min = negative_Min1 >= -positive_Min2? -1 : negative_Min1 / positive_Min2;
  7875. int32 quadrant4_Max = negative_Max1 >= -positive_Max2? -1 : negative_Max1 / positive_Max2;
  7876. // The result should negative
  7877. Assert(quadrant4_Min <= quadrant4_Max && quadrant4_Max <= -1);
  7878. newMin = min(newMin, quadrant4_Min);
  7879. newMax = max(newMax, quadrant4_Max);
  7880. }
  7881. if (min2 < 0)
  7882. {
  7883. // Take only the negative denominator range
  7884. int32 negative_Min2 = min2;
  7885. int32 negative_Max2 = min(-1, max2);
  7886. int32 quadrant3_Min;
  7887. int32 quadrant3_Max;
  7888. // Negative / Negative
  7889. if (negative_Max1 == 0x80000000 && negative_Min2 == -1)
  7890. {
  7891. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : (negative_Max1+1) / negative_Min2;
  7892. }
  7893. else
  7894. {
  7895. quadrant3_Min = negative_Max1 >= negative_Min2? 1 : negative_Max1 / negative_Min2;
  7896. }
  7897. if (negative_Min1 == 0x80000000 && negative_Max2 == -1)
  7898. {
  7899. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : (negative_Min1+1) / negative_Max2;
  7900. }
  7901. else
  7902. {
  7903. quadrant3_Max = negative_Min1 >= negative_Max2? 1 : negative_Min1 / negative_Max2;
  7904. }
  7905. // The result should positive
  7906. Assert(1 <= quadrant3_Min && quadrant3_Min <= quadrant3_Max);
  7907. newMin = min(newMin, quadrant3_Min);
  7908. newMax = max(newMax, quadrant3_Max);
  7909. }
  7910. }
  7911. Assert(newMin <= newMax);
  7912. // Continue to int type spec
  7913. break;
  7914. }
  7915. }
  7916. // fall-through
  7917. default:
  7918. {
  7919. const bool involesLargeInt32 =
  7920. (src1Val && src1Val->GetValueInfo()->IsLikelyUntaggedInt()) ||
  7921. (src2Val && src2Val->GetValueInfo()->IsLikelyUntaggedInt());
  7922. const auto trySpecializeToFloat =
  7923. [&](const bool mayOverflow) -> bool
  7924. {
  7925. // It has been determined that this instruction cannot be int-specialized. Need to determine whether to attempt
  7926. // to float-specialize the instruction, or leave it unspecialized.
  7927. if((involesLargeInt32
  7928. #if INT32VAR
  7929. && mayOverflow
  7930. #endif
  7931. ) || (instr->m_opcode == Js::OpCode::Mul_A && !this->DoAggressiveMulIntTypeSpec())
  7932. )
  7933. {
  7934. // An input range is completely outside the range of an int31 and the operation is likely to overflow.
  7935. // Additionally, on 32-bit platforms, the value is untaggable and will be a JavascriptNumber, which is
  7936. // significantly slower to use in an unspecialized operation compared to a tagged int. So, try to
  7937. // float-specialize the instruction.
  7938. src1Val = src1OriginalVal;
  7939. src2Val = src2OriginalVal;
  7940. return TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  7941. }
  7942. return false;
  7943. };
  7944. if (instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
  7945. {
  7946. if ((src1Val && src1Val->GetValueInfo()->IsLikelyFloat()) || (src2Val && src2Val->GetValueInfo()->IsLikelyFloat()))
  7947. {
  7948. // Try to type specialize to float
  7949. src1Val = src1OriginalVal;
  7950. src2Val = src2OriginalVal;
  7951. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  7952. }
  7953. if (src1Val == nullptr ||
  7954. src2Val == nullptr ||
  7955. !src1Val->GetValueInfo()->IsLikelyInt() ||
  7956. !src2Val->GetValueInfo()->IsLikelyInt() ||
  7957. (
  7958. !DoAggressiveIntTypeSpec() &&
  7959. (
  7960. !(src1Val->GetValueInfo()->IsInt() || CurrentBlockData()->IsSwitchInt32TypeSpecialized(instr)) ||
  7961. !src2Val->GetValueInfo()->IsInt()
  7962. )
  7963. ) ||
  7964. (instr->GetSrc1()->IsRegOpnd() && instr->GetSrc1()->AsRegOpnd()->m_sym->m_isNotInt) ||
  7965. (instr->GetSrc2()->IsRegOpnd() && instr->GetSrc2()->AsRegOpnd()->m_sym->m_isNotInt))
  7966. {
  7967. return trySpecializeToFloat(true);
  7968. }
  7969. }
  7970. // Try to type specialize to int32
  7971. // If one of the values is a float constant with a value that fits in a uint32 but not an int32,
  7972. // and the instruction can ignore int overflow, the source value for the purposes of int specialization
  7973. // would have been changed to an int constant value by ignoring overflow. But, the conversion is still lossy.
  7974. if (!(src1OriginalVal && src1OriginalVal->GetValueInfo()->IsFloatConstant() && src1Val && src1Val->GetValueInfo()->HasIntConstantValue()))
  7975. {
  7976. src1Lossy = false;
  7977. }
  7978. if (!(src2OriginalVal && src2OriginalVal->GetValueInfo()->IsFloatConstant() && src2Val && src2Val->GetValueInfo()->HasIntConstantValue()))
  7979. {
  7980. src2Lossy = false;
  7981. }
  7982. switch(instr->m_opcode)
  7983. {
  7984. case Js::OpCode::ArgOut_A_InlineBuiltIn:
  7985. // If the src is already type-specialized, if we don't type-specialize ArgOut_A_InlineBuiltIn instr, we'll get additional ToVar.
  7986. // So, to avoid that, type-specialize the ArgOut_A_InlineBuiltIn instr.
  7987. // Else we don't need to type-specialize the instr, we are fine with src being Var.
  7988. if (instr->GetSrc1()->IsRegOpnd())
  7989. {
  7990. StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
  7991. if (CurrentBlockData()->IsInt32TypeSpecialized(sym))
  7992. {
  7993. opcode = instr->m_opcode;
  7994. skipDst = true; // We should keep dst as is, otherwise the link opnd for next ArgOut/InlineBuiltInStart would be broken.
  7995. skipSrc2 = true; // src2 is linkOpnd. We don't need to type-specialize it.
  7996. newMin = min1; newMax = max1; // Values don't matter, these are unused.
  7997. goto LOutsideSwitch; // Continue to int-type-specialize.
  7998. }
  7999. else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
  8000. {
  8001. src1Val = src1OriginalVal;
  8002. src2Val = src2OriginalVal;
  8003. return this->TypeSpecializeFloatBinary(instr, src1Val, src2Val, pDstVal);
  8004. }
  8005. #ifdef ENABLE_SIMDJS
  8006. else if (CurrentBlockData()->IsSimd128F4TypeSpecialized(sym))
  8007. {
  8008. // SIMD_JS
  8009. // We should be already using the SIMD type-spec sym. See TypeSpecializeSimd128.
  8010. Assert(IRType_IsSimd128(instr->GetSrc1()->GetType()));
  8011. }
  8012. #endif
  8013. }
  8014. return false;
  8015. case Js::OpCode::Add_A:
  8016. do // while(false)
  8017. {
  8018. const auto CannotOverflowBasedOnRelativeBounds = [&](int32 *const constantValueRef)
  8019. {
  8020. Assert(constantValueRef);
  8021. if(min2 == max2 &&
  8022. src1Val->GetValueInfo()->IsIntBounded() &&
  8023. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min2))
  8024. {
  8025. *constantValueRef = min2;
  8026. return true;
  8027. }
  8028. else if(
  8029. min1 == max1 &&
  8030. src2Val->GetValueInfo()->IsIntBounded() &&
  8031. src2Val->GetValueInfo()->AsIntBounded()->Bounds()->AddCannotOverflowBasedOnRelativeBounds(min1))
  8032. {
  8033. *constantValueRef = min1;
  8034. return true;
  8035. }
  8036. return false;
  8037. };
  8038. if (Int32Math::Add(min1, min2, &newMin))
  8039. {
  8040. int32 constantSrcValue;
  8041. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  8042. {
  8043. newMin = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  8044. }
  8045. else if(instr->ShouldCheckForIntOverflow())
  8046. {
  8047. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8048. {
  8049. // May overflow
  8050. return trySpecializeToFloat(true);
  8051. }
  8052. bailOutKind |= IR::BailOutOnOverflow;
  8053. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  8054. }
  8055. else
  8056. {
  8057. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since
  8058. // overflow causes the value to wrap around, and we don't have a way to specify a lower and upper
  8059. // range of ints, we use the full range of int32s.
  8060. ignoredIntOverflow = true;
  8061. newMin = INT32_MIN;
  8062. newMax = INT32_MAX;
  8063. break;
  8064. }
  8065. }
  8066. if (Int32Math::Add(max1, max2, &newMax))
  8067. {
  8068. int32 constantSrcValue;
  8069. if(CannotOverflowBasedOnRelativeBounds(&constantSrcValue))
  8070. {
  8071. newMax = constantSrcValue >= 0 ? INT32_MAX : INT32_MIN;
  8072. }
  8073. else if(instr->ShouldCheckForIntOverflow())
  8074. {
  8075. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8076. {
  8077. // May overflow
  8078. return trySpecializeToFloat(true);
  8079. }
  8080. bailOutKind |= IR::BailOutOnOverflow;
  8081. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  8082. }
  8083. else
  8084. {
  8085. // See comment about ignoring overflow above
  8086. ignoredIntOverflow = true;
  8087. newMin = INT32_MIN;
  8088. newMax = INT32_MAX;
  8089. break;
  8090. }
  8091. }
  8092. if(bailOutKind & IR::BailOutOnOverflow)
  8093. {
  8094. Assert(bailOutKind == IR::BailOutOnOverflow);
  8095. Assert(instr->ShouldCheckForIntOverflow());
  8096. int32 temp;
  8097. if(Int32Math::Add(
  8098. Int32Math::NearestInRangeTo(0, min1, max1),
  8099. Int32Math::NearestInRangeTo(0, min2, max2),
  8100. &temp))
  8101. {
  8102. // Always overflows
  8103. return trySpecializeToFloat(true);
  8104. }
  8105. }
  8106. } while(false);
  8107. if (!this->IsLoopPrePass() && newMin == newMax && bailOutKind == IR::BailOutInvalid)
  8108. {
  8109. // Take care of Add with zero here, since we know we're dealing with 2 numbers.
  8110. this->CaptureByteCodeSymUses(instr);
  8111. IR::Opnd *src;
  8112. bool isAddZero = true;
  8113. int32 intConstantValue;
  8114. if (src1Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  8115. {
  8116. src = instr->UnlinkSrc2();
  8117. instr->FreeSrc1();
  8118. }
  8119. else if (src2Val->GetValueInfo()->TryGetIntConstantValue(&intConstantValue) && intConstantValue == 0)
  8120. {
  8121. src = instr->UnlinkSrc1();
  8122. instr->FreeSrc2();
  8123. }
  8124. else
  8125. {
  8126. // This should have been handled by const folding, unless:
  8127. // - A source's value was substituted with a different value here, which is after const folding happened
  8128. // - A value is not definitely int, but once converted to definite int, it would be zero due to a
  8129. // condition in the source code such as if(a === 0). Ideally, we would specialize the sources and
  8130. // remove the add, but doesn't seem too important for now.
  8131. Assert(
  8132. !DoConstFold() ||
  8133. src1Val != src1OriginalVal ||
  8134. src2Val != src2OriginalVal ||
  8135. !src1Val->GetValueInfo()->IsInt() ||
  8136. !src2Val->GetValueInfo()->IsInt());
  8137. isAddZero = false;
  8138. src = nullptr;
  8139. }
  8140. if (isAddZero)
  8141. {
  8142. IR::Instr *newInstr = IR::Instr::New(Js::OpCode::Ld_A, instr->UnlinkDst(), src, instr->m_func);
  8143. newInstr->SetByteCodeOffset(instr);
  8144. instr->m_opcode = Js::OpCode::Nop;
  8145. this->currentBlock->InsertInstrAfter(newInstr, instr);
  8146. return true;
  8147. }
  8148. }
  8149. if(!ignoredIntOverflow)
  8150. {
  8151. if(min2 == max2 &&
  8152. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  8153. instr->GetSrc1()->IsRegOpnd())
  8154. {
  8155. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, min2);
  8156. }
  8157. else if(
  8158. min1 == max1 &&
  8159. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc1(), src1Val)) &&
  8160. instr->GetSrc2()->IsRegOpnd())
  8161. {
  8162. addSubConstantInfo.Set(instr->GetSrc2()->AsRegOpnd()->m_sym, src2Val, min2 == max2, min1);
  8163. }
  8164. }
  8165. opcode = Js::OpCode::Add_I4;
  8166. break;
  8167. case Js::OpCode::Sub_A:
  8168. do // while(false)
  8169. {
  8170. const auto CannotOverflowBasedOnRelativeBounds = [&]()
  8171. {
  8172. return
  8173. min2 == max2 &&
  8174. src1Val->GetValueInfo()->IsIntBounded() &&
  8175. src1Val->GetValueInfo()->AsIntBounded()->Bounds()->SubCannotOverflowBasedOnRelativeBounds(min2);
  8176. };
  8177. if (Int32Math::Sub(min1, max2, &newMin))
  8178. {
  8179. if(CannotOverflowBasedOnRelativeBounds())
  8180. {
  8181. Assert(min2 == max2);
  8182. newMin = min2 >= 0 ? INT32_MIN : INT32_MAX;
  8183. }
  8184. else if(instr->ShouldCheckForIntOverflow())
  8185. {
  8186. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8187. {
  8188. // May overflow
  8189. return trySpecializeToFloat(true);
  8190. }
  8191. bailOutKind |= IR::BailOutOnOverflow;
  8192. newMin = min1 < 0 ? INT32_MIN : INT32_MAX;
  8193. }
  8194. else
  8195. {
  8196. // When ignoring overflow, the range needs to account for overflow. For any Add or Sub, since overflow
  8197. // causes the value to wrap around, and we don't have a way to specify a lower and upper range of ints,
  8198. // we use the full range of int32s.
  8199. ignoredIntOverflow = true;
  8200. newMin = INT32_MIN;
  8201. newMax = INT32_MAX;
  8202. break;
  8203. }
  8204. }
  8205. if (Int32Math::Sub(max1, min2, &newMax))
  8206. {
  8207. if(CannotOverflowBasedOnRelativeBounds())
  8208. {
  8209. Assert(min2 == max2);
  8210. newMax = min2 >= 0 ? INT32_MIN: INT32_MAX;
  8211. }
  8212. else if(instr->ShouldCheckForIntOverflow())
  8213. {
  8214. if(involesLargeInt32 || !DoAggressiveIntTypeSpec())
  8215. {
  8216. // May overflow
  8217. return trySpecializeToFloat(true);
  8218. }
  8219. bailOutKind |= IR::BailOutOnOverflow;
  8220. newMax = max1 < 0 ? INT32_MIN : INT32_MAX;
  8221. }
  8222. else
  8223. {
  8224. // See comment about ignoring overflow above
  8225. ignoredIntOverflow = true;
  8226. newMin = INT32_MIN;
  8227. newMax = INT32_MAX;
  8228. break;
  8229. }
  8230. }
  8231. if(bailOutKind & IR::BailOutOnOverflow)
  8232. {
  8233. Assert(bailOutKind == IR::BailOutOnOverflow);
  8234. Assert(instr->ShouldCheckForIntOverflow());
  8235. int32 temp;
  8236. if(Int32Math::Sub(
  8237. Int32Math::NearestInRangeTo(-1, min1, max1),
  8238. Int32Math::NearestInRangeTo(0, min2, max2),
  8239. &temp))
  8240. {
  8241. // Always overflows
  8242. return trySpecializeToFloat(true);
  8243. }
  8244. }
  8245. } while(false);
  8246. if(!ignoredIntOverflow &&
  8247. min2 == max2 &&
  8248. min2 != INT32_MIN &&
  8249. (!IsLoopPrePass() || IsPrepassSrcValueInfoPrecise(instr->GetSrc2(), src2Val)) &&
  8250. instr->GetSrc1()->IsRegOpnd())
  8251. {
  8252. addSubConstantInfo.Set(instr->GetSrc1()->AsRegOpnd()->m_sym, src1Val, min1 == max1, -min2);
  8253. }
  8254. opcode = Js::OpCode::Sub_I4;
  8255. break;
  8256. case Js::OpCode::Mul_A:
  8257. {
  8258. if (Int32Math::Mul(min1, min2, &newMin))
  8259. {
  8260. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  8261. {
  8262. // May overflow
  8263. return trySpecializeToFloat(true);
  8264. }
  8265. bailOutKind |= IR::BailOutOnMulOverflow;
  8266. newMin = (min1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  8267. }
  8268. newMax = newMin;
  8269. if (Int32Math::Mul(max1, max2, &tmp))
  8270. {
  8271. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  8272. {
  8273. // May overflow
  8274. return trySpecializeToFloat(true);
  8275. }
  8276. bailOutKind |= IR::BailOutOnMulOverflow;
  8277. tmp = (max1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  8278. }
  8279. newMin = min(newMin, tmp);
  8280. newMax = max(newMax, tmp);
  8281. if (Int32Math::Mul(min1, max2, &tmp))
  8282. {
  8283. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  8284. {
  8285. // May overflow
  8286. return trySpecializeToFloat(true);
  8287. }
  8288. bailOutKind |= IR::BailOutOnMulOverflow;
  8289. tmp = (min1 < 0) ^ (max2 < 0) ? INT32_MIN : INT32_MAX;
  8290. }
  8291. newMin = min(newMin, tmp);
  8292. newMax = max(newMax, tmp);
  8293. if (Int32Math::Mul(max1, min2, &tmp))
  8294. {
  8295. if (involesLargeInt32 || !DoAggressiveMulIntTypeSpec() || !DoAggressiveIntTypeSpec())
  8296. {
  8297. // May overflow
  8298. return trySpecializeToFloat(true);
  8299. }
  8300. bailOutKind |= IR::BailOutOnMulOverflow;
  8301. tmp = (max1 < 0) ^ (min2 < 0) ? INT32_MIN : INT32_MAX;
  8302. }
  8303. newMin = min(newMin, tmp);
  8304. newMax = max(newMax, tmp);
  8305. if (bailOutKind & IR::BailOutOnMulOverflow)
  8306. {
  8307. // CSE only if two MULs have the same overflow check behavior.
  8308. // Currently this is set to be ignore int32 overflow, but not 53-bit, or int32 overflow matters.
  8309. if (!instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow())
  8310. {
  8311. // If we allow int to overflow then there can be anything in the resulting int
  8312. newMin = IntConstMin;
  8313. newMax = IntConstMax;
  8314. ignoredIntOverflow = true;
  8315. }
  8316. int32 temp, overflowValue;
  8317. if (Int32Math::Mul(
  8318. Int32Math::NearestInRangeTo(0, min1, max1),
  8319. Int32Math::NearestInRangeTo(0, min2, max2),
  8320. &temp,
  8321. &overflowValue))
  8322. {
  8323. Assert(instr->ignoreOverflowBitCount >= 32);
  8324. int overflowMatters = 64 - instr->ignoreOverflowBitCount;
  8325. if (!ignoredIntOverflow ||
  8326. // Use shift to check high bits in case its negative
  8327. ((overflowValue << overflowMatters) >> overflowMatters) != overflowValue
  8328. )
  8329. {
  8330. // Always overflows
  8331. return trySpecializeToFloat(true);
  8332. }
  8333. }
  8334. }
  8335. if (newMin <= 0 && newMax >= 0 && // New range crosses zero
  8336. (min1 < 0 || min2 < 0) && // An operand's range contains a negative integer
  8337. !(min1 > 0 || min2 > 0) && // Neither operand's range contains only positive integers
  8338. !instr->GetSrc1()->IsEqual(instr->GetSrc2())) // The operands don't have the same value
  8339. {
  8340. if (instr->ShouldCheckForNegativeZero())
  8341. {
  8342. // -0 matters since the sym is not a local, or is used in a way in which -0 would differ from +0
  8343. if (!DoAggressiveIntTypeSpec())
  8344. {
  8345. // May result in -0
  8346. return trySpecializeToFloat(false);
  8347. }
  8348. if (((min1 == 0 && max1 == 0) || (min2 == 0 && max2 == 0)) && (max1 < 0 || max2 < 0))
  8349. {
  8350. // Always results in -0
  8351. return trySpecializeToFloat(false);
  8352. }
  8353. bailOutKind |= IR::BailOutOnNegativeZero;
  8354. }
  8355. else
  8356. {
  8357. ignoredNegativeZero = true;
  8358. }
  8359. }
  8360. opcode = Js::OpCode::Mul_I4;
  8361. break;
  8362. }
  8363. case Js::OpCode::Rem_A:
  8364. {
  8365. IR::Opnd* src2 = instr->GetSrc2();
  8366. if (!this->IsLoopPrePass() && min2 == max2 && min1 >= 0)
  8367. {
  8368. int32 value = min2;
  8369. if (value == (1 << Math::Log2(value)) && src2->IsAddrOpnd())
  8370. {
  8371. Assert(src2->AsAddrOpnd()->IsVar());
  8372. instr->m_opcode = Js::OpCode::And_A;
  8373. src2->AsAddrOpnd()->SetAddress(Js::TaggedInt::ToVarUnchecked(value - 1),
  8374. IR::AddrOpndKindConstantVar);
  8375. *pSrc2Val = GetIntConstantValue(value - 1, instr);
  8376. src2Val = *pSrc2Val;
  8377. return this->TypeSpecializeBinary(&instr, pSrc1Val, pSrc2Val, pDstVal, src1OriginalVal, src2Val, redoTypeSpecRef);
  8378. }
  8379. }
  8380. #ifdef _M_ARM
  8381. if (!AutoSystemInfo::Data.ArmDivAvailable())
  8382. {
  8383. return false;
  8384. }
  8385. #endif
  8386. if (min1 < 0)
  8387. {
  8388. // The most negative it can be is min1, unless limited by min2/max2
  8389. int32 negMaxAbs2;
  8390. if (min2 == INT32_MIN)
  8391. {
  8392. negMaxAbs2 = INT32_MIN;
  8393. }
  8394. else
  8395. {
  8396. negMaxAbs2 = -max(abs(min2), abs(max2)) + 1;
  8397. }
  8398. newMin = max(min1, negMaxAbs2);
  8399. }
  8400. else
  8401. {
  8402. newMin = 0;
  8403. }
  8404. bool isModByPowerOf2 = (instr->IsProfiledInstr() && instr->m_func->HasProfileInfo() &&
  8405. instr->m_func->GetReadOnlyProfileInfo()->IsModulusOpByPowerOf2(static_cast<Js::ProfileId>(instr->AsProfiledInstr()->u.profileId)));
  8406. if(isModByPowerOf2)
  8407. {
  8408. Assert(bailOutKind == IR::BailOutInvalid);
  8409. bailOutKind = IR::BailOnModByPowerOf2;
  8410. newMin = 0;
  8411. }
  8412. else
  8413. {
  8414. if (min2 <= 0 && max2 >= 0)
  8415. {
  8416. // Consider: We could handle the zero case with a check and bailout...
  8417. return false;
  8418. }
  8419. if (min1 == 0x80000000 && (min2 <= -1 && max2 >= -1))
  8420. {
  8421. // Prevent integer overflow, as div by zero or MIN_INT / -1 will throw an exception
  8422. return false;
  8423. }
  8424. if (min1 < 0)
  8425. {
  8426. if(instr->ShouldCheckForNegativeZero())
  8427. {
  8428. if (!DoAggressiveIntTypeSpec())
  8429. {
  8430. return false;
  8431. }
  8432. bailOutKind |= IR::BailOutOnNegativeZero;
  8433. }
  8434. else
  8435. {
  8436. ignoredNegativeZero = true;
  8437. }
  8438. }
  8439. }
  8440. {
  8441. int32 absMax2;
  8442. if (min2 == INT32_MIN)
  8443. {
  8444. // abs(INT32_MIN) == INT32_MAX because of overflow
  8445. absMax2 = INT32_MAX;
  8446. }
  8447. else
  8448. {
  8449. absMax2 = max(abs(min2), abs(max2)) - 1;
  8450. }
  8451. newMax = min(absMax2, max(max1, 0));
  8452. newMax = max(newMin, newMax);
  8453. }
  8454. opcode = Js::OpCode::Rem_I4;
  8455. Assert(!instr->GetSrc1()->IsUnsigned());
  8456. break;
  8457. }
  8458. case Js::OpCode::CmEq_A:
  8459. case Js::OpCode::CmSrEq_A:
  8460. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8461. {
  8462. return false;
  8463. }
  8464. newMin = 0;
  8465. newMax = 1;
  8466. opcode = Js::OpCode::CmEq_I4;
  8467. needsBoolConv = true;
  8468. break;
  8469. case Js::OpCode::CmNeq_A:
  8470. case Js::OpCode::CmSrNeq_A:
  8471. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8472. {
  8473. return false;
  8474. }
  8475. newMin = 0;
  8476. newMax = 1;
  8477. opcode = Js::OpCode::CmNeq_I4;
  8478. needsBoolConv = true;
  8479. break;
  8480. case Js::OpCode::CmLe_A:
  8481. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8482. {
  8483. return false;
  8484. }
  8485. newMin = 0;
  8486. newMax = 1;
  8487. opcode = Js::OpCode::CmLe_I4;
  8488. needsBoolConv = true;
  8489. break;
  8490. case Js::OpCode::CmLt_A:
  8491. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8492. {
  8493. return false;
  8494. }
  8495. newMin = 0;
  8496. newMax = 1;
  8497. opcode = Js::OpCode::CmLt_I4;
  8498. needsBoolConv = true;
  8499. break;
  8500. case Js::OpCode::CmGe_A:
  8501. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8502. {
  8503. return false;
  8504. }
  8505. newMin = 0;
  8506. newMax = 1;
  8507. opcode = Js::OpCode::CmGe_I4;
  8508. needsBoolConv = true;
  8509. break;
  8510. case Js::OpCode::CmGt_A:
  8511. if (!IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val))
  8512. {
  8513. return false;
  8514. }
  8515. newMin = 0;
  8516. newMax = 1;
  8517. opcode = Js::OpCode::CmGt_I4;
  8518. needsBoolConv = true;
  8519. break;
  8520. case Js::OpCode::BrSrEq_A:
  8521. case Js::OpCode::BrEq_A:
  8522. case Js::OpCode::BrNotNeq_A:
  8523. case Js::OpCode::BrSrNotNeq_A:
  8524. {
  8525. if(DoConstFold() &&
  8526. !IsLoopPrePass() &&
  8527. TryOptConstFoldBrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  8528. {
  8529. return true;
  8530. }
  8531. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8532. UpdateIntBoundsForEqualBranch(src1Val, src2Val);
  8533. if(!specialize)
  8534. {
  8535. return false;
  8536. }
  8537. opcode = Js::OpCode::BrEq_I4;
  8538. // We'll get a warning if we don't assign a value to these...
  8539. // We'll assert if we use them and make a range where min > max
  8540. newMin = 2; newMax = 1;
  8541. break;
  8542. }
  8543. case Js::OpCode::BrSrNeq_A:
  8544. case Js::OpCode::BrNeq_A:
  8545. case Js::OpCode::BrSrNotEq_A:
  8546. case Js::OpCode::BrNotEq_A:
  8547. {
  8548. if(DoConstFold() &&
  8549. !IsLoopPrePass() &&
  8550. TryOptConstFoldBrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  8551. {
  8552. return true;
  8553. }
  8554. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8555. UpdateIntBoundsForNotEqualBranch(src1Val, src2Val);
  8556. if(!specialize)
  8557. {
  8558. return false;
  8559. }
  8560. opcode = Js::OpCode::BrNeq_I4;
  8561. // We'll get a warning if we don't assign a value to these...
  8562. // We'll assert if we use them and make a range where min > max
  8563. newMin = 2; newMax = 1;
  8564. break;
  8565. }
  8566. case Js::OpCode::BrGt_A:
  8567. case Js::OpCode::BrNotLe_A:
  8568. {
  8569. if(DoConstFold() &&
  8570. !IsLoopPrePass() &&
  8571. TryOptConstFoldBrGreaterThan(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  8572. {
  8573. return true;
  8574. }
  8575. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8576. UpdateIntBoundsForGreaterThanBranch(src1Val, src2Val);
  8577. if(!specialize)
  8578. {
  8579. return false;
  8580. }
  8581. opcode = Js::OpCode::BrGt_I4;
  8582. // We'll get a warning if we don't assign a value to these...
  8583. // We'll assert if we use them and make a range where min > max
  8584. newMin = 2; newMax = 1;
  8585. break;
  8586. }
  8587. case Js::OpCode::BrGe_A:
  8588. case Js::OpCode::BrNotLt_A:
  8589. {
  8590. if(DoConstFold() &&
  8591. !IsLoopPrePass() &&
  8592. TryOptConstFoldBrGreaterThanOrEqual(instr, true, src1Val, min1, max1, src2Val, min2, max2))
  8593. {
  8594. return true;
  8595. }
  8596. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8597. UpdateIntBoundsForGreaterThanOrEqualBranch(src1Val, src2Val);
  8598. if(!specialize)
  8599. {
  8600. return false;
  8601. }
  8602. opcode = Js::OpCode::BrGe_I4;
  8603. // We'll get a warning if we don't assign a value to these...
  8604. // We'll assert if we use them and make a range where min > max
  8605. newMin = 2; newMax = 1;
  8606. break;
  8607. }
  8608. case Js::OpCode::BrLt_A:
  8609. case Js::OpCode::BrNotGe_A:
  8610. {
  8611. if(DoConstFold() &&
  8612. !IsLoopPrePass() &&
  8613. TryOptConstFoldBrGreaterThanOrEqual(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  8614. {
  8615. return true;
  8616. }
  8617. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8618. UpdateIntBoundsForLessThanBranch(src1Val, src2Val);
  8619. if(!specialize)
  8620. {
  8621. return false;
  8622. }
  8623. opcode = Js::OpCode::BrLt_I4;
  8624. // We'll get a warning if we don't assign a value to these...
  8625. // We'll assert if we use them and make a range where min > max
  8626. newMin = 2; newMax = 1;
  8627. break;
  8628. }
  8629. case Js::OpCode::BrLe_A:
  8630. case Js::OpCode::BrNotGt_A:
  8631. {
  8632. if(DoConstFold() &&
  8633. !IsLoopPrePass() &&
  8634. TryOptConstFoldBrGreaterThan(instr, false, src1Val, min1, max1, src2Val, min2, max2))
  8635. {
  8636. return true;
  8637. }
  8638. const bool specialize = IsWorthSpecializingToInt32Branch(instr, src1Val, src2Val);
  8639. UpdateIntBoundsForLessThanOrEqualBranch(src1Val, src2Val);
  8640. if(!specialize)
  8641. {
  8642. return false;
  8643. }
  8644. opcode = Js::OpCode::BrLe_I4;
  8645. // We'll get a warning if we don't assign a value to these...
  8646. // We'll assert if we use them and make a range where min > max
  8647. newMin = 2; newMax = 1;
  8648. break;
  8649. }
  8650. default:
  8651. return false;
  8652. }
  8653. // If this instruction is in a range of instructions where int overflow does not matter, we will still specialize it
  8654. // (won't leave it unspecialized based on heuristics), since it is most likely worth specializing, and the dst value
  8655. // needs to be guaranteed to be an int
  8656. if(!ignoredIntOverflow &&
  8657. !ignoredNegativeZero &&
  8658. !needsBoolConv &&
  8659. instr->ShouldCheckForIntOverflow() &&
  8660. !IsWorthSpecializingToInt32(instr, src1Val, src2Val))
  8661. {
  8662. // Even though type specialization is being skipped since it may not be worth it, the proper value should still be
  8663. // maintained so that the result may be type specialized later. An int value is not created for the dst in any of
  8664. // the following cases.
  8665. // - A bailout check is necessary to specialize this instruction. The bailout check is what guarantees the result to
  8666. // be an int, but since we're not going to specialize this instruction, there won't be a bailout check.
  8667. // - Aggressive int type specialization is disabled and we're in a loop prepass. We're conservative on dst values in
  8668. // that case, especially if the dst sym is live on the back-edge.
  8669. if(bailOutKind == IR::BailOutInvalid &&
  8670. instr->GetDst() &&
  8671. src1Val->GetValueInfo()->IsInt() &&
  8672. src2Val->GetValueInfo()->IsInt() &&
  8673. (DoAggressiveIntTypeSpec() || !this->IsLoopPrePass()))
  8674. {
  8675. *pDstVal = CreateDstUntransferredIntValue(newMin, newMax, instr, src1Val, src2Val);
  8676. }
  8677. return false;
  8678. }
  8679. } // case default
  8680. } // switch
  8681. LOutsideSwitch:
  8682. this->ignoredIntOverflowForCurrentInstr = ignoredIntOverflow;
  8683. this->ignoredNegativeZeroForCurrentInstr = ignoredNegativeZero;
  8684. {
  8685. // Try CSE again before modifying the IR, in case some attributes are required for successful CSE
  8686. Value *src1IndirIndexVal = nullptr;
  8687. if(CSEOptimize(currentBlock, &instr, &src1Val, &src2Val, &src1IndirIndexVal, true /* intMathExprOnly */))
  8688. {
  8689. *redoTypeSpecRef = true;
  8690. return false;
  8691. }
  8692. }
  8693. const Js::OpCode originalOpCode = instr->m_opcode;
  8694. if (!this->IsLoopPrePass())
  8695. {
  8696. // No re-write on prepass
  8697. instr->m_opcode = opcode;
  8698. }
  8699. Value *src1ValueToSpecialize = src1Val, *src2ValueToSpecialize = src2Val;
  8700. // Lossy conversions to int32 must be done based on the original source values. For instance, if one of the values is a
  8701. // float constant with a value that fits in a uint32 but not an int32, and the instruction can ignore int overflow, the
  8702. // source value for the purposes of int specialization would have been changed to an int constant value by ignoring
  8703. // overflow. If we were to specialize the sym using the int constant value, it would be treated as a lossless
  8704. // conversion, but since there may be subsequent uses of the same float constant value that may not ignore overflow,
  8705. // this must be treated as a lossy conversion by specializing the sym using the original float constant value.
  8706. if(src1Lossy)
  8707. {
  8708. src1ValueToSpecialize = src1OriginalVal;
  8709. }
  8710. if (src2Lossy)
  8711. {
  8712. src2ValueToSpecialize = src2OriginalVal;
  8713. }
  8714. // Make sure the srcs are specialized
  8715. IR::Opnd* src1 = instr->GetSrc1();
  8716. this->ToInt32(instr, src1, this->currentBlock, src1ValueToSpecialize, nullptr, src1Lossy);
  8717. if (!skipSrc2)
  8718. {
  8719. IR::Opnd* src2 = instr->GetSrc2();
  8720. this->ToInt32(instr, src2, this->currentBlock, src2ValueToSpecialize, nullptr, src2Lossy);
  8721. }
  8722. if(bailOutKind != IR::BailOutInvalid && !this->IsLoopPrePass())
  8723. {
  8724. GenerateBailAtOperation(&instr, bailOutKind);
  8725. }
  8726. if (!skipDst && instr->GetDst())
  8727. {
  8728. if (needsBoolConv)
  8729. {
  8730. IR::RegOpnd *varDst;
  8731. if (this->IsLoopPrePass())
  8732. {
  8733. varDst = instr->GetDst()->AsRegOpnd();
  8734. this->ToVarRegOpnd(varDst, this->currentBlock);
  8735. }
  8736. else
  8737. {
  8738. // Generate:
  8739. // t1.i = CmCC t2.i, t3.i
  8740. // t1.v = Conv_bool t1.i
  8741. //
  8742. // If the only uses of t1 are ints, the conv_bool will get dead-stored
  8743. TypeSpecializeIntDst(instr, originalOpCode, nullptr, src1Val, src2Val, bailOutKind, newMin, newMax, pDstVal);
  8744. IR::RegOpnd *intDst = instr->GetDst()->AsRegOpnd();
  8745. intDst->SetIsJITOptimizedReg(true);
  8746. varDst = IR::RegOpnd::New(intDst->m_sym->GetVarEquivSym(this->func), TyVar, this->func);
  8747. IR::Instr *convBoolInstr = IR::Instr::New(Js::OpCode::Conv_Bool, varDst, intDst, this->func);
  8748. // In some cases (e.g. unsigned compare peep code), a comparison will use variables
  8749. // other than the ones initially intended for it, if we can determine that we would
  8750. // arrive at the same result. This means that we get a ByteCodeUses operation after
  8751. // the actual comparison. Since Inserting the Conv_bool just after the compare, and
  8752. // just before the ByteCodeUses, would cause issues later on with register lifetime
  8753. // calculation, we want to insert the Conv_bool after the whole compare instruction
  8754. // block.
  8755. IR::Instr *putAfter = instr;
  8756. while (putAfter->m_next && putAfter->m_next->m_opcode == Js::OpCode::ByteCodeUses)
  8757. {
  8758. putAfter = putAfter->m_next;
  8759. }
  8760. putAfter->InsertAfter(convBoolInstr);
  8761. convBoolInstr->SetByteCodeOffset(instr);
  8762. this->ToVarRegOpnd(varDst, this->currentBlock);
  8763. CurrentBlockData()->liveInt32Syms->Set(varDst->m_sym->m_id);
  8764. CurrentBlockData()->liveLossyInt32Syms->Set(varDst->m_sym->m_id);
  8765. }
  8766. *pDstVal = this->NewGenericValue(ValueType::Boolean, varDst);
  8767. }
  8768. else
  8769. {
  8770. TypeSpecializeIntDst(
  8771. instr,
  8772. originalOpCode,
  8773. nullptr,
  8774. src1Val,
  8775. src2Val,
  8776. bailOutKind,
  8777. newMin,
  8778. newMax,
  8779. pDstVal,
  8780. addSubConstantInfo.HasInfo() ? &addSubConstantInfo : nullptr);
  8781. }
  8782. }
  8783. if(bailOutKind == IR::BailOutInvalid)
  8784. {
  8785. GOPT_TRACE(_u("Type specialized to INT\n"));
  8786. #if ENABLE_DEBUG_CONFIG_OPTIONS
  8787. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  8788. {
  8789. Output::Print(_u("Type specialized to INT: "));
  8790. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  8791. }
  8792. #endif
  8793. }
  8794. else
  8795. {
  8796. GOPT_TRACE(_u("Type specialized to INT with bailout on:\n"));
  8797. if(bailOutKind & (IR::BailOutOnOverflow | IR::BailOutOnMulOverflow) )
  8798. {
  8799. GOPT_TRACE(_u(" Overflow\n"));
  8800. #if ENABLE_DEBUG_CONFIG_OPTIONS
  8801. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  8802. {
  8803. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Overflow");
  8804. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  8805. }
  8806. #endif
  8807. }
  8808. if(bailOutKind & IR::BailOutOnNegativeZero)
  8809. {
  8810. GOPT_TRACE(_u(" Zero\n"));
  8811. #if ENABLE_DEBUG_CONFIG_OPTIONS
  8812. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::AggressiveIntTypeSpecPhase))
  8813. {
  8814. Output::Print(_u("Type specialized to INT with bailout (%S): "), "Zero");
  8815. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  8816. }
  8817. #endif
  8818. }
  8819. }
  8820. return true;
  8821. }
  8822. bool
  8823. GlobOpt::IsWorthSpecializingToInt32Branch(IR::Instr const * instr, Value const * src1Val, Value const * src2Val) const
  8824. {
  8825. if (!src1Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc1()->IsRegOpnd())
  8826. {
  8827. StackSym const *sym1 = instr->GetSrc1()->AsRegOpnd()->m_sym;
  8828. if (CurrentBlockData()->IsInt32TypeSpecialized(sym1) == false)
  8829. {
  8830. if (!src2Val->GetValueInfo()->HasIntConstantValue() && instr->GetSrc2()->IsRegOpnd())
  8831. {
  8832. StackSym const *sym2 = instr->GetSrc2()->AsRegOpnd()->m_sym;
  8833. if (CurrentBlockData()->IsInt32TypeSpecialized(sym2) == false)
  8834. {
  8835. // Type specializing a Br itself isn't worth it, unless one src
  8836. // is already type specialized
  8837. return false;
  8838. }
  8839. }
  8840. }
  8841. }
  8842. return true;
  8843. }
  8844. bool
  8845. GlobOpt::TryOptConstFoldBrFalse(
  8846. IR::Instr *const instr,
  8847. Value *const srcValue,
  8848. const int32 min,
  8849. const int32 max)
  8850. {
  8851. Assert(instr);
  8852. Assert(instr->m_opcode == Js::OpCode::BrFalse_A || instr->m_opcode == Js::OpCode::BrTrue_A);
  8853. Assert(srcValue);
  8854. if(!(DoAggressiveIntTypeSpec() ? srcValue->GetValueInfo()->IsLikelyInt() : srcValue->GetValueInfo()->IsInt()))
  8855. {
  8856. return false;
  8857. }
  8858. if(ValueInfo::IsEqualTo(srcValue, min, max, nullptr, 0, 0))
  8859. {
  8860. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrFalse_A, instr, srcValue);
  8861. return true;
  8862. }
  8863. if(ValueInfo::IsNotEqualTo(srcValue, min, max, nullptr, 0, 0))
  8864. {
  8865. OptConstFoldBr(instr->m_opcode == Js::OpCode::BrTrue_A, instr, srcValue);
  8866. return true;
  8867. }
  8868. return false;
  8869. }
  8870. bool
  8871. GlobOpt::TryOptConstFoldBrEqual(
  8872. IR::Instr *const instr,
  8873. const bool branchOnEqual,
  8874. Value *const src1Value,
  8875. const int32 min1,
  8876. const int32 max1,
  8877. Value *const src2Value,
  8878. const int32 min2,
  8879. const int32 max2)
  8880. {
  8881. Assert(instr);
  8882. Assert(src1Value);
  8883. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  8884. Assert(src2Value);
  8885. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  8886. if(ValueInfo::IsEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  8887. {
  8888. OptConstFoldBr(branchOnEqual, instr, src1Value, src2Value);
  8889. return true;
  8890. }
  8891. if(ValueInfo::IsNotEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  8892. {
  8893. OptConstFoldBr(!branchOnEqual, instr, src1Value, src2Value);
  8894. return true;
  8895. }
  8896. return false;
  8897. }
  8898. bool
  8899. GlobOpt::TryOptConstFoldBrGreaterThan(
  8900. IR::Instr *const instr,
  8901. const bool branchOnGreaterThan,
  8902. Value *const src1Value,
  8903. const int32 min1,
  8904. const int32 max1,
  8905. Value *const src2Value,
  8906. const int32 min2,
  8907. const int32 max2)
  8908. {
  8909. Assert(instr);
  8910. Assert(src1Value);
  8911. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  8912. Assert(src2Value);
  8913. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  8914. if(ValueInfo::IsGreaterThan(src1Value, min1, max1, src2Value, min2, max2))
  8915. {
  8916. OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
  8917. return true;
  8918. }
  8919. if(ValueInfo::IsLessThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  8920. {
  8921. OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
  8922. return true;
  8923. }
  8924. return false;
  8925. }
  8926. bool
  8927. GlobOpt::TryOptConstFoldBrGreaterThanOrEqual(
  8928. IR::Instr *const instr,
  8929. const bool branchOnGreaterThanOrEqual,
  8930. Value *const src1Value,
  8931. const int32 min1,
  8932. const int32 max1,
  8933. Value *const src2Value,
  8934. const int32 min2,
  8935. const int32 max2)
  8936. {
  8937. Assert(instr);
  8938. Assert(src1Value);
  8939. Assert(DoAggressiveIntTypeSpec() ? src1Value->GetValueInfo()->IsLikelyInt() : src1Value->GetValueInfo()->IsInt());
  8940. Assert(src2Value);
  8941. Assert(DoAggressiveIntTypeSpec() ? src2Value->GetValueInfo()->IsLikelyInt() : src2Value->GetValueInfo()->IsInt());
  8942. if(ValueInfo::IsGreaterThanOrEqualTo(src1Value, min1, max1, src2Value, min2, max2))
  8943. {
  8944. OptConstFoldBr(branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  8945. return true;
  8946. }
  8947. if(ValueInfo::IsLessThan(src1Value, min1, max1, src2Value, min2, max2))
  8948. {
  8949. OptConstFoldBr(!branchOnGreaterThanOrEqual, instr, src1Value, src2Value);
  8950. return true;
  8951. }
  8952. return false;
  8953. }
  8954. bool
  8955. GlobOpt::TryOptConstFoldBrUnsignedLessThan(
  8956. IR::Instr *const instr,
  8957. const bool branchOnLessThan,
  8958. Value *const src1Value,
  8959. const int32 min1,
  8960. const int32 max1,
  8961. Value *const src2Value,
  8962. const int32 min2,
  8963. const int32 max2)
  8964. {
  8965. Assert(DoConstFold());
  8966. Assert(!IsLoopPrePass());
  8967. if(!src1Value ||
  8968. !src2Value ||
  8969. !(
  8970. DoAggressiveIntTypeSpec()
  8971. ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
  8972. : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
  8973. ))
  8974. {
  8975. return false;
  8976. }
  8977. uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
  8978. uint uMax1 = max((uint)min1, (uint)max1);
  8979. uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
  8980. uint uMax2 = max((uint)min2, (uint)max2);
  8981. if (uMax1 < uMin2)
  8982. {
  8983. // Range 1 is always lesser than Range 2
  8984. OptConstFoldBr(branchOnLessThan, instr, src1Value, src2Value);
  8985. return true;
  8986. }
  8987. if (uMin1 >= uMax2)
  8988. {
  8989. // Range 2 is always lesser than Range 1
  8990. OptConstFoldBr(!branchOnLessThan, instr, src1Value, src2Value);
  8991. return true;
  8992. }
  8993. return false;
  8994. }
  8995. bool
  8996. GlobOpt::TryOptConstFoldBrUnsignedGreaterThan(
  8997. IR::Instr *const instr,
  8998. const bool branchOnGreaterThan,
  8999. Value *const src1Value,
  9000. const int32 min1,
  9001. const int32 max1,
  9002. Value *const src2Value,
  9003. const int32 min2,
  9004. const int32 max2)
  9005. {
  9006. Assert(DoConstFold());
  9007. Assert(!IsLoopPrePass());
  9008. if(!src1Value ||
  9009. !src2Value ||
  9010. !(
  9011. DoAggressiveIntTypeSpec()
  9012. ? src1Value->GetValueInfo()->IsLikelyInt() && src2Value->GetValueInfo()->IsLikelyInt()
  9013. : src1Value->GetValueInfo()->IsInt() && src2Value->GetValueInfo()->IsInt()
  9014. ))
  9015. {
  9016. return false;
  9017. }
  9018. uint uMin1 = (min1 < 0 ? (max1 < 0 ? min((uint)min1, (uint)max1) : 0) : min1);
  9019. uint uMax1 = max((uint)min1, (uint)max1);
  9020. uint uMin2 = (min2 < 0 ? (max2 < 0 ? min((uint)min2, (uint)max2) : 0) : min2);
  9021. uint uMax2 = max((uint)min2, (uint)max2);
  9022. if (uMin1 > uMax2)
  9023. {
  9024. // Range 1 is always greater than Range 2
  9025. OptConstFoldBr(branchOnGreaterThan, instr, src1Value, src2Value);
  9026. return true;
  9027. }
  9028. if (uMax1 <= uMin2)
  9029. {
  9030. // Range 2 is always greater than Range 1
  9031. OptConstFoldBr(!branchOnGreaterThan, instr, src1Value, src2Value);
  9032. return true;
  9033. }
  9034. return false;
  9035. }
void
GlobOpt::SetPathDependentInfo(const bool conditionToBranch, const PathDependentInfo &info)
{
    // Attaches path-dependent value info to one of the current block's two
    // successor edges: the branch-taken edge when conditionToBranch is true,
    // or the fall-through edge when it is false.
    Assert(this->currentBlock->GetSuccList()->Count() == 2);

    // The fall-through successor is the one beginning at the next block's
    // first instruction; any other successor is the branch-taken edge.
    IR::Instr * fallthrough = this->currentBlock->GetNext()->GetFirstInstr();
    FOREACH_SLISTBASECOUNTED_ENTRY(FlowEdge*, edge, this->currentBlock->GetSuccList())
    {
        if (conditionToBranch == (edge->GetSucc()->GetFirstInstr() != fallthrough))
        {
            edge->SetPathDependentInfo(info, alloc);
            return;
        }
    }
    NEXT_SLISTBASECOUNTED_ENTRY;

    // Exactly one of the two successor edges must have matched above.
    Assert(false);
}
PathDependentInfoToRestore
GlobOpt::UpdatePathDependentInfo(PathDependentInfo *const info)
{
    // Applies the int-bound implications of a path-dependent relationship
    // (==, !=, <, <=, >, >= between a left value and either a right value or
    // a right constant) to the participating values' ValueInfos. Returns the
    // pre-update ValueInfos for whichever sides actually changed, so
    // RestorePathDependentInfo can undo the narrowing when leaving this path.
    Assert(info);

    if(!info->HasInfo())
    {
        // No relationship recorded; nothing to apply, nothing to restore.
        return PathDependentInfoToRestore();
    }

    // Select the bound-update member function for each side. The right side
    // uses the mirrored relation (e.g. left > right implies right < left).
    decltype(&GlobOpt::UpdateIntBoundsForEqual) UpdateIntBoundsForLeftValue, UpdateIntBoundsForRightValue;
    switch(info->Relationship())
    {
        case PathDependentRelationship::Equal:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForEqual;
            break;

        case PathDependentRelationship::NotEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForNotEqual;
            break;

        case PathDependentRelationship::GreaterThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            break;

        case PathDependentRelationship::GreaterThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForLessThan;
            break;

        case PathDependentRelationship::LessThanOrEqual:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThanOrEqual;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThanOrEqual;
            break;

        case PathDependentRelationship::LessThan:
            UpdateIntBoundsForLeftValue = &GlobOpt::UpdateIntBoundsForLessThan;
            UpdateIntBoundsForRightValue = &GlobOpt::UpdateIntBoundsForGreaterThan;
            break;

        default:
            Assert(false);
            __assume(false);
    }

    // Capture the current constant bounds of both sides. When there is no
    // right value, the relationship is against a constant, represented here
    // as a single-point bounds range.
    ValueInfo *leftValueInfo = info->LeftValue()->GetValueInfo();
    IntConstantBounds leftConstantBounds;
    AssertVerify(leftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));

    ValueInfo *rightValueInfo;
    IntConstantBounds rightConstantBounds;
    if(info->RightValue())
    {
        rightValueInfo = info->RightValue()->GetValueInfo();
        AssertVerify(rightValueInfo->TryGetIntConstantBounds(&rightConstantBounds, true));
    }
    else
    {
        rightValueInfo = nullptr;
        rightConstantBounds = IntConstantBounds(info->RightConstantValue(), info->RightConstantValue());
    }

    // Narrow the left value's bounds using the relationship with the right.
    ValueInfo *const newLeftValueInfo =
        (this->*UpdateIntBoundsForLeftValue)(
            info->LeftValue(),
            leftConstantBounds,
            info->RightValue(),
            rightConstantBounds,
            true);
    if(newLeftValueInfo)
    {
        ChangeValueInfo(nullptr, info->LeftValue(), newLeftValueInfo);
        // Refresh the left bounds so the right side's update below sees the
        // narrowed range.
        AssertVerify(newLeftValueInfo->TryGetIntConstantBounds(&leftConstantBounds, true));
    }
    else
    {
        // Left side unchanged; null signals "nothing to restore" for it.
        leftValueInfo = nullptr;
    }

    // Narrow the right value's bounds using the mirrored relation.
    ValueInfo *const newRightValueInfo =
        (this->*UpdateIntBoundsForRightValue)(
            info->RightValue(),
            rightConstantBounds,
            info->LeftValue(),
            leftConstantBounds,
            true);
    if(newRightValueInfo)
    {
        ChangeValueInfo(nullptr, info->RightValue(), newRightValueInfo);
    }
    else
    {
        // Right side unchanged; null signals "nothing to restore" for it.
        rightValueInfo = nullptr;
    }

    return PathDependentInfoToRestore(leftValueInfo, rightValueInfo);
}
  9139. void
  9140. GlobOpt::RestorePathDependentInfo(PathDependentInfo *const info, const PathDependentInfoToRestore infoToRestore)
  9141. {
  9142. Assert(info);
  9143. if(infoToRestore.LeftValueInfo())
  9144. {
  9145. Assert(info->LeftValue());
  9146. ChangeValueInfo(nullptr, info->LeftValue(), infoToRestore.LeftValueInfo());
  9147. }
  9148. if(infoToRestore.RightValueInfo())
  9149. {
  9150. Assert(info->RightValue());
  9151. ChangeValueInfo(nullptr, info->RightValue(), infoToRestore.RightValueInfo());
  9152. }
  9153. }
// Attempts to type-specialize a unary instruction (or an inline built-in's
// instruction) to float64.
//
// pInstr  - in/out: the instruction; may be replaced when delegating to
//           TypeSpecializeStElem for store-element opcodes.
// src1Val - value tracked for src1; asserted to be likely-number unless the
//           opcode is an inline built-in.
// pDstVal - out: receives the dst's value when the dst is specialized.
// skipDst - when true, the dst is left unspecialized; also forced on
//           internally for ArgOut_A_InlineBuiltIn.
//
// Returns true if the instruction was float-specialized; false when float
// type spec is disabled or the opcode/operands don't qualify.
bool
GlobOpt::TypeSpecializeFloatUnary(IR::Instr **pInstr, Value *src1Val, Value **pDstVal, bool skipDst /* = false */)
{
    IR::Instr *&instr = *pInstr;
    IR::Opnd *src1;
    IR::Opnd *dst;
    Js::OpCode opcode = instr->m_opcode;
    Value *valueToTransfer = nullptr;

    // Precedence note: reads as (src1Val && IsLikelyNumber) || IsInlineBuiltIn.
    Assert(src1Val && src1Val->GetValueInfo()->IsLikelyNumber() || OpCodeAttr::IsInlineBuiltIn(instr->m_opcode));

    if (!this->DoFloatTypeSpec())
    {
        return false;
    }

    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (opcode)
        {
        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Only the src needs specialization for the arg-out.
            skipDst = true;
            // fall-through

        case Js::OpCode::Ld_A:
        case Js::OpCode::BrTrue_A:
        case Js::OpCode::BrFalse_A:
            if (instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if (CurrentBlockData()->IsFloat64TypeSpecialized(sym) == false)
                {
                    // Type specializing an Ld_A isn't worth it, unless the src
                    // is already type specialized
                    return false;
                }
            }
            if (instr->m_opcode == Js::OpCode::Ld_A)
            {
                // Pure transfer: dst gets the same value as src1.
                valueToTransfer = src1Val;
            }
            break;

        case Js::OpCode::Neg_A:
            break;

        case Js::OpCode::Conv_Num:
            Assert(src1Val);
            // Once the src is a float64, Conv_Num degenerates into a move.
            opcode = Js::OpCode::Ld_A;
            valueToTransfer = src1Val;
            if (!src1Val->GetValueInfo()->IsNumber())
            {
                StackSym *sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                // Src is not known to be a number; give the dst a fresh Float value
                // instead of transferring the (weaker) src value.
                valueToTransfer = NewGenericValue(ValueType::Float, instr->GetDst()->GetStackSym());
                if (CurrentBlockData()->IsFloat64TypeSpecialized(sym) == false)
                {
                    // Set the dst as a nonDeadStore. We want to keep the Ld_A to prevent the FromVar from
                    // being dead-stored, as it could cause implicit calls.
                    dst = instr->GetDst();
                    dst->AsRegOpnd()->m_dontDeadStore = true;
                }
            }
            break;

        case Js::OpCode::StElemI_A:
        case Js::OpCode::StElemI_A_Strict:
        case Js::OpCode::StElemC:
            // Store-element specialization has its own logic (typed/native arrays).
            return TypeSpecializeStElem(pInstr, src1Val, pDstVal);

        default:
            return false;
        }
    }

    // Make sure the srcs are specialized
    src1 = instr->GetSrc1();

    // Use original val when calling toFloat64 as this is what we'll use to try hoisting the fromVar if we're in a loop.
    this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, IR::BailOutPrimitiveButString);

    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            this->TypeSpecializeFloatDst(instr, valueToTransfer, src1Val, nullptr, pDstVal);
            if (!this->IsLoopPrePass())
            {
                // Commit any opcode rewrite (e.g. Conv_Num -> Ld_A) outside the prepass.
                instr->m_opcode = opcode;
            }
        }
    }

    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif

    return true;
}
  9246. // Unconditionally type-spec dst to float.
  9247. void
  9248. GlobOpt::TypeSpecializeFloatDst(IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value *const src2Value, Value **pDstVal)
  9249. {
  9250. IR::Opnd* dst = instr->GetDst();
  9251. Assert(dst);
  9252. AssertMsg(dst->IsRegOpnd(), "What else?");
  9253. this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
  9254. if(valToTransfer)
  9255. {
  9256. *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
  9257. CurrentBlockData()->InsertNewValue(*pDstVal, dst);
  9258. }
  9259. else
  9260. {
  9261. *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Value, src2Value);
  9262. }
  9263. }
  9264. #ifdef ENABLE_SIMDJS
  9265. void
  9266. GlobOpt::TypeSpecializeSimd128Dst(IRType type, IR::Instr *instr, Value *valToTransfer, Value *const src1Value, Value **pDstVal)
  9267. {
  9268. IR::Opnd* dst = instr->GetDst();
  9269. Assert(dst);
  9270. AssertMsg(dst->IsRegOpnd(), "What else?");
  9271. this->ToSimd128Dst(type, instr, dst->AsRegOpnd(), this->currentBlock);
  9272. if (valToTransfer)
  9273. {
  9274. *pDstVal = this->ValueNumberTransferDst(instr, valToTransfer);
  9275. CurrentBlockData()->InsertNewValue(*pDstVal, dst);
  9276. }
  9277. else
  9278. {
  9279. *pDstVal = NewGenericValue(GetValueTypeFromIRType(type), instr->GetDst());
  9280. }
  9281. }
  9282. #endif
// Int-specializes an LdLen_A instruction.
//
// Preferred path (outside the loop prepass): if the base is an array reg opnd
// with a hoisted length sym available, the LdLen_A is rewritten into a Ld_I4
// of that sym (no bailout needed). Otherwise the dst is int-specialized with a
// BailOutOnIrregularLength bailout attached.
//
// instrRef / src1ValueRef / dstValueRef - in/out: instruction, src1 value, dst value.
// forceInvariantHoistingRef - out: set to true on the hoisted-length path so the
//     resulting Ld_I4 is invariant-hoisted (see comment below).
//
// Returns true if the instruction was int-specialized.
bool
GlobOpt::TypeSpecializeLdLen(
    IR::Instr * *const instrRef,
    Value * *const src1ValueRef,
    Value * *const dstValueRef,
    bool *const forceInvariantHoistingRef)
{
    Assert(instrRef);
    IR::Instr *&instr = *instrRef;
    Assert(instr);
    Assert(instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(src1ValueRef);
    Value *&src1Value = *src1ValueRef;
    Assert(dstValueRef);
    Value *&dstValue = *dstValueRef;
    Assert(forceInvariantHoistingRef);
    bool &forceInvariantHoisting = *forceInvariantHoistingRef;

    if(!DoLdLenIntSpec(instr, instr->GetSrc1()->GetValueType()))
    {
        return false;
    }

    IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
    if(!IsLoopPrePass())
    {
        IR::RegOpnd *const baseOpnd = instr->GetSrc1()->AsRegOpnd();
        if(baseOpnd->IsArrayRegOpnd())
        {
            StackSym *const lengthSym = baseOpnd->AsArrayRegOpnd()->LengthSym();
            if(lengthSym)
            {
                // Rewrite the LdLen_A into a Ld_I4 of the hoisted length sym; the
                // bailout is no longer needed on this path.
                CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Ld_I4;
                instr->ReplaceSrc1(IR::RegOpnd::New(lengthSym, lengthSym->GetType(), func));
                instr->ClearBailOutInfo();

                // Find the hoisted length value
                Value *const lengthValue = CurrentBlockData()->FindValue(lengthSym);
                Assert(lengthValue);
                src1Value = lengthValue;
                ValueInfo *const lengthValueInfo = lengthValue->GetValueInfo();
                Assert(lengthValueInfo->GetSymStore() != lengthSym);
                IntConstantBounds lengthConstantBounds;
                AssertVerify(lengthValueInfo->TryGetIntConstantBounds(&lengthConstantBounds));
                // Array lengths are non-negative.
                Assert(lengthConstantBounds.LowerBound() >= 0);

                // Int-specialize, and transfer the value to the dst
                TypeSpecializeIntDst(
                    instr,
                    Js::OpCode::LdLen_A,
                    src1Value,
                    src1Value,
                    nullptr,
                    bailOutKind,
                    lengthConstantBounds.LowerBound(),
                    lengthConstantBounds.UpperBound(),
                    &dstValue);

                // Try to force hoisting the Ld_I4 so that the length will have an invariant sym store that can be
                // copy-propped. Invariant hoisting does not automatically hoist Ld_I4.
                forceInvariantHoisting = true;
                return true;
            }
        }

        // No hoisted length sym: attach (or merge into) a bailout on irregular length.
        if (instr->HasBailOutInfo())
        {
            Assert(instr->GetBailOutKind() == IR::BailOutMarkTempObject);
            bailOutKind = IR::BailOutOnIrregularLength | IR::BailOutMarkTempObject;
            instr->SetBailOutKind(bailOutKind);
        }
        else
        {
            Assert(bailOutKind == IR::BailOutOnIrregularLength);
            GenerateBailAtOperation(&instr, bailOutKind);
        }
    }

    // Int-specialize the dst with conservative [0, INT32_MAX] bounds (no known length value).
    TypeSpecializeIntDst(
        instr,
        Js::OpCode::LdLen_A,
        nullptr,
        nullptr,
        nullptr,
        bailOutKind,
        0,
        INT32_MAX,
        &dstValue);
    return true;
}
// Attempts to type-specialize a binary instruction (or an inline built-in's
// instruction) to float64.
//
// Decides per-opcode which operands/dst to specialize and which bailout kind
// to use on the src conversions:
//   - BailOutPrimitiveButString when undefined/null are acceptable inputs, or
//   - BailOutNumberOnly for (in)equality branches, where +undefined/+null
//     coercion would change comparison semantics (see comment below).
//
// Returns true if the instruction was float-specialized.
bool
GlobOpt::TypeSpecializeFloatBinary(IR::Instr *instr, Value *src1Val, Value *src2Val, Value **pDstVal)
{
    IR::Opnd *src1;
    IR::Opnd *src2;
    IR::Opnd *dst;
    bool allowUndefinedOrNullSrc1 = true;
    bool allowUndefinedOrNullSrc2 = true;
    bool skipSrc1 = false;
    bool skipSrc2 = false;
    bool skipDst = false;

    if (!this->DoFloatTypeSpec())
    {
        return false;
    }

    // For inline built-ins we need to do type specialization. Check upfront to avoid duplicating same case labels.
    if (!OpCodeAttr::IsInlineBuiltIn(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::Sub_A:
        case Js::OpCode::Mul_A:
        case Js::OpCode::Div_A:
        case Js::OpCode::Expo_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            break;

        case Js::OpCode::BrSrEq_A:
        case Js::OpCode::BrSrNeq_A:
        case Js::OpCode::BrEq_A:
        case Js::OpCode::BrNeq_A:
        case Js::OpCode::BrSrNotEq_A:
        case Js::OpCode::BrNotEq_A:
        case Js::OpCode::BrSrNotNeq_A:
        case Js::OpCode::BrNotNeq_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            // Undef == Undef, but +Undef != +Undef
            // 0.0 != null, but 0.0 == +null
            //
            // So Bailout on anything but numbers for both src1 and src2
            allowUndefinedOrNullSrc1 = false;
            allowUndefinedOrNullSrc2 = false;
            break;

        case Js::OpCode::BrGt_A:
        case Js::OpCode::BrGe_A:
        case Js::OpCode::BrLt_A:
        case Js::OpCode::BrLe_A:
        case Js::OpCode::BrNotGt_A:
        case Js::OpCode::BrNotGe_A:
        case Js::OpCode::BrNotLt_A:
        case Js::OpCode::BrNotLe_A:
            // Avoid if one source is known not to be a number.
            if (src1Val->GetValueInfo()->IsNotNumber() || src2Val->GetValueInfo()->IsNotNumber())
            {
                return false;
            }
            break;

        case Js::OpCode::Add_A:
            // For Add, we need both sources to be Numbers, otherwise it could be a string concat
            if (!src1Val || !src2Val || !(src1Val->GetValueInfo()->IsLikelyNumber() && src2Val->GetValueInfo()->IsLikelyNumber()))
            {
                return false;
            }
            break;

        case Js::OpCode::ArgOut_A_InlineBuiltIn:
            // Only src1 needs float specialization for the arg-out.
            skipSrc2 = true;
            skipDst = true;
            break;

        default:
            return false;
        }
    }
    else
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::InlineArrayPush:
            // Don't specialize a float-constant element that would collide with the
            // native-array missing-item sentinel.
            bool isFloatConstMissingItem = src2Val->GetValueInfo()->IsFloatConstant();
            if(isFloatConstMissingItem)
            {
                FloatConstType floatValue = src2Val->GetValueInfo()->AsFloatConstant()->FloatValue();
                isFloatConstMissingItem = Js::SparseArraySegment<double>::IsMissingItem(&floatValue);
            }
            // Don't specialize if the element is not likelyNumber - we will surely bailout
            if(!(src2Val->GetValueInfo()->IsLikelyNumber()) || isFloatConstMissingItem)
            {
                return false;
            }
            // Only specialize the Second source - element
            skipSrc1 = true;
            skipDst = true;
            allowUndefinedOrNullSrc2 = false;
            break;
        }
    }

    // Make sure the srcs are specialized
    if(!skipSrc1)
    {
        src1 = instr->GetSrc1();
        this->ToFloat64(instr, src1, this->currentBlock, src1Val, nullptr, (allowUndefinedOrNullSrc1 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
    }

    if (!skipSrc2)
    {
        src2 = instr->GetSrc2();
        this->ToFloat64(instr, src2, this->currentBlock, src2Val, nullptr, (allowUndefinedOrNullSrc2 ? IR::BailOutPrimitiveButString : IR::BailOutNumberOnly));
    }

    if (!skipDst)
    {
        dst = instr->GetDst();
        if (dst)
        {
            // Dst gets a fresh untransferred Float value and is float64-specialized.
            *pDstVal = CreateDstUntransferredValue(ValueType::Float, instr, src1Val, src2Val);
            AssertMsg(dst->IsRegOpnd(), "What else?");
            this->ToFloat64Dst(instr, dst->AsRegOpnd(), this->currentBlock);
        }
    }

    GOPT_TRACE_INSTR(instr, _u("Type specialized to FLOAT: "));
#if ENABLE_DEBUG_CONFIG_OPTIONS
    if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::FloatTypeSpecPhase))
    {
        Output::Print(_u("Type specialized to FLOAT: "));
        Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
    }
#endif

    return true;
}
// Attempts to type-specialize the source of a store-element (StElemI_A /
// StElemI_A_Strict / StElemC) into a typed array or native array.
//
// Chooses a target type (TyInt32 or TyFloat64) based on the base array's
// object type and on whether the source sym is already int/float specialized,
// converts the src accordingly, and attaches/merges the appropriate
// conventional-array-access bailout outside the loop prepass.
//
// pInstr  - in/out: the instruction (may gain bailout info).
// src1Val - value tracked for the stored element.
// pDstVal - unused on the trace-and-fail paths; part of the common signature.
//
// Returns true if the src was type-specialized (toType != TyVar).
bool
GlobOpt::TypeSpecializeStElem(IR::Instr ** pInstr, Value *src1Val, Value **pDstVal)
{
    IR::Instr *&instr = *pInstr;

    IR::RegOpnd *baseOpnd = instr->GetDst()->AsIndirOpnd()->GetBaseOpnd();
    ValueType baseValueType(baseOpnd->GetValueType());
    // Give up when the relevant type-spec phase is disabled, the base is not a
    // likely typed/native array, or the instruction uses the arguments object.
    if (instr->DoStackArgsOpt(this->func) ||
        (!this->DoTypedArrayTypeSpec() && baseValueType.IsLikelyOptimizedTypedArray()) ||
        (!this->DoNativeArrayTypeSpec() && baseValueType.IsLikelyNativeArray()) ||
        !(baseValueType.IsLikelyOptimizedTypedArray() || baseValueType.IsLikelyNativeArray()))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't type specialize array access, because typed array type specialization is disabled, or base is not an optimized typed array.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because %s.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr,
                instr->DoStackArgsOpt(this->func) ?
                    _u("instruction uses the arguments object") :
                    _u("typed array type specialization is disabled, or base is not an optimized typed array"));
            Output::Flush();
        }
        return false;
    }

    // src1 is either a reg opnd or an int constant value; sym stays null for the latter.
    Assert(instr->GetSrc1()->IsRegOpnd() || (src1Val && src1Val->GetValueInfo()->HasIntConstantValue()));
    StackSym *sym = instr->GetSrc1()->IsRegOpnd() ? instr->GetSrc1()->AsRegOpnd()->m_sym : nullptr;

    // Only type specialize the source of store element if the source symbol is already type specialized to int or float.
    if (sym)
    {
        if (baseValueType.IsLikelyNativeArray())
        {
            // Gently coerce these src's into native if it seems likely to work.
            // Otherwise we can't use the fast path to store.
            // But don't try to put a float-specialized number into an int array this way.
            if (!(
                    CurrentBlockData()->IsInt32TypeSpecialized(sym) ||
                    (
                        src1Val &&
                        (
                            DoAggressiveIntTypeSpec()
                                ? src1Val->GetValueInfo()->IsLikelyInt()
                                : src1Val->GetValueInfo()->IsInt()
                        )
                    )
                ))
            {
                if (!(
                        CurrentBlockData()->IsFloat64TypeSpecialized(sym) ||
                        (src1Val && src1Val->GetValueInfo()->IsLikelyNumber())
                    ) ||
                    baseValueType.HasIntElements())
                {
                    return false;
                }
            }
        }
        else if (!CurrentBlockData()->IsInt32TypeSpecialized(sym) && !CurrentBlockData()->IsFloat64TypeSpecialized(sym))
        {
            GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because src is not type specialized.\n"));
            if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
            {
                char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                baseValueType.ToString(baseValueTypeStr);
                Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because src is not specialized.\n"),
                    this->func->GetJITFunctionBody()->GetDisplayName(),
                    this->func->GetDebugNumberSet(debugStringBuffer),
                    Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                    baseValueTypeStr);
                Output::Flush();
            }
            return false;
        }
    }

    // Storing the native-int-array missing-item sentinel as a constant would corrupt
    // the array's missing-value tracking; don't specialize in that case.
    int32 src1IntConstantValue;
    if(baseValueType.IsLikelyNativeIntArray() && src1Val && src1Val->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue))
    {
        if(Js::SparseArraySegment<int32>::IsMissingItem(&src1IntConstantValue))
        {
            return false;
        }
    }

    // Note: doing ToVarUses to make sure we do get the int32 version of the index before trying to access its value in
    // ShouldExpectConventionalArrayIndexValue. Not sure why that never gave us a problem before.
    Assert(instr->GetDst()->IsIndirOpnd());
    IR::IndirOpnd *dst = instr->GetDst()->AsIndirOpnd();

    // Make sure we use the int32 version of the index operand symbol, if available. Otherwise, ensure the var symbol is live (by
    // potentially inserting a ToVar).
    this->ToVarUses(instr, dst, /* isDst = */ true, nullptr);

    if (!ShouldExpectConventionalArrayIndexValue(dst))
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because index is negative or likely not int.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not specialize because index is negative or likely not int.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr);
            Output::Flush();
        }
        return false;
    }

    // Pick the target type per array element type. TyVar means "don't specialize".
    IRType toType = TyVar;
    bool isLossyAllowed = true;
    IR::BailOutKind arrayBailOutKind = IR::BailOutConventionalTypedArrayAccessOnly;

    switch(baseValueType.GetObjectType())
    {
    case ObjectType::Int8Array:
    case ObjectType::Uint8Array:
    case ObjectType::Int16Array:
    case ObjectType::Uint16Array:
    case ObjectType::Int32Array:
    case ObjectType::Int8VirtualArray:
    case ObjectType::Uint8VirtualArray:
    case ObjectType::Int16VirtualArray:
    case ObjectType::Uint16VirtualArray:
    case ObjectType::Int32VirtualArray:
    case ObjectType::Int8MixedArray:
    case ObjectType::Uint8MixedArray:
    case ObjectType::Int16MixedArray:
    case ObjectType::Uint16MixedArray:
    case ObjectType::Int32MixedArray:
    Int32Array: // label: shared with the native-int-array default case below
        if (this->DoAggressiveIntTypeSpec() || this->DoFloatTypeSpec())
        {
            toType = TyInt32;
        }
        break;

    case ObjectType::Uint32Array:
    case ObjectType::Uint32VirtualArray:
    case ObjectType::Uint32MixedArray:
        // Uint32Arrays may store values that overflow int32. If the value being stored comes from a symbol that's
        // already losslessly type specialized to int32, we'll use it. Otherwise, if we only have a float64 specialized
        // value, we don't want to force bailout if it doesn't fit in int32. Instead, we'll emit conversion in the
        // lowerer, and handle overflow, if necessary.
        if (!sym || CurrentBlockData()->IsInt32TypeSpecialized(sym))
        {
            toType = TyInt32;
        }
        else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
        {
            toType = TyFloat64;
        }
        break;

    case ObjectType::Float32Array:
    case ObjectType::Float64Array:
    case ObjectType::Float32VirtualArray:
    case ObjectType::Float32MixedArray:
    case ObjectType::Float64VirtualArray:
    case ObjectType::Float64MixedArray:
    Float64Array: // label: shared with the native-float-array default case below
        if (this->DoFloatTypeSpec())
        {
            toType = TyFloat64;
        }
        break;

    case ObjectType::Uint8ClampedArray:
    case ObjectType::Uint8ClampedVirtualArray:
    case ObjectType::Uint8ClampedMixedArray:
        // Uint8ClampedArray requires rounding (as opposed to truncation) of floating point values. If source symbol is
        // float type specialized, type specialize this instruction to float as well, and handle rounding in the
        // lowerer.
        if (!sym || CurrentBlockData()->IsInt32TypeSpecialized(sym))
        {
            toType = TyInt32;
            isLossyAllowed = false;
        }
        else if (CurrentBlockData()->IsFloat64TypeSpecialized(sym))
        {
            toType = TyFloat64;
        }
        break;

    default:
        // Native (non-typed) arrays: conversions must be lossless, and the bailout
        // kind switches to the native-array variant.
        Assert(baseValueType.IsLikelyNativeArray());
        isLossyAllowed = false;
        arrayBailOutKind = IR::BailOutConventionalNativeArrayAccessOnly;
        if(baseValueType.HasIntElements())
        {
            goto Int32Array;
        }
        Assert(baseValueType.HasFloatElements());
        goto Float64Array;
    }

    if (toType != TyVar)
    {
        GOPT_TRACE_INSTR(instr, _u("Type specialized array access.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, type specialized to %s.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr,
                toType == TyInt32 ? _u("int32") : _u("float64"));
            Output::Flush();
        }

        // Convert the stored value to the chosen type (with the matching bailout kind).
        IR::BailOutKind bailOutKind = ((toType == TyInt32) ? IR::BailOutIntOnly : IR::BailOutNumberOnly);
        this->ToTypeSpecUse(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, toType, bailOutKind, /* lossy = */ isLossyAllowed);

        if (!this->IsLoopPrePass())
        {
            bool bConvertToBailoutInstr = true;
            // Definite StElemC doesn't need bailout, because it can't fail or cause conversion.
            if (instr->m_opcode == Js::OpCode::StElemC && baseValueType.IsObject())
            {
                if (baseValueType.HasIntElements())
                {
                    //Native int array requires a missing element check & bailout
                    int32 min = INT32_MIN;
                    int32 max = INT32_MAX;
                    if (src1Val->GetValueInfo()->GetIntValMinMax(&min, &max, false))
                    {
                        // Only need the bailout if the value's range can include the missing-item sentinel.
                        bConvertToBailoutInstr = ((min <= Js::JavascriptNativeIntArray::MissingItem) && (max >= Js::JavascriptNativeIntArray::MissingItem));
                    }
                }
                else
                {
                    bConvertToBailoutInstr = false;
                }
            }

            if (bConvertToBailoutInstr)
            {
                if(instr->HasBailOutInfo())
                {
                    const IR::BailOutKind oldBailOutKind = instr->GetBailOutKind();
                    Assert(
                        (
                            !(oldBailOutKind & ~IR::BailOutKindBits) ||
                            (oldBailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp
                        ) &&
                        !(oldBailOutKind & IR::BailOutKindBits & ~(IR::BailOutOnArrayAccessHelperCall | IR::BailOutMarkTempObject)));
                    if(arrayBailOutKind == IR::BailOutConventionalTypedArrayAccessOnly)
                    {
                        // BailOutConventionalTypedArrayAccessOnly also bails out if the array access is outside the head
                        // segment bounds, and guarantees no implicit calls. Override the bailout kind so that the instruction
                        // bails out for the right reason.
                        instr->SetBailOutKind(
                            arrayBailOutKind | (oldBailOutKind & (IR::BailOutKindBits - IR::BailOutOnArrayAccessHelperCall)));
                    }
                    else
                    {
                        // BailOutConventionalNativeArrayAccessOnly by itself may generate a helper call, and may cause implicit
                        // calls to occur, so it must be merged in to eliminate generating the helper call.
                        Assert(arrayBailOutKind == IR::BailOutConventionalNativeArrayAccessOnly);
                        instr->SetBailOutKind(oldBailOutKind | arrayBailOutKind);
                    }
                }
                else
                {
                    GenerateBailAtOperation(&instr, arrayBailOutKind);
                }
            }
        }
    }
    else
    {
        GOPT_TRACE_INSTR(instr, _u("Didn't specialize array access, because the source was not already specialized.\n"));
        if (PHASE_TRACE(Js::TypedArrayTypeSpecPhase, this->func))
        {
            char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
            char baseValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
            baseValueType.ToString(baseValueTypeStr);
            Output::Print(_u("Typed Array Optimization: function: %s (%s): instr: %s, base value type: %S, did not type specialize, because of array type.\n"),
                this->func->GetJITFunctionBody()->GetDisplayName(),
                this->func->GetDebugNumberSet(debugStringBuffer),
                Js::OpCodeUtil::GetOpCodeName(instr->m_opcode),
                baseValueTypeStr);
            Output::Flush();
        }
    }

    return toType != TyVar;
}
// Ensures that the symbols read through 'opnd' are live in the required form,
// inserting conversions (ToVar / ToInt32) before 'instr' as needed:
//   - Reg opnd: make the var sym live if it isn't (src use only).
//   - Sym opnd: for a property sym, make its base stack sym live as a var.
//   - Indir opnd: make the base live as a var; int-specialize the index when
//     it is (likely) int and type spec is enabled, else make the index live
//     as a var.
//
// instr - the instruction using 'opnd'; isDst - whether opnd is the dst;
// val   - value for a reg opnd, passed through to ToVar (may be null).
// Returns the instruction to continue processing from (ToVar may replace it).
IR::Instr *
GlobOpt::ToVarUses(IR::Instr *instr, IR::Opnd *opnd, bool isDst, Value *val)
{
    Sym *sym;

    switch (opnd->GetKind())
    {
    case IR::OpndKindReg:
        if (!isDst && !CurrentBlockData()->liveVarSyms->Test(opnd->AsRegOpnd()->m_sym->m_id))
        {
            instr = this->ToVar(instr, opnd->AsRegOpnd(), this->currentBlock, val, true);
        }
        break;

    case IR::OpndKindSym:
        sym = opnd->AsSymOpnd()->m_sym;
        // A property access needs its base object sym live as a var.
        if (sym->IsPropertySym() && !CurrentBlockData()->liveVarSyms->Test(sym->AsPropertySym()->m_stackSym->m_id)
            && sym->AsPropertySym()->m_stackSym->IsVar())
        {
            StackSym *propertyBase = sym->AsPropertySym()->m_stackSym;
            IR::RegOpnd *newOpnd = IR::RegOpnd::New(propertyBase, TyVar, instr->m_func);
            instr = this->ToVar(instr, newOpnd, this->currentBlock, CurrentBlockData()->FindValue(propertyBase), true);
        }
        break;

    case IR::OpndKindIndir:
        IR::RegOpnd *baseOpnd = opnd->AsIndirOpnd()->GetBaseOpnd();
        if (!CurrentBlockData()->liveVarSyms->Test(baseOpnd->m_sym->m_id))
        {
            instr = this->ToVar(instr, baseOpnd, this->currentBlock, CurrentBlockData()->FindValue(baseOpnd->m_sym), true);
        }

        IR::RegOpnd *indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
        if (indexOpnd && !indexOpnd->m_sym->IsTypeSpec())
        {
            // Int-specialize the index if it's a definite int (and type spec is on),
            // or a likely int under aggressive int type spec.
            if((indexOpnd->GetValueType().IsInt()
                    ? !IsTypeSpecPhaseOff(func)
                    : indexOpnd->GetValueType().IsLikelyInt() && DoAggressiveIntTypeSpec()) && !GetIsAsmJSFunc()) // typespec is disabled for asmjs
            {
                StackSym *const indexVarSym = indexOpnd->m_sym;
                Value *const indexValue = CurrentBlockData()->FindValue(indexVarSym);
                Assert(indexValue);
                Assert(indexValue->GetValueInfo()->IsLikelyInt());
                ToInt32(instr, indexOpnd, currentBlock, indexValue, opnd->AsIndirOpnd(), false);
                Assert(indexValue->GetValueInfo()->IsInt());

                if(!IsLoopPrePass())
                {
                    // Re-fetch the index opnd: ToInt32 replaces it with the type-spec sym.
                    indexOpnd = opnd->AsIndirOpnd()->GetIndexOpnd();
                    if(indexOpnd)
                    {
                        Assert(indexOpnd->m_sym->IsTypeSpec());
                        IntConstantBounds indexConstantBounds;
                        AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
                        // If the index is proven >= 0, it can be treated as uint32.
                        if(ValueInfo::IsGreaterThanOrEqualTo(
                                indexValue,
                                indexConstantBounds.LowerBound(),
                                indexConstantBounds.UpperBound(),
                                nullptr,
                                0,
                                0))
                        {
                            indexOpnd->SetType(TyUint32);
                        }
                    }
                }
            }
            else if (!CurrentBlockData()->liveVarSyms->Test(indexOpnd->m_sym->m_id))
            {
                instr = this->ToVar(instr, indexOpnd, this->currentBlock, CurrentBlockData()->FindValue(indexOpnd->m_sym), true);
            }
        }
        break;
    }

    return instr;
}
  9854. IR::Instr *
  9855. GlobOpt::ToVar(IR::Instr *instr, IR::RegOpnd *regOpnd, BasicBlock *block, Value *value, bool needsUpdate)
  9856. {
  9857. IR::Instr *newInstr;
  9858. StackSym *varSym = regOpnd->m_sym;
  9859. if (IsTypeSpecPhaseOff(this->func))
  9860. {
  9861. return instr;
  9862. }
  9863. if (this->IsLoopPrePass())
  9864. {
  9865. block->globOptData.liveVarSyms->Set(varSym->m_id);
  9866. return instr;
  9867. }
  9868. if (block->globOptData.liveVarSyms->Test(varSym->m_id))
  9869. {
  9870. // Already live, nothing to do
  9871. return instr;
  9872. }
  9873. if (!varSym->IsVar())
  9874. {
  9875. Assert(!varSym->IsTypeSpec());
  9876. // Leave non-vars alone.
  9877. return instr;
  9878. }
  9879. Assert(block->globOptData.IsTypeSpecialized(varSym));
  9880. if (!value)
  9881. {
  9882. value = block->globOptData.FindValue(varSym);
  9883. }
  9884. ValueInfo *valueInfo = value ? value->GetValueInfo() : nullptr;
  9885. if(valueInfo && valueInfo->IsInt())
  9886. {
  9887. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  9888. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  9889. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  9890. // lossy state.
  9891. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  9892. }
  9893. IRType fromType = TyIllegal;
  9894. StackSym *typeSpecSym = nullptr;
  9895. if (block->globOptData.liveInt32Syms->Test(varSym->m_id) && !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id))
  9896. {
  9897. fromType = TyInt32;
  9898. typeSpecSym = varSym->GetInt32EquivSym(this->func);
  9899. Assert(valueInfo);
  9900. Assert(valueInfo->IsInt());
  9901. }
  9902. else if (block->globOptData.liveFloat64Syms->Test(varSym->m_id))
  9903. {
  9904. fromType = TyFloat64;
  9905. typeSpecSym = varSym->GetFloat64EquivSym(this->func);
  9906. // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any non-number
  9907. // value, even ones that have already been generated before. Float-specialized non-number values cannot be converted
  9908. // back to Var since they will not go back to the original non-number value. The dead-store pass will update the bailout
  9909. // kind on already-generated FromVars based on this bit.
  9910. typeSpecSym->m_requiresBailOnNotNumber = true;
  9911. // A previous float conversion may have used BailOutPrimitiveButString, which does not change the value type to say
  9912. // definitely float, since it can also be a non-string primitive. The convert back to Var though, will cause that
  9913. // bailout kind to be changed to BailOutNumberOnly in the dead-store phase, so from the point of the initial conversion
  9914. // to float, that the value is definitely number. Since we don't know where the FromVar is, change the value type here.
  9915. if(valueInfo)
  9916. {
  9917. if(!valueInfo->IsNumber())
  9918. {
  9919. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  9920. ChangeValueInfo(block, value, valueInfo);
  9921. regOpnd->SetValueType(valueInfo->Type());
  9922. }
  9923. }
  9924. else
  9925. {
  9926. value = NewGenericValue(ValueType::Float);
  9927. valueInfo = value->GetValueInfo();
  9928. block->globOptData.SetValue(value, varSym);
  9929. regOpnd->SetValueType(valueInfo->Type());
  9930. }
  9931. }
  9932. else
  9933. {
  9934. #ifdef ENABLE_SIMDJS
  9935. // SIMD_JS
  9936. Assert(block->globOptData.IsLiveAsSimd128(varSym));
  9937. if (block->globOptData.IsLiveAsSimd128F4(varSym))
  9938. {
  9939. fromType = TySimd128F4;
  9940. }
  9941. else
  9942. {
  9943. Assert(block->globOptData.IsLiveAsSimd128I4(varSym));
  9944. fromType = TySimd128I4;
  9945. }
  9946. if (valueInfo)
  9947. {
  9948. if (fromType == TySimd128F4 && !valueInfo->Type().IsSimd128Float32x4())
  9949. {
  9950. valueInfo = valueInfo->SpecializeToSimd128F4(alloc);
  9951. ChangeValueInfo(block, value, valueInfo);
  9952. regOpnd->SetValueType(valueInfo->Type());
  9953. }
  9954. else if (fromType == TySimd128I4 && !valueInfo->Type().IsSimd128Int32x4())
  9955. {
  9956. if (!valueInfo->Type().IsSimd128Int32x4())
  9957. {
  9958. valueInfo = valueInfo->SpecializeToSimd128I4(alloc);
  9959. ChangeValueInfo(block, value, valueInfo);
  9960. regOpnd->SetValueType(valueInfo->Type());
  9961. }
  9962. }
  9963. }
  9964. else
  9965. {
  9966. ValueType valueType = fromType == TySimd128F4 ? ValueType::GetSimd128(ObjectType::Simd128Float32x4) : ValueType::GetSimd128(ObjectType::Simd128Int32x4);
  9967. value = NewGenericValue(valueType);
  9968. valueInfo = value->GetValueInfo();
  9969. block->globOptData.SetValue(value, varSym);
  9970. regOpnd->SetValueType(valueInfo->Type());
  9971. }
  9972. ValueType valueType = valueInfo->Type();
  9973. // Should be definite if type-specialized
  9974. Assert(valueType.IsSimd128());
  9975. typeSpecSym = varSym->GetSimd128EquivSym(fromType, this->func);
  9976. #else
  9977. Assert(UNREACHED);
  9978. #endif
  9979. }
  9980. AssertOrFailFast(valueInfo);
  9981. int32 intConstantValue;
  9982. if (valueInfo->TryGetIntConstantValue(&intConstantValue))
  9983. {
  9984. // Lower will tag or create a number directly
  9985. newInstr = IR::Instr::New(Js::OpCode::LdC_A_I4, regOpnd,
  9986. IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func), instr->m_func);
  9987. }
  9988. else
  9989. {
  9990. IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, fromType, instr->m_func);
  9991. Js::OpCode opcode = Js::OpCode::ToVar;
  9992. regNew->SetIsJITOptimizedReg(true);
  9993. newInstr = IR::Instr::New(opcode, regOpnd, regNew, instr->m_func);
  9994. }
  9995. newInstr->SetByteCodeOffset(instr);
  9996. newInstr->GetDst()->AsRegOpnd()->SetIsJITOptimizedReg(true);
  9997. ValueType valueType = valueInfo->Type();
  9998. if(fromType == TyInt32)
  9999. {
  10000. #if !INT32VAR // All 32-bit ints are taggable on 64-bit architectures
  10001. IntConstantBounds constantBounds;
  10002. AssertVerify(valueInfo->TryGetIntConstantBounds(&constantBounds));
  10003. if(constantBounds.IsTaggable())
  10004. #endif
  10005. {
  10006. // The value is within the taggable range, so set the opnd value types to TaggedInt to avoid the overflow check
  10007. valueType = ValueType::GetTaggedInt();
  10008. }
  10009. }
  10010. newInstr->GetDst()->SetValueType(valueType);
  10011. newInstr->GetSrc1()->SetValueType(valueType);
  10012. IR::Instr *insertAfterInstr = instr->m_prev;
  10013. if (instr == block->GetLastInstr() &&
  10014. (instr->IsBranchInstr() || instr->m_opcode == Js::OpCode::BailTarget))
  10015. {
  10016. // Don't insert code between the branch and the preceding ByteCodeUses instrs...
  10017. while(insertAfterInstr->m_opcode == Js::OpCode::ByteCodeUses)
  10018. {
  10019. insertAfterInstr = insertAfterInstr->m_prev;
  10020. }
  10021. }
  10022. block->InsertInstrAfter(newInstr, insertAfterInstr);
  10023. block->globOptData.liveVarSyms->Set(varSym->m_id);
  10024. GOPT_TRACE_OPND(regOpnd, _u("Converting to var\n"));
  10025. if (block->loop)
  10026. {
  10027. Assert(!this->IsLoopPrePass());
  10028. this->TryHoistInvariant(newInstr, block, value, value, nullptr, false);
  10029. }
  10030. if (needsUpdate)
  10031. {
  10032. // Make sure that the kill effect of the ToVar instruction is tracked and that the kill of a property
  10033. // type is reflected in the current instruction.
  10034. this->ProcessKills(newInstr);
  10035. this->ValueNumberObjectType(newInstr->GetDst(), newInstr);
  10036. if (instr->GetSrc1() && instr->GetSrc1()->IsSymOpnd() && instr->GetSrc1()->AsSymOpnd()->IsPropertySymOpnd())
  10037. {
  10038. // Reprocess the load source. We need to reset the PropertySymOpnd fields first.
  10039. IR::PropertySymOpnd *propertySymOpnd = instr->GetSrc1()->AsPropertySymOpnd();
  10040. if (propertySymOpnd->IsTypeCheckSeqCandidate())
  10041. {
  10042. propertySymOpnd->SetTypeChecked(false);
  10043. propertySymOpnd->SetTypeAvailable(false);
  10044. propertySymOpnd->SetWriteGuardChecked(false);
  10045. }
  10046. this->FinishOptPropOp(instr, propertySymOpnd);
  10047. instr = this->SetTypeCheckBailOut(instr->GetSrc1(), instr, nullptr);
  10048. }
  10049. }
  10050. return instr;
  10051. }
  10052. IR::Instr *
  10053. GlobOpt::ToInt32(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, bool lossy)
  10054. {
  10055. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyInt32, IR::BailOutIntOnly, lossy);
  10056. }
  10057. IR::Instr *
  10058. GlobOpt::ToFloat64(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IR::BailOutKind bailOutKind)
  10059. {
  10060. return this->ToTypeSpecUse(instr, opnd, block, val, indir, TyFloat64, bailOutKind);
  10061. }
  10062. IR::Instr *
  10063. GlobOpt::ToTypeSpecUse(IR::Instr *instr, IR::Opnd *opnd, BasicBlock *block, Value *val, IR::IndirOpnd *indir, IRType toType, IR::BailOutKind bailOutKind, bool lossy, IR::Instr *insertBeforeInstr)
  10064. {
  10065. Assert(bailOutKind != IR::BailOutInvalid);
  10066. IR::Instr *newInstr;
  10067. if (!val && opnd->IsRegOpnd())
  10068. {
  10069. val = block->globOptData.FindValue(opnd->AsRegOpnd()->m_sym);
  10070. }
  10071. ValueInfo *valueInfo = val ? val->GetValueInfo() : nullptr;
  10072. bool needReplaceSrc = false;
  10073. bool updateBlockLastInstr = false;
  10074. if (instr)
  10075. {
  10076. needReplaceSrc = true;
  10077. if (!insertBeforeInstr)
  10078. {
  10079. insertBeforeInstr = instr;
  10080. }
  10081. }
  10082. else if (!insertBeforeInstr)
  10083. {
  10084. // Insert it at the end of the block
  10085. insertBeforeInstr = block->GetLastInstr();
  10086. if (insertBeforeInstr->IsBranchInstr() || insertBeforeInstr->m_opcode == Js::OpCode::BailTarget)
  10087. {
  10088. // Don't insert code between the branch and the preceding ByteCodeUses instrs...
  10089. while(insertBeforeInstr->m_prev->m_opcode == Js::OpCode::ByteCodeUses)
  10090. {
  10091. insertBeforeInstr = insertBeforeInstr->m_prev;
  10092. }
  10093. }
  10094. else
  10095. {
  10096. insertBeforeInstr = insertBeforeInstr->m_next;
  10097. updateBlockLastInstr = true;
  10098. }
  10099. }
  10100. // Int constant values will be propagated into the instruction. For ArgOut_A_InlineBuiltIn, there's no benefit from
  10101. // const-propping, so those are excluded.
  10102. if (opnd->IsRegOpnd() &&
  10103. !(
  10104. valueInfo &&
  10105. (valueInfo->HasIntConstantValue() || valueInfo->IsFloatConstant()) &&
  10106. (!instr || instr->m_opcode != Js::OpCode::ArgOut_A_InlineBuiltIn)
  10107. ))
  10108. {
  10109. IR::RegOpnd *regSrc = opnd->AsRegOpnd();
  10110. StackSym *varSym = regSrc->m_sym;
  10111. Js::OpCode opcode = Js::OpCode::FromVar;
  10112. if (varSym->IsTypeSpec() || !block->globOptData.liveVarSyms->Test(varSym->m_id))
  10113. {
  10114. // Conversion between int32 and float64
  10115. if (varSym->IsTypeSpec())
  10116. {
  10117. varSym = varSym->GetVarEquivSym(this->func);
  10118. }
  10119. opcode = Js::OpCode::Conv_Prim;
  10120. }
  10121. Assert(block->globOptData.liveVarSyms->Test(varSym->m_id) || block->globOptData.IsTypeSpecialized(varSym));
  10122. StackSym *typeSpecSym = nullptr;
  10123. BOOL isLive = FALSE;
  10124. BVSparse<JitArenaAllocator> *livenessBv = nullptr;
  10125. if(valueInfo && valueInfo->IsInt())
  10126. {
  10127. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  10128. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  10129. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  10130. // lossy state.
  10131. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10132. }
  10133. if (toType == TyInt32)
  10134. {
  10135. // Need to determine whether the conversion is actually lossy or lossless. If the value is an int, then it's a
  10136. // lossless conversion despite the type of conversion requested. The liveness of the converted int32 sym needs to be
  10137. // set to reflect the actual type of conversion done. Also, a lossless conversion needs the value to determine
  10138. // whether the conversion may need to bail out.
  10139. Assert(valueInfo);
  10140. if(valueInfo->IsInt())
  10141. {
  10142. lossy = false;
  10143. }
  10144. else
  10145. {
  10146. Assert(IsLoopPrePass() || !block->globOptData.IsInt32TypeSpecialized(varSym));
  10147. }
  10148. livenessBv = block->globOptData.liveInt32Syms;
  10149. isLive = livenessBv->Test(varSym->m_id) && (lossy || !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id));
  10150. if (this->IsLoopPrePass())
  10151. {
  10152. if(!isLive)
  10153. {
  10154. livenessBv->Set(varSym->m_id);
  10155. if(lossy)
  10156. {
  10157. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  10158. }
  10159. else
  10160. {
  10161. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10162. }
  10163. }
  10164. if(!lossy)
  10165. {
  10166. Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
  10167. valueInfo = valueInfo->SpecializeToInt32(alloc);
  10168. ChangeValueInfo(nullptr, val, valueInfo);
  10169. if(needReplaceSrc)
  10170. {
  10171. opnd->SetValueType(valueInfo->Type());
  10172. }
  10173. }
  10174. return instr;
  10175. }
  10176. typeSpecSym = varSym->GetInt32EquivSym(this->func);
  10177. if (!isLive)
  10178. {
  10179. if (!opnd->IsVar() ||
  10180. !block->globOptData.liveVarSyms->Test(varSym->m_id) ||
  10181. (block->globOptData.liveFloat64Syms->Test(varSym->m_id) && valueInfo && valueInfo->IsLikelyFloat()))
  10182. {
  10183. Assert(block->globOptData.liveFloat64Syms->Test(varSym->m_id));
  10184. if(!lossy && !valueInfo->IsInt())
  10185. {
  10186. // Shouldn't try to do a lossless conversion from float64 to int32 when the value is not known to be an
  10187. // int. There are cases where we need more than two passes over loops to flush out all dependencies.
  10188. // It's possible for the loop prepass to think that a sym s1 remains an int because it acquires the
  10189. // value of another sym s2 that is an int in the prepass at that time. However, s2 can become a float
  10190. // later in the loop body, in which case s1 would become a float on the second iteration of the loop. By
  10191. // that time, we would have already committed to having s1 live as a lossless int on entry into the
  10192. // loop, and we end up having to compensate by doing a lossless conversion from float to int, which will
  10193. // need a bailout and will most likely bail out.
  10194. //
  10195. // If s2 becomes a var instead of a float, then the compensation is legal although not ideal. After
  10196. // enough bailouts, rejit would be triggered with aggressive int type spec turned off. For the
  10197. // float-to-int conversion though, there's no point in emitting a bailout because we already know that
  10198. // the value is a float and has high probability of bailing out (whereas a var has a chance to be a
  10199. // tagged int), and so currently lossless conversion from float to int with bailout is not supported.
  10200. //
  10201. // So, treating this case as a compile-time bailout. The exception will trigger the jit work item to be
  10202. // restarted with aggressive int type specialization disabled.
  10203. if(bailOutKind == IR::BailOutExpectingInteger)
  10204. {
  10205. Assert(IsSwitchOptEnabled());
  10206. throw Js::RejitException(RejitReason::DisableSwitchOptExpectingInteger);
  10207. }
  10208. else
  10209. {
  10210. Assert(DoAggressiveIntTypeSpec());
  10211. if(PHASE_TRACE(Js::BailOutPhase, this->func))
  10212. {
  10213. char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  10214. Output::Print(
  10215. _u("BailOut (compile-time): function: %s (%s) varSym: "),
  10216. this->func->GetJITFunctionBody()->GetDisplayName(),
  10217. this->func->GetDebugNumberSet(debugStringBuffer),
  10218. varSym->m_id);
  10219. #if DBG_DUMP
  10220. varSym->Dump();
  10221. #else
  10222. Output::Print(_u("s%u"), varSym->m_id);
  10223. #endif
  10224. if(varSym->HasByteCodeRegSlot())
  10225. {
  10226. Output::Print(_u(" byteCodeReg: R%u"), varSym->GetByteCodeRegSlot());
  10227. }
  10228. Output::Print(_u(" (lossless conversion from float64 to int32)\n"));
  10229. Output::Flush();
  10230. }
  10231. if(!DoAggressiveIntTypeSpec())
  10232. {
  10233. // Aggressive int type specialization is already off for some reason. Prevent trying to rejit again
  10234. // because it won't help and the same thing will happen again. Just abort jitting this function.
  10235. if(PHASE_TRACE(Js::BailOutPhase, this->func))
  10236. {
  10237. Output::Print(_u(" Aborting JIT because AggressiveIntTypeSpec is already off\n"));
  10238. Output::Flush();
  10239. }
  10240. throw Js::OperationAbortedException();
  10241. }
  10242. throw Js::RejitException(RejitReason::AggressiveIntTypeSpecDisabled);
  10243. }
  10244. }
  10245. if(opnd->IsVar())
  10246. {
  10247. regSrc->SetType(TyFloat64);
  10248. regSrc->m_sym = varSym->GetFloat64EquivSym(this->func);
  10249. opcode = Js::OpCode::Conv_Prim;
  10250. }
  10251. else
  10252. {
  10253. Assert(regSrc->IsFloat64());
  10254. Assert(regSrc->m_sym->IsFloat64());
  10255. Assert(opcode == Js::OpCode::Conv_Prim);
  10256. }
  10257. }
  10258. }
  10259. GOPT_TRACE_OPND(regSrc, _u("Converting to int32\n"));
  10260. }
  10261. else if (toType == TyFloat64)
  10262. {
  10263. // float64
  10264. typeSpecSym = varSym->GetFloat64EquivSym(this->func);
  10265. if(!IsLoopPrePass() && typeSpecSym->m_requiresBailOnNotNumber && block->globOptData.IsFloat64TypeSpecialized(varSym))
  10266. {
  10267. // This conversion is already protected by a BailOutNumberOnly bailout (or at least it will be after the
  10268. // dead-store phase). Since 'requiresBailOnNotNumber' is not flow-based, change the value to definitely float.
  10269. if(valueInfo)
  10270. {
  10271. if(!valueInfo->IsNumber())
  10272. {
  10273. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  10274. ChangeValueInfo(block, val, valueInfo);
  10275. opnd->SetValueType(valueInfo->Type());
  10276. }
  10277. }
  10278. else
  10279. {
  10280. val = NewGenericValue(ValueType::Float);
  10281. valueInfo = val->GetValueInfo();
  10282. block->globOptData.SetValue(val, varSym);
  10283. opnd->SetValueType(valueInfo->Type());
  10284. }
  10285. }
  10286. if(bailOutKind == IR::BailOutNumberOnly)
  10287. {
  10288. if(!IsLoopPrePass())
  10289. {
  10290. // Ensure that all bailout FromVars that generate a value for this type-specialized sym will bail out on any
  10291. // non-number value, even ones that have already been generated before. The dead-store pass will update the
  10292. // bailout kind on already-generated FromVars based on this bit.
  10293. typeSpecSym->m_requiresBailOnNotNumber = true;
  10294. }
  10295. }
  10296. else if(typeSpecSym->m_requiresBailOnNotNumber)
  10297. {
  10298. Assert(bailOutKind == IR::BailOutPrimitiveButString);
  10299. bailOutKind = IR::BailOutNumberOnly;
  10300. }
  10301. livenessBv = block->globOptData.liveFloat64Syms;
  10302. isLive = livenessBv->Test(varSym->m_id);
  10303. if (this->IsLoopPrePass())
  10304. {
  10305. if(!isLive)
  10306. {
  10307. livenessBv->Set(varSym->m_id);
  10308. }
  10309. if (this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
  10310. {
  10311. this->prePassLoop->forceFloat64SymsOnEntry->Set(varSym->m_id);
  10312. }
  10313. else
  10314. {
  10315. Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
  10316. if (symStore && symStore != varSym
  10317. && this->OptIsInvariant(symStore, block, this->prePassLoop, block->globOptData.FindValue(symStore), false, true))
  10318. {
  10319. // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
  10320. // outside the loop.
  10321. this->prePassLoop->forceFloat64SymsOnEntry->Set(symStore->m_id);
  10322. }
  10323. }
  10324. if(bailOutKind == IR::BailOutNumberOnly)
  10325. {
  10326. if(valueInfo)
  10327. {
  10328. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  10329. ChangeValueInfo(block, val, valueInfo);
  10330. }
  10331. else
  10332. {
  10333. val = NewGenericValue(ValueType::Float);
  10334. valueInfo = val->GetValueInfo();
  10335. block->globOptData.SetValue(val, varSym);
  10336. }
  10337. if(needReplaceSrc)
  10338. {
  10339. opnd->SetValueType(valueInfo->Type());
  10340. }
  10341. }
  10342. return instr;
  10343. }
  10344. if (!isLive && regSrc->IsVar())
  10345. {
  10346. if (!block->globOptData.liveVarSyms->Test(varSym->m_id) ||
  10347. (
  10348. block->globOptData.liveInt32Syms->Test(varSym->m_id) &&
  10349. !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
  10350. valueInfo &&
  10351. valueInfo->IsLikelyInt()
  10352. ))
  10353. {
  10354. Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
  10355. Assert(!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)); // Shouldn't try to convert a lossy int32 to anything
  10356. regSrc->SetType(TyInt32);
  10357. regSrc->m_sym = varSym->GetInt32EquivSym(this->func);
  10358. opcode = Js::OpCode::Conv_Prim;
  10359. }
  10360. }
  10361. GOPT_TRACE_OPND(regSrc, _u("Converting to float64\n"));
  10362. }
  10363. #ifdef ENABLE_SIMDJS
  10364. else
  10365. {
  10366. // SIMD_JS
  10367. Assert(IRType_IsSimd128(toType));
  10368. // Get or create type-spec sym
  10369. typeSpecSym = varSym->GetSimd128EquivSym(toType, this->func);
  10370. if (!IsLoopPrePass() && block->globOptData.IsSimd128TypeSpecialized(toType, varSym))
  10371. {
  10372. // Consider: Is this needed ? Shouldn't this have been done at previous FromVar since the simd128 sym is alive ?
  10373. if (valueInfo)
  10374. {
  10375. if (!valueInfo->IsSimd128(toType))
  10376. {
  10377. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  10378. ChangeValueInfo(block, val, valueInfo);
  10379. opnd->SetValueType(valueInfo->Type());
  10380. }
  10381. }
  10382. else
  10383. {
  10384. val = NewGenericValue(GetValueTypeFromIRType(toType));
  10385. valueInfo = val->GetValueInfo();
  10386. block->globOptData.SetValue(val, varSym);
  10387. opnd->SetValueType(valueInfo->Type());
  10388. }
  10389. }
  10390. livenessBv = block->globOptData.GetSimd128LivenessBV(toType);
  10391. isLive = livenessBv->Test(varSym->m_id);
  10392. if (this->IsLoopPrePass())
  10393. {
  10394. // FromVar Hoisting
  10395. BVSparse<Memory::JitArenaAllocator> * forceSimd128SymsOnEntry;
  10396. forceSimd128SymsOnEntry = \
  10397. toType == TySimd128F4 ? this->prePassLoop->forceSimd128F4SymsOnEntry : this->prePassLoop->forceSimd128I4SymsOnEntry;
  10398. if (!isLive)
  10399. {
  10400. livenessBv->Set(varSym->m_id);
  10401. }
  10402. // Be aggressive with hoisting only if value is always initialized to SIMD type before entering loop.
  10403. // This reduces the chance that the FromVar gets executed while the specialized instruction in the loop is not. Leading to unnecessary excessive bailouts.
  10404. if (val && !val->GetValueInfo()->HasBeenUndefined() && !val->GetValueInfo()->HasBeenNull() &&
  10405. this->OptIsInvariant(opnd, block, this->prePassLoop, val, false, true))
  10406. {
  10407. forceSimd128SymsOnEntry->Set(varSym->m_id);
  10408. }
  10409. else
  10410. {
  10411. Sym *symStore = (valueInfo ? valueInfo->GetSymStore() : NULL);
  10412. Value * value = symStore ? block->globOptData.FindValue(symStore) : nullptr;
  10413. if (symStore && symStore != varSym
  10414. && value
  10415. && !value->GetValueInfo()->HasBeenUndefined() && !value->GetValueInfo()->HasBeenNull()
  10416. && this->OptIsInvariant(symStore, block, this->prePassLoop, value, true, true))
  10417. {
  10418. // If symStore is assigned to sym and we want sym to be type-specialized, for symStore to be specialized
  10419. // outside the loop.
  10420. forceSimd128SymsOnEntry->Set(symStore->m_id);
  10421. }
  10422. }
  10423. Assert(bailOutKind == IR::BailOutSimd128F4Only || bailOutKind == IR::BailOutSimd128I4Only);
  10424. // We are in loop prepass, we haven't propagated the value info to the src. Do it now.
  10425. if (valueInfo)
  10426. {
  10427. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  10428. ChangeValueInfo(block, val, valueInfo);
  10429. }
  10430. else
  10431. {
  10432. val = NewGenericValue(GetValueTypeFromIRType(toType));
  10433. valueInfo = val->GetValueInfo();
  10434. block->globOptData.SetValue(val, varSym);
  10435. }
  10436. if (needReplaceSrc)
  10437. {
  10438. opnd->SetValueType(valueInfo->Type());
  10439. }
  10440. return instr;
  10441. }
  10442. GOPT_TRACE_OPND(regSrc, _u("Converting to Simd128\n"));
  10443. }
  10444. #endif
  10445. bool needLoad = false;
  10446. if (needReplaceSrc)
  10447. {
  10448. bool wasDead = regSrc->GetIsDead();
  10449. // needReplaceSrc means we are type specializing a use, and need to replace the src on the instr
  10450. if (!isLive)
  10451. {
  10452. needLoad = true;
  10453. // ReplaceSrc will delete it.
  10454. regSrc = regSrc->Copy(instr->m_func)->AsRegOpnd();
  10455. }
  10456. IR::RegOpnd * regNew = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
  10457. if(valueInfo)
  10458. {
  10459. regNew->SetValueType(valueInfo->Type());
  10460. regNew->m_wasNegativeZeroPreventedByBailout = valueInfo->WasNegativeZeroPreventedByBailout();
  10461. }
  10462. regNew->SetIsDead(wasDead);
  10463. regNew->SetIsJITOptimizedReg(true);
  10464. this->CaptureByteCodeSymUses(instr);
  10465. if (indir == nullptr)
  10466. {
  10467. instr->ReplaceSrc(opnd, regNew);
  10468. }
  10469. else
  10470. {
  10471. indir->ReplaceIndexOpnd(regNew);
  10472. }
  10473. opnd = regNew;
  10474. if (!needLoad)
  10475. {
  10476. Assert(isLive);
  10477. return instr;
  10478. }
  10479. }
  10480. else
  10481. {
  10482. // We just need to insert a load of a type spec sym
  10483. if(isLive)
  10484. {
  10485. return instr;
  10486. }
  10487. // Insert it before the specified instruction
  10488. instr = insertBeforeInstr;
  10489. }
  10490. IR::RegOpnd *regDst = IR::RegOpnd::New(typeSpecSym, toType, instr->m_func);
  10491. bool isBailout = false;
  10492. bool isHoisted = false;
  10493. bool isInLandingPad = (block->next && !block->next->isDeleted && block->next->isLoopHeader);
  10494. if (isInLandingPad)
  10495. {
  10496. Loop *loop = block->next->loop;
  10497. Assert(loop && loop->landingPad == block);
  10498. Assert(loop->bailOutInfo);
  10499. }
  10500. if (opcode == Js::OpCode::FromVar)
  10501. {
  10502. if (toType == TyInt32)
  10503. {
  10504. Assert(valueInfo);
  10505. if (lossy)
  10506. {
  10507. if (!valueInfo->IsPrimitive() && !block->globOptData.IsTypeSpecialized(varSym))
  10508. {
  10509. // Lossy conversions to int32 on non-primitive values may have implicit calls to toString or valueOf, which
  10510. // may be overridden to have a side effect. The side effect needs to happen every time the conversion is
  10511. // supposed to happen, so the resulting lossy int32 value cannot be reused. Bail out on implicit calls.
  10512. Assert(DoLossyIntTypeSpec());
  10513. bailOutKind = IR::BailOutOnNotPrimitive;
  10514. isBailout = true;
  10515. }
  10516. }
  10517. else if (!valueInfo->IsInt())
  10518. {
  10519. // The operand is likely an int (hence the request to convert to int), so bail out if it's not an int. Only
  10520. // bail out if a lossless conversion to int is requested. Lossy conversions to int such as in (a | 0) don't
  10521. // need to bail out.
  10522. if (bailOutKind == IR::BailOutExpectingInteger)
  10523. {
  10524. Assert(IsSwitchOptEnabled());
  10525. }
  10526. else
  10527. {
  10528. Assert(DoAggressiveIntTypeSpec());
  10529. }
  10530. isBailout = true;
  10531. }
  10532. }
  10533. else if (toType == TyFloat64 &&
  10534. (!valueInfo || !valueInfo->IsNumber()))
  10535. {
  10536. // Bailout if converting vars to float if we can't prove they are floats:
  10537. // x = str + float; -> need to bailout if str is a string
  10538. //
  10539. // x = obj * 0.1;
  10540. // y = obj * 0.2; -> if obj has valueof, we'll only call valueof once on the FromVar conversion...
  10541. Assert(bailOutKind != IR::BailOutInvalid);
  10542. isBailout = true;
  10543. }
  10544. #ifdef ENABLE_SIMDJS
  10545. else if (IRType_IsSimd128(toType) &&
  10546. (!valueInfo || !valueInfo->IsSimd128(toType)))
  10547. {
  10548. Assert(toType == TySimd128F4 && bailOutKind == IR::BailOutSimd128F4Only
  10549. || toType == TySimd128I4 && bailOutKind == IR::BailOutSimd128I4Only);
  10550. isBailout = true;
  10551. }
  10552. #endif
  10553. }
  10554. if (isBailout)
  10555. {
  10556. if (isInLandingPad)
  10557. {
  10558. Loop *loop = block->next->loop;
  10559. this->EnsureBailTarget(loop);
  10560. instr = loop->bailOutInfo->bailOutInstr;
  10561. updateBlockLastInstr = false;
  10562. newInstr = IR::BailOutInstr::New(opcode, bailOutKind, loop->bailOutInfo, instr->m_func);
  10563. newInstr->SetDst(regDst);
  10564. newInstr->SetSrc1(regSrc);
  10565. }
  10566. else
  10567. {
  10568. newInstr = IR::BailOutInstr::New(opcode, regDst, regSrc, bailOutKind, instr, instr->m_func);
  10569. }
  10570. }
  10571. else
  10572. {
  10573. newInstr = IR::Instr::New(opcode, regDst, regSrc, instr->m_func);
  10574. }
  10575. newInstr->SetByteCodeOffset(instr);
  10576. instr->InsertBefore(newInstr);
  10577. if (updateBlockLastInstr)
  10578. {
  10579. block->SetLastInstr(newInstr);
  10580. }
  10581. regDst->SetIsJITOptimizedReg(true);
  10582. newInstr->GetSrc1()->AsRegOpnd()->SetIsJITOptimizedReg(true);
  10583. ValueInfo *const oldValueInfo = valueInfo;
  10584. if(valueInfo)
  10585. {
  10586. newInstr->GetSrc1()->SetValueType(valueInfo->Type());
  10587. }
  10588. if(isBailout)
  10589. {
  10590. Assert(opcode == Js::OpCode::FromVar);
  10591. if(toType == TyInt32)
  10592. {
  10593. Assert(valueInfo);
  10594. if(!lossy)
  10595. {
  10596. Assert(bailOutKind == IR::BailOutIntOnly || bailOutKind == IR::BailOutExpectingInteger);
  10597. valueInfo = valueInfo->SpecializeToInt32(alloc, isPerformingLoopBackEdgeCompensation);
  10598. ChangeValueInfo(nullptr, val, valueInfo);
  10599. int32 intConstantValue;
  10600. if(indir && needReplaceSrc && valueInfo->TryGetIntConstantValue(&intConstantValue))
  10601. {
  10602. // A likely-int value can have constant bounds due to conditional branches narrowing its range. Now that
  10603. // the sym has been proven to be an int, the likely-int value, after specialization, will be constant.
  10604. // Replace the index opnd in the indir with an offset.
  10605. Assert(opnd == indir->GetIndexOpnd());
  10606. Assert(indir->GetScale() == 0);
  10607. indir->UnlinkIndexOpnd()->Free(instr->m_func);
  10608. opnd = nullptr;
  10609. indir->SetOffset(intConstantValue);
  10610. }
  10611. }
  10612. }
  10613. else if (toType == TyFloat64)
  10614. {
  10615. if(bailOutKind == IR::BailOutNumberOnly)
  10616. {
  10617. if(valueInfo)
  10618. {
  10619. valueInfo = valueInfo->SpecializeToFloat64(alloc);
  10620. ChangeValueInfo(block, val, valueInfo);
  10621. }
  10622. else
  10623. {
  10624. val = NewGenericValue(ValueType::Float);
  10625. valueInfo = val->GetValueInfo();
  10626. block->globOptData.SetValue(val, varSym);
  10627. }
  10628. }
  10629. }
  10630. else
  10631. {
  10632. Assert(IRType_IsSimd128(toType));
  10633. if (valueInfo)
  10634. {
  10635. valueInfo = valueInfo->SpecializeToSimd128(toType, alloc);
  10636. ChangeValueInfo(block, val, valueInfo);
  10637. }
  10638. else
  10639. {
  10640. val = NewGenericValue(GetValueTypeFromIRType(toType));
  10641. valueInfo = val->GetValueInfo();
  10642. block->globOptData.SetValue(val, varSym);
  10643. }
  10644. }
  10645. }
  10646. if(valueInfo)
  10647. {
  10648. newInstr->GetDst()->SetValueType(valueInfo->Type());
  10649. if(needReplaceSrc && opnd)
  10650. {
  10651. opnd->SetValueType(valueInfo->Type());
  10652. }
  10653. }
  10654. if (block->loop)
  10655. {
  10656. Assert(!this->IsLoopPrePass());
  10657. isHoisted = this->TryHoistInvariant(newInstr, block, val, val, nullptr, false, lossy, false, bailOutKind);
  10658. }
  10659. if (isBailout)
  10660. {
  10661. if (!isHoisted && !isInLandingPad)
  10662. {
  10663. if(valueInfo)
  10664. {
  10665. // Since this is a pre-op bailout, the old value info should be used for the purposes of bailout. For
  10666. // instance, the value info could be LikelyInt but with a constant range. Once specialized to int, the value
  10667. // info would be an int constant. However, the int constant is only guaranteed if the value is actually an
  10668. // int, which this conversion is verifying, so bailout cannot assume the constant value.
  10669. if(oldValueInfo)
  10670. {
  10671. val->SetValueInfo(oldValueInfo);
  10672. }
  10673. else
  10674. {
  10675. block->globOptData.ClearSymValue(varSym);
  10676. }
  10677. }
  10678. // Fill in bail out info if the FromVar is a bailout instr, and it wasn't hoisted as invariant.
  10679. // If it was hoisted, the invariant code will fill out the bailout info with the loop landing pad bailout info.
  10680. this->FillBailOutInfo(block, newInstr->GetBailOutInfo());
  10681. if(valueInfo)
  10682. {
  10683. // Restore the new value info after filling the bailout info
  10684. if(oldValueInfo)
  10685. {
  10686. val->SetValueInfo(valueInfo);
  10687. }
  10688. else
  10689. {
  10690. block->globOptData.SetValue(val, varSym);
  10691. }
  10692. }
  10693. }
  10694. }
  10695. // Now that we've captured the liveness in the bailout info, we can mark this as live.
  10696. // This type specialized sym isn't live if the FromVar bails out.
  10697. livenessBv->Set(varSym->m_id);
  10698. if(toType == TyInt32)
  10699. {
  10700. if(lossy)
  10701. {
  10702. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  10703. }
  10704. else
  10705. {
  10706. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10707. }
  10708. }
  10709. }
  10710. else
  10711. {
  10712. Assert(valueInfo);
  10713. if(opnd->IsRegOpnd() && valueInfo->IsInt())
  10714. {
  10715. // If two syms have the same value, one is lossy-int-specialized, and then the other is int-specialized, the value
  10716. // would have been updated to definitely int. Upon using the lossy-int-specialized sym later, it would be flagged as
  10717. // lossy while the value is definitely int. Since the bit-vectors are based on the sym and not the value, update the
  10718. // lossy state.
  10719. block->globOptData.liveLossyInt32Syms->Clear(opnd->AsRegOpnd()->m_sym->m_id);
  10720. if(toType == TyInt32)
  10721. {
  10722. lossy = false;
  10723. }
  10724. }
  10725. if (this->IsLoopPrePass())
  10726. {
  10727. if(opnd->IsRegOpnd())
  10728. {
  10729. StackSym *const sym = opnd->AsRegOpnd()->m_sym;
  10730. if(toType == TyInt32)
  10731. {
  10732. Assert(!sym->IsTypeSpec());
  10733. block->globOptData.liveInt32Syms->Set(sym->m_id);
  10734. if(lossy)
  10735. {
  10736. block->globOptData.liveLossyInt32Syms->Set(sym->m_id);
  10737. }
  10738. else
  10739. {
  10740. block->globOptData.liveLossyInt32Syms->Clear(sym->m_id);
  10741. }
  10742. }
  10743. else
  10744. {
  10745. Assert(toType == TyFloat64);
  10746. AnalysisAssert(instr);
  10747. StackSym *const varSym = sym->IsTypeSpec() ? sym->GetVarEquivSym(instr->m_func) : sym;
  10748. block->globOptData.liveFloat64Syms->Set(varSym->m_id);
  10749. }
  10750. }
  10751. return instr;
  10752. }
  10753. if (!needReplaceSrc)
  10754. {
  10755. instr = insertBeforeInstr;
  10756. }
  10757. IR::Opnd *constOpnd;
  10758. int32 intConstantValue;
  10759. if(valueInfo->TryGetIntConstantValue(&intConstantValue))
  10760. {
  10761. if(toType == TyInt32)
  10762. {
  10763. constOpnd = IR::IntConstOpnd::New(intConstantValue, TyInt32, instr->m_func);
  10764. }
  10765. else
  10766. {
  10767. Assert(toType == TyFloat64);
  10768. constOpnd = IR::FloatConstOpnd::New(static_cast<FloatConstType>(intConstantValue), TyFloat64, instr->m_func);
  10769. }
  10770. }
  10771. else if(valueInfo->IsFloatConstant())
  10772. {
  10773. const FloatConstType floatValue = valueInfo->AsFloatConstant()->FloatValue();
  10774. if(toType == TyInt32)
  10775. {
  10776. Assert(lossy);
  10777. constOpnd =
  10778. IR::IntConstOpnd::New(
  10779. Js::JavascriptMath::ToInt32(floatValue),
  10780. TyInt32,
  10781. instr->m_func);
  10782. }
  10783. else
  10784. {
  10785. Assert(toType == TyFloat64);
  10786. constOpnd = IR::FloatConstOpnd::New(floatValue, TyFloat64, instr->m_func);
  10787. }
  10788. }
  10789. else
  10790. {
  10791. Assert(opnd->IsVar());
  10792. Assert(opnd->IsAddrOpnd());
  10793. AssertMsg(opnd->AsAddrOpnd()->IsVar(), "We only expect to see addr that are var before lower.");
  10794. // Don't need to capture uses, we are only replacing an addr opnd
  10795. if(toType == TyInt32)
  10796. {
  10797. constOpnd = IR::IntConstOpnd::New(Js::TaggedInt::ToInt32(opnd->AsAddrOpnd()->m_address), TyInt32, instr->m_func);
  10798. }
  10799. else
  10800. {
  10801. Assert(toType == TyFloat64);
  10802. constOpnd = IR::FloatConstOpnd::New(Js::TaggedInt::ToDouble(opnd->AsAddrOpnd()->m_address), TyFloat64, instr->m_func);
  10803. }
  10804. }
  10805. if (toType == TyInt32)
  10806. {
  10807. if (needReplaceSrc)
  10808. {
  10809. CaptureByteCodeSymUses(instr);
  10810. if(indir)
  10811. {
  10812. Assert(opnd == indir->GetIndexOpnd());
  10813. Assert(indir->GetScale() == 0);
  10814. indir->UnlinkIndexOpnd()->Free(instr->m_func);
  10815. indir->SetOffset(constOpnd->AsIntConstOpnd()->AsInt32());
  10816. }
  10817. else
  10818. {
  10819. instr->ReplaceSrc(opnd, constOpnd);
  10820. }
  10821. }
  10822. else
  10823. {
  10824. StackSym *varSym = opnd->AsRegOpnd()->m_sym;
  10825. if(varSym->IsTypeSpec())
  10826. {
  10827. varSym = varSym->GetVarEquivSym(nullptr);
  10828. Assert(varSym);
  10829. }
  10830. if(block->globOptData.liveInt32Syms->TestAndSet(varSym->m_id))
  10831. {
  10832. Assert(!!block->globOptData.liveLossyInt32Syms->Test(varSym->m_id) == lossy);
  10833. }
  10834. else
  10835. {
  10836. if(lossy)
  10837. {
  10838. block->globOptData.liveLossyInt32Syms->Set(varSym->m_id);
  10839. }
  10840. StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
  10841. IR::RegOpnd *int32Reg = IR::RegOpnd::New(int32Sym, TyInt32, instr->m_func);
  10842. int32Reg->SetIsJITOptimizedReg(true);
  10843. newInstr = IR::Instr::New(Js::OpCode::Ld_I4, int32Reg, constOpnd, instr->m_func);
  10844. newInstr->SetByteCodeOffset(instr);
  10845. instr->InsertBefore(newInstr);
  10846. if (updateBlockLastInstr)
  10847. {
  10848. block->SetLastInstr(newInstr);
  10849. }
  10850. }
  10851. }
  10852. }
  10853. else
  10854. {
  10855. StackSym *floatSym;
  10856. bool newFloatSym = false;
  10857. StackSym* varSym;
  10858. if (opnd->IsRegOpnd())
  10859. {
  10860. varSym = opnd->AsRegOpnd()->m_sym;
  10861. if (varSym->IsTypeSpec())
  10862. {
  10863. varSym = varSym->GetVarEquivSym(nullptr);
  10864. Assert(varSym);
  10865. }
  10866. floatSym = varSym->GetFloat64EquivSym(instr->m_func);
  10867. }
  10868. else
  10869. {
  10870. varSym = block->globOptData.GetCopyPropSym(nullptr, val);
  10871. if(!varSym)
  10872. {
  10873. // Clear the symstore to ensure it's set below to this new symbol
  10874. this->SetSymStoreDirect(val->GetValueInfo(), nullptr);
  10875. varSym = StackSym::New(TyVar, instr->m_func);
  10876. newFloatSym = true;
  10877. }
  10878. floatSym = varSym->GetFloat64EquivSym(instr->m_func);
  10879. }
  10880. IR::RegOpnd *floatReg = IR::RegOpnd::New(floatSym, TyFloat64, instr->m_func);
  10881. floatReg->SetIsJITOptimizedReg(true);
  10882. // If the value is not live - let's load it.
  10883. if(!block->globOptData.liveFloat64Syms->TestAndSet(varSym->m_id))
  10884. {
  10885. newInstr = IR::Instr::New(Js::OpCode::LdC_F8_R8, floatReg, constOpnd, instr->m_func);
  10886. newInstr->SetByteCodeOffset(instr);
  10887. instr->InsertBefore(newInstr);
  10888. if (updateBlockLastInstr)
  10889. {
  10890. block->SetLastInstr(newInstr);
  10891. }
  10892. if(newFloatSym)
  10893. {
  10894. block->globOptData.SetValue(val, varSym);
  10895. }
  10896. // Src is always invariant, but check if the dst is, and then hoist.
  10897. if (block->loop &&
  10898. (
  10899. (newFloatSym && block->loop->CanHoistInvariants()) ||
  10900. this->OptIsInvariant(floatReg, block, block->loop, val, false, false)
  10901. ))
  10902. {
  10903. Assert(!this->IsLoopPrePass());
  10904. this->OptHoistInvariant(newInstr, block, block->loop, val, val, nullptr, false);
  10905. }
  10906. }
  10907. if (needReplaceSrc)
  10908. {
  10909. CaptureByteCodeSymUses(instr);
  10910. instr->ReplaceSrc(opnd, floatReg);
  10911. }
  10912. }
  10913. return instr;
  10914. }
  10915. return newInstr;
  10916. }
// Mark the dst register's stack sym as live in its var (boxed) form in the
// given block. Thin wrapper over ToVarStackSym for the common RegOpnd case.
void
GlobOpt::ToVarRegOpnd(IR::RegOpnd *dst, BasicBlock *block)
{
    ToVarStackSym(dst->m_sym, block);
}
  10922. void
  10923. GlobOpt::ToVarStackSym(StackSym *varSym, BasicBlock *block)
  10924. {
  10925. //added another check for sym , in case of asmjs there is mostly no var syms and hence added a new check to see if it is the primary sym
  10926. Assert(!varSym->IsTypeSpec());
  10927. block->globOptData.liveVarSyms->Set(varSym->m_id);
  10928. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  10929. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10930. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  10931. #ifdef ENABLE_SIMDJS
  10932. // SIMD_JS
  10933. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  10934. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  10935. #endif
  10936. }
  10937. void
  10938. GlobOpt::ToInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  10939. {
  10940. StackSym *varSym = dst->m_sym;
  10941. Assert(!varSym->IsTypeSpec());
  10942. if (!this->IsLoopPrePass() && varSym->IsVar())
  10943. {
  10944. StackSym *int32Sym = varSym->GetInt32EquivSym(instr->m_func);
  10945. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  10946. // since we'll just be hammering the symbol.
  10947. dst = instr->UnlinkDst()->AsRegOpnd();
  10948. dst->m_sym = int32Sym;
  10949. dst->SetType(TyInt32);
  10950. instr->SetDst(dst);
  10951. }
  10952. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  10953. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  10954. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  10955. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  10956. #ifdef ENABLE_SIMDJS
  10957. // SIMD_JS
  10958. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  10959. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  10960. #endif
  10961. }
  10962. void
  10963. GlobOpt::ToUInt32Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  10964. {
  10965. // We should be calling only for asmjs function
  10966. Assert(GetIsAsmJSFunc());
  10967. StackSym *varSym = dst->m_sym;
  10968. Assert(!varSym->IsTypeSpec());
  10969. block->globOptData.liveInt32Syms->Set(varSym->m_id);
  10970. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id); // The store makes it lossless
  10971. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  10972. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  10973. #ifdef ENABLE_SIMDJS
  10974. // SIMD_JS
  10975. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  10976. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  10977. #endif
  10978. }
  10979. void
  10980. GlobOpt::ToFloat64Dst(IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  10981. {
  10982. StackSym *varSym = dst->m_sym;
  10983. Assert(!varSym->IsTypeSpec());
  10984. if (!this->IsLoopPrePass() && varSym->IsVar())
  10985. {
  10986. StackSym *float64Sym = varSym->GetFloat64EquivSym(this->func);
  10987. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  10988. // since we'll just be hammering the symbol.
  10989. dst = instr->UnlinkDst()->AsRegOpnd();
  10990. dst->m_sym = float64Sym;
  10991. dst->SetType(TyFloat64);
  10992. instr->SetDst(dst);
  10993. }
  10994. block->globOptData.liveFloat64Syms->Set(varSym->m_id);
  10995. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  10996. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  10997. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  10998. #ifdef ENABLE_SIMDJS
  10999. // SIMD_JS
  11000. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  11001. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  11002. #endif
  11003. }
  11004. #ifdef ENABLE_SIMDJS
  11005. // SIMD_JS
  11006. void
  11007. GlobOpt::ToSimd128Dst(IRType toType, IR::Instr *instr, IR::RegOpnd *dst, BasicBlock *block)
  11008. {
  11009. StackSym *varSym = dst->m_sym;
  11010. Assert(!varSym->IsTypeSpec());
  11011. BVSparse<JitArenaAllocator> * livenessBV = block->globOptData.GetSimd128LivenessBV(toType);
  11012. Assert(livenessBV);
  11013. if (!this->IsLoopPrePass() && varSym->IsVar())
  11014. {
  11015. StackSym *simd128Sym = varSym->GetSimd128EquivSym(toType, this->func);
  11016. // Use UnlinkDst / SetDst to make sure isSingleDef is tracked properly,
  11017. // since we'll just be hammering the symbol.
  11018. dst = instr->UnlinkDst()->AsRegOpnd();
  11019. dst->m_sym = simd128Sym;
  11020. dst->SetType(toType);
  11021. instr->SetDst(dst);
  11022. }
  11023. block->globOptData.liveFloat64Syms->Clear(varSym->m_id);
  11024. block->globOptData.liveVarSyms->Clear(varSym->m_id);
  11025. block->globOptData.liveInt32Syms->Clear(varSym->m_id);
  11026. block->globOptData.liveLossyInt32Syms->Clear(varSym->m_id);
  11027. // SIMD_JS
  11028. block->globOptData.liveSimd128F4Syms->Clear(varSym->m_id);
  11029. block->globOptData.liveSimd128I4Syms->Clear(varSym->m_id);
  11030. livenessBV->Set(varSym->m_id);
  11031. }
  11032. #endif
// Overload for int64 constants: flag dstSym as holding an int64 constant.
// Note: unlike the int32 overload, the constant value is not recorded on the
// sym — SetIsInt64Const takes no argument, so `value` only selects this
// overload (used via the ReplaceWConst template).
static void SetIsConstFlag(StackSym* dstSym, int64 value)
{
    Assert(dstSym);
    dstSym->SetIsInt64Const();
}
// Overload for int32 constants: flag dstSym as an int constant and record the
// folded value on the sym.
static void SetIsConstFlag(StackSym* dstSym, int value)
{
    Assert(dstSym);
    dstSym->SetIsIntConst(value);
}
  11043. static IR::Opnd* CreateIntConstOpnd(IR::Instr* instr, int64 value)
  11044. {
  11045. return (IR::Opnd*)IR::Int64ConstOpnd::New(value, instr->GetDst()->GetType(), instr->m_func);
  11046. }
  11047. static IR::Opnd* CreateIntConstOpnd(IR::Instr* instr, int value)
  11048. {
  11049. IntConstType constVal;
  11050. if (instr->GetDst()->IsUnsigned())
  11051. {
  11052. // we should zero extend in case of uint
  11053. constVal = (uint32)value;
  11054. }
  11055. else
  11056. {
  11057. constVal = value;
  11058. }
  11059. return (IR::Opnd*)IR::IntConstOpnd::New(constVal, instr->GetDst()->GetType(), instr->m_func);
  11060. }
// Rewrite an instruction whose result folded to a compile-time constant:
// src1 becomes the constant operand, src2 is freed, the dst's value is
// re-established as an int-constant value, and the (unchanged) dst opnd is
// returned. T is int (int32 dst) or int64 (int64 dst); the caller is
// responsible for rewriting the opcode afterwards.
template <typename T>
IR::Opnd* GlobOpt::ReplaceWConst(IR::Instr **pInstr, T value, Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    IR::Opnd * constOpnd = CreateIntConstOpnd(instr, value);

    instr->ReplaceSrc1(constOpnd);
    instr->FreeSrc2();

    // Process the new constant src so it gets a value in the current block.
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    StackSym *dstSym = dst->AsRegOpnd()->m_sym;
    if (dstSym->IsSingleDef())
    {
        // Single-def syms can carry the constant directly on the sym.
        SetIsConstFlag(dstSym, value);
    }

    // NOTE(review): %d with an int64 `value` is a mismatched format spec for
    // the trace output — confirm GOPT_TRACE_INSTR's formatter tolerates it.
    GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);
    *pDstVal = GetIntConstantValue(value, instr, dst);
    return dst;
}
// Try to constant-fold a wasm/asm.js binary instruction whose two sources are
// known integer constants of type T (int for int32, int64 for int64 ops).
// On success the instr is rewritten to Ld_I4 of the folded constant, the dst
// is int-specialized, *pDstVal is updated, and true is returned.
template <typename T>
bool GlobOpt::OptConstFoldBinaryWasm(
    IR::Instr** pInstr,
    const Value* src1,
    const Value* src2,
    Value **pDstVal)
{
    IR::Instr* &instr = *pInstr;
    if (!DoConstFold())
    {
        return false;
    }

    // Both sources must have exact int-constant values.
    T src1IntConstantValue, src2IntConstantValue;
    if (!src1 || !src1->GetValueInfo()->TryGetIntConstantValue(&src1IntConstantValue, false) || //a bit sketchy: false for int32 means likelyInt = false
        !src2 || !src2->GetValueInfo()->TryGetIntConstantValue(&src2IntConstantValue, false) //and unsigned = false for int64
        )
    {
        return false;
    }

    // BinaryCalculatorT does the arithmetic and may refuse the fold; the
    // IsWasmFunction flag lets it apply wasm-specific semantics.
    int64 tmpValueOut;
    if (!instr->BinaryCalculatorT<T>(src1IntConstantValue, src2IntConstantValue, &tmpValueOut, func->GetJITFunctionBody()->IsWasmFunction()))
    {
        return false;
    }

    this->CaptureByteCodeSymUses(instr);

    // Pick the int64 or int32 overload based on the dst type.
    IR::Opnd *dst = (instr->GetDst()->IsInt64()) ? //dst can be int32 for int64 comparison operators
        ReplaceWConst(pInstr, tmpValueOut, pDstVal) :
        ReplaceWConst(pInstr, (int)tmpValueOut, pDstVal);

    instr->m_opcode = Js::OpCode::Ld_I4;
    this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    return true;
}
// Try to constant-fold a binary instruction given int-constant bounds for its
// sources. Non-branch instructions fold only when both bounds are exact
// constants. On success, rewrites the instr to a constant load (LdC_A_I4 when
// type-spec is off, Ld_I4 otherwise), updates *pDstVal, and returns true.
bool
GlobOpt::OptConstFoldBinary(
    IR::Instr * *pInstr,
    const IntConstantBounds &src1IntConstantBounds,
    const IntConstantBounds &src2IntConstantBounds,
    Value **pDstVal)
{
    IR::Instr * &instr = *pInstr;
    int32 value;
    IR::IntConstOpnd *constOpnd;

    if (!DoConstFold())
    {
        return false;
    }

    int32 src1IntConstantValue = -1;
    int32 src2IntConstantValue = -1;

    int32 src1MaxIntConstantValue = -1;
    int32 src2MaxIntConstantValue = -1;
    int32 src1MinIntConstantValue = -1;
    int32 src2MinIntConstantValue = -1;

    // NOTE(review): for branches only the min/max bounds are captured here and
    // BinaryCalculator is still invoked with the -1 placeholders — confirm the
    // calculator handles branch opcodes via bounds or rejects them.
    if (instr->IsBranchInstr())
    {
        src1MinIntConstantValue = src1IntConstantBounds.LowerBound();
        src1MaxIntConstantValue = src1IntConstantBounds.UpperBound();
        src2MinIntConstantValue = src2IntConstantBounds.LowerBound();
        src2MaxIntConstantValue = src2IntConstantBounds.UpperBound();
    }
    else if (src1IntConstantBounds.IsConstant() && src2IntConstantBounds.IsConstant())
    {
        src1IntConstantValue = src1IntConstantBounds.LowerBound();
        src2IntConstantValue = src2IntConstantBounds.LowerBound();
    }
    else
    {
        return false;
    }

    // Fold; reject results that don't fit in a signed 32-bit value.
    IntConstType tmpValueOut;
    if (!instr->BinaryCalculator(src1IntConstantValue, src2IntConstantValue, &tmpValueOut, TyInt32)
        || !Math::FitsInDWord(tmpValueOut))
    {
        return false;
    }

    value = (int32)tmpValueOut;

    this->CaptureByteCodeSymUses(instr);
    constOpnd = IR::IntConstOpnd::New(value, TyInt32, instr->m_func);
    instr->ReplaceSrc1(constOpnd);
    instr->FreeSrc2();

    // Process the new constant src so it gets a value in the current block.
    this->OptSrc(constOpnd, &instr);

    IR::Opnd *dst = instr->GetDst();
    Assert(dst->IsRegOpnd());

    StackSym *dstSym = dst->AsRegOpnd()->m_sym;

    if (dstSym->IsSingleDef())
    {
        // Single-def syms can carry the constant directly on the sym.
        dstSym->SetIsIntConst(value);
    }

    GOPT_TRACE_INSTR(instr, _u("Constant folding to %d: \n"), value);

    *pDstVal = GetIntConstantValue(value, instr, dst);

    if (IsTypeSpecPhaseOff(this->func))
    {
        // No type spec: load the constant as a tagged var.
        instr->m_opcode = Js::OpCode::LdC_A_I4;
        this->ToVarRegOpnd(dst->AsRegOpnd(), this->currentBlock);
    }
    else
    {
        // Type spec on: load as a raw int32 and int-specialize the dst.
        instr->m_opcode = Js::OpCode::Ld_I4;
        this->ToInt32Dst(instr, dst->AsRegOpnd(), this->currentBlock);
    }
    return true;
}
// Constant-fold a conditional branch whose outcome `test` is known.
// If taken (test == true) the branch becomes an unconditional Br and the
// fall-through block dies; otherwise the branch becomes a Nop and the target
// block dies. The dead successor edge is then pruned from the flow graph.
// src1Val/src2Val, when provided, are int-specialized first so liveness state
// stays consistent with the folded sources.
void
GlobOpt::OptConstFoldBr(bool test, IR::Instr *instr, Value * src1Val, Value * src2Val)
{
    GOPT_TRACE_INSTR(instr, _u("Constant folding to branch: "));
    BasicBlock *deadBlock;

    if (src1Val)
    {
        this->ToInt32(instr, instr->GetSrc1(), this->currentBlock, src1Val, nullptr, false);
    }

    if (src2Val)
    {
        this->ToInt32(instr, instr->GetSrc2(), this->currentBlock, src2Val, nullptr, false);
    }

    this->CaptureByteCodeSymUses(instr);

    if (test)
    {
        // Always taken: branch unconditionally; the fall-through block (the
        // label right after the branch) is dead along this edge.
        instr->m_opcode = Js::OpCode::Br;

        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        deadBlock = instr->m_next->AsLabelInstr()->GetBasicBlock();
    }
    else
    {
        AssertMsg(instr->m_next->IsLabelInstr(), "Next instr of branch should be a label...");
        if(instr->AsBranchInstr()->IsMultiBranch())
        {
            // Multi-branches have no single dead target to prune; leave as-is.
            return;
        }
        // Never taken: the branch target is dead along this edge and the
        // branch itself becomes a no-op.
        deadBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        instr->FreeSrc1();
        if(instr->GetSrc2())
        {
            instr->FreeSrc2();
        }
        instr->m_opcode = Js::OpCode::Nop;
    }

    // Loop back edge: we would have already decremented data use count for the tail block when we processed the loop header.
    if (!(this->currentBlock->loop && this->currentBlock->loop->GetHeadBlock() == deadBlock))
    {
        this->currentBlock->DecrementDataUseCount();
    }

    this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);
    if (deadBlock->GetPredList()->Count() == 0)
    {
        // No remaining predecessors: nothing will consume this block's data.
        deadBlock->SetDataUseCount(0);
    }
}
  11230. void
  11231. GlobOpt::ChangeValueType(
  11232. BasicBlock *const block,
  11233. Value *const value,
  11234. const ValueType newValueType,
  11235. const bool preserveSubclassInfo,
  11236. const bool allowIncompatibleType) const
  11237. {
  11238. Assert(value);
  11239. // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
  11240. Assert(!value->GetValueInfo()->IsJsType());
  11241. ValueInfo *const valueInfo = value->GetValueInfo();
  11242. const ValueType valueType(valueInfo->Type());
  11243. if(valueType == newValueType && (preserveSubclassInfo || valueInfo->IsGeneric()))
  11244. {
  11245. return;
  11246. }
  11247. // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
  11248. Assert(
  11249. !preserveSubclassInfo ||
  11250. !valueInfo->IsArrayValueInfo() ||
  11251. newValueType.IsObject() && newValueType.GetObjectType() == valueInfo->GetObjectType());
  11252. Assert(!valueInfo->GetSymStore() || !valueInfo->GetSymStore()->IsStackSym() || !valueInfo->GetSymStore()->AsStackSym()->IsFromByteCodeConstantTable());
  11253. ValueInfo *const newValueInfo =
  11254. preserveSubclassInfo
  11255. ? valueInfo->Copy(alloc)
  11256. : valueInfo->CopyWithGenericStructureKind(alloc);
  11257. newValueInfo->Type() = newValueType;
  11258. ChangeValueInfo(block, value, newValueInfo, allowIncompatibleType);
  11259. }
// Install newValueInfo on `value` after verifying the change is legal.
// `block` may be null; when given, call-kill tracking is updated for the
// value-info change. `compensated` is forwarded to that kill tracking.
void
GlobOpt::ChangeValueInfo(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool allowIncompatibleType, const bool compensated) const
{
    Assert(value);
    Assert(newValueInfo);

    // The value type must be changed to something more specific or something more generic. For instance, it would be changed to
    // something more specific if the current value type is LikelyArray and checks have been done to ensure that it's an array,
    // and it would be changed to something more generic if a call kills the Array value type and it must be treated as
    // LikelyArray going forward.

    // There are cases where we change the type because of different profile information, and because of rejit, these profile information
    // may conflict. Need to allow incompatible type in those cause. However, the old type should be indefinite.
    Assert((allowIncompatibleType && !value->GetValueInfo()->IsDefinite()) ||
        AreValueInfosCompatible(newValueInfo, value->GetValueInfo()));

    // ArrayValueInfo has information specific to the array type, so make sure that doesn't change
    Assert(
        !value->GetValueInfo()->IsArrayValueInfo() ||
        !newValueInfo->IsArrayValueInfo() ||
        newValueInfo->GetObjectType() == value->GetValueInfo()->GetObjectType());

    if(block)
    {
        // Keep the valuesToKillOnCalls tracking in sync with the new info.
        TrackValueInfoChangeForKills(block, value, newValueInfo, compensated);
    }
    value->SetValueInfo(newValueInfo);
}
// Returns true when two value infos describe types that may legitimately be
// substituted for one another (one is a subset/refinement of the other, or a
// specialization pass is allowed to make the aggressive assumption). Used by
// ChangeValueInfo's legality assert.
bool
GlobOpt::AreValueInfosCompatible(const ValueInfo *const v0, const ValueInfo *const v1) const
{
    Assert(v0);
    Assert(v1);

    // An uninitialized side is compatible with anything.
    if(v0->IsUninitialized() || v1->IsUninitialized())
    {
        return true;
    }

    const bool doAggressiveIntTypeSpec = DoAggressiveIntTypeSpec();
    if(doAggressiveIntTypeSpec && (v0->IsInt() || v1->IsInt()))
    {
        // Int specialization in some uncommon loop cases involving dependencies, needs to allow specializing values of
        // arbitrary types, even values that are definitely not int, to compensate for aggressive assumptions made by a loop
        // prepass
        return true;
    }

    if ((v0->Type()).IsMixedTypedArrayPair(v1->Type()) || (v1->Type()).IsMixedTypedArrayPair(v0->Type()))
    {
        return true;
    }

    const bool doFloatTypeSpec = DoFloatTypeSpec();
    if(doFloatTypeSpec && (v0->IsFloat() || v1->IsFloat()))
    {
        // Float specialization allows specializing values of arbitrary types, even values that are definitely not float
        return true;
    }

#ifdef ENABLE_SIMDJS
    // SIMD_JS
    if (SIMD128_TYPE_SPEC_FLAG && v0->Type().IsSimd128())
    {
        // We only type-spec Undefined values, Objects (possibly merged SIMD values), or actual SIMD values.

        if (v1->Type().IsLikelyUndefined() || v1->Type().IsLikelyNull())
        {
            return true;
        }

        if (v1->Type().IsLikelyObject() && v1->Type().GetObjectType() == ObjectType::Object)
        {
            return true;
        }

        if (v1->Type().IsSimd128())
        {
            // Two SIMD values are compatible only if they are the same SIMD kind.
            return v0->Type().GetObjectType() == v1->Type().GetObjectType();
        }
    }
#endif

    const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
    const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
    // Compatible when either definite type is a subset of the other, given the
    // currently enabled specialization/hoisting options.
    const auto AreValueTypesCompatible = [=](const ValueType t0, const ValueType t1)
    {
        return
            t0.IsSubsetOf(t1, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec) ||
            t1.IsSubsetOf(t0, doAggressiveIntTypeSpec, doFloatTypeSpec, doArrayMissingValueCheckHoist, doNativeArrayTypeSpec);
    };

    const ValueType t0(v0->Type().ToDefinite()), t1(v1->Type().ToDefinite());
    if(t0.IsLikelyObject() && t1.IsLikelyObject())
    {
        // Check compatibility for the primitive portions and the object portions of the value types separately
        if(AreValueTypesCompatible(t0.ToDefiniteObject(), t1.ToDefiniteObject()) &&
            (
                !t0.HasBeenPrimitive() ||
                !t1.HasBeenPrimitive() ||
                AreValueTypesCompatible(t0.ToDefinitePrimitiveSubset(), t1.ToDefinitePrimitiveSubset())
            ))
        {
            return true;
        }
    }
    else if(AreValueTypesCompatible(t0, t1))
    {
        return true;
    }

    // Last chance: a float-constant on one side paired with a likely-int on the other.
    const FloatConstantValueInfo *floatConstantValueInfo;
    const ValueInfo *likelyIntValueinfo;
    if(v0->IsFloatConstant() && v1->IsLikelyInt())
    {
        floatConstantValueInfo = v0->AsFloatConstant();
        likelyIntValueinfo = v1;
    }
    else if(v0->IsLikelyInt() && v1->IsFloatConstant())
    {
        floatConstantValueInfo = v1->AsFloatConstant();
        likelyIntValueinfo = v0;
    }
    else
    {
        return false;
    }

    // A float constant value with a value that is actually an int is a subset of a likely-int value.
    // Ideally, we should create an int constant value for this up front, such that IsInt() also returns true. There
    // were other issues with that, should see if that can be done.
    int32 int32Value;
    return
        Js::JavascriptNumber::TryGetInt32Value(floatConstantValueInfo->FloatValue(), &int32Value) &&
        (!likelyIntValueinfo->IsLikelyTaggedInt() || !Js::TaggedInt::IsOverflow(int32Value));
}
#if DBG
// Debug-only sanity checks on an optimized-array value info that is about to
// be tracked for call-kills: its array-specific details (missing-value bit,
// segment syms, length sym) must only be present when the corresponding
// hoisting optimizations are enabled.
void
GlobOpt::VerifyArrayValueInfoForTracking(
    const ValueInfo *const valueInfo,
    const bool isJsArray,
    const BasicBlock *const block,
    const bool ignoreKnownImplicitCalls) const
{
    Assert(valueInfo);
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(isJsArray == valueInfo->IsArrayOrObjectWithArray());
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
    Assert(block);

    Loop *implicitCallsLoop;
    if(block->next && !block->next->isDeleted && block->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = block->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == block);
    }
    else
    {
        implicitCallsLoop = block->loop;
    }

    // JS arrays may only be tracked when array-check hoisting is possible here
    // (or when the caller explicitly ignores known implicit calls).
    Assert(
        !isJsArray ||
        DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop) ||
        (
            ignoreKnownImplicitCalls &&
            !(implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
        ));
    Assert(!(isJsArray && valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist()));
    Assert(
        !(
            valueInfo->IsArrayValueInfo() &&
            (
                valueInfo->AsArrayValueInfo()->HeadSegmentSym() ||
                valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
            ) &&
            !DoArraySegmentHoist(valueInfo->Type())
        ));
#if 0
    // We can't assert here that there is only a head segment length sym if hoisting is allowed in the current block,
    // because we may have propagated the sym forward out of a loop, and hoisting may be allowed inside but not
    // outside the loop.
    Assert(
        isJsArray ||
        !valueInfo->IsArrayValueInfo() ||
        !valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym() ||
        DoTypedArraySegmentLengthHoist(implicitCallsLoop) ||
        ignoreKnownImplicitCalls ||
        (implicitCallsLoop ? ImplicitCallFlagsAllowOpts(implicitCallsLoop) : ImplicitCallFlagsAllowOpts(func))
    );
#endif
    Assert(
        !(
            isJsArray &&
            valueInfo->IsArrayValueInfo() &&
            valueInfo->AsArrayValueInfo()->LengthSym() &&
            !DoArrayLengthHoist()
        ));
}
#endif
  11445. void
  11446. GlobOpt::TrackNewValueForKills(Value *const value)
  11447. {
  11448. Assert(value);
  11449. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  11450. {
  11451. return;
  11452. }
  11453. DoTrackNewValueForKills(value);
  11454. }
// Set up call-kill tracking for a brand-new optimized-array value. The value
// info is new and unshared, so it may be downgraded in place when the relevant
// hoisting opts are disabled. Only JS arrays (not typed arrays) end up in
// valuesToKillOnCalls.
void
GlobOpt::DoTrackNewValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());
    Assert(!valueInfo->IsArrayValueInfo());

    // The value and value info here are new, so it's okay to modify the value info in-place
    Assert(!valueInfo->GetSymStore());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

    Loop *implicitCallsLoop;
    if(currentBlock->next && !currentBlock->next->isDeleted && currentBlock->next->isLoopHeader)
    {
        // Since a loop's landing pad does not have user code, determine whether disabling implicit calls is allowed in the
        // landing pad based on the loop for which this block is the landing pad.
        implicitCallsLoop = currentBlock->next->loop;
        Assert(implicitCallsLoop);
        Assert(implicitCallsLoop->landingPad == currentBlock);
    }
    else
    {
        implicitCallsLoop = currentBlock->loop;
    }

    if(isJsArray)
    {
        if(!DoArrayCheckHoist(valueInfo->Type(), implicitCallsLoop))
        {
            // Array opts are disabled for this value type, so treat it as an indefinite value type going forward
            valueInfo->Type() = valueInfo->Type().ToLikely();
            return;
        }

        if(valueInfo->HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
        {
            // Can't rely on the no-missing-values bit without the hoisting opt.
            valueInfo->Type() = valueInfo->Type().SetHasNoMissingValues(false);
        }
    }

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    if(!isJsArray)
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    CurrentBlockData()->valuesToKillOnCalls->Add(value);
}
  11505. void
  11506. GlobOpt::TrackCopiedValueForKills(Value *const value)
  11507. {
  11508. Assert(value);
  11509. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  11510. {
  11511. return;
  11512. }
  11513. DoTrackCopiedValueForKills(value);
  11514. }
// Re-register a copied optimized-array value for call-kill tracking. Unlike
// DoTrackNewValueForKills, the value info may be shared, so it is not mutated
// here. Typed arrays are also tracked when they carry a hoisted
// head-segment-length sym.
void
GlobOpt::DoTrackCopiedValueForKills(Value *const value)
{
    Assert(value);

    ValueInfo *const valueInfo = value->GetValueInfo();
    Assert(valueInfo->IsAnyOptimizedArray());

    const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
    Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

#if DBG
    VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock);
#endif

    if(!isJsArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
    {
        return;
    }

    // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
    // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
    // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
    // revert the value type to a likely version.
    CurrentBlockData()->valuesToKillOnCalls->Add(value);
}
  11536. void
  11537. GlobOpt::TrackMergedValueForKills(
  11538. Value *const value,
  11539. GlobOptBlockData *const blockData,
  11540. BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
  11541. {
  11542. Assert(value);
  11543. if(!value->GetValueInfo()->IsAnyOptimizedArray())
  11544. {
  11545. return;
  11546. }
  11547. DoTrackMergedValueForKills(value, blockData, mergedValueTypesTrackedForKills);
  11548. }
  11549. void
  11550. GlobOpt::DoTrackMergedValueForKills(
  11551. Value *const value,
  11552. GlobOptBlockData *const blockData,
  11553. BVSparse<JitArenaAllocator> *const mergedValueTypesTrackedForKills) const
  11554. {
  11555. Assert(value);
  11556. Assert(blockData);
  11557. ValueInfo *valueInfo = value->GetValueInfo();
  11558. Assert(valueInfo->IsAnyOptimizedArray());
  11559. const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
  11560. Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());
  11561. #if DBG
  11562. VerifyArrayValueInfoForTracking(valueInfo, isJsArray, currentBlock, true);
  11563. #endif
  11564. if(!isJsArray && !(valueInfo->IsArrayValueInfo() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()))
  11565. {
  11566. return;
  11567. }
  11568. // Can't assume going forward that it will definitely be an array without disabling implicit calls, because the
  11569. // array may be transformed into an ES5 array. Since array opts are enabled, implicit calls can be disabled, and we can
  11570. // treat it as a definite value type going forward, but the value needs to be tracked so that something like a call can
  11571. // revert the value type to a likely version.
  11572. if(!mergedValueTypesTrackedForKills || !mergedValueTypesTrackedForKills->TestAndSet(value->GetValueNumber()))
  11573. {
  11574. blockData->valuesToKillOnCalls->Add(value);
  11575. }
  11576. }
  11577. void
  11578. GlobOpt::TrackValueInfoChangeForKills(BasicBlock *const block, Value *const value, ValueInfo *const newValueInfo, const bool compensated) const
  11579. {
  11580. Assert(block);
  11581. Assert(value);
  11582. Assert(newValueInfo);
  11583. ValueInfo *const oldValueInfo = value->GetValueInfo();
  11584. #if DBG
  11585. if(oldValueInfo->IsAnyOptimizedArray())
  11586. {
  11587. VerifyArrayValueInfoForTracking(oldValueInfo, oldValueInfo->IsArrayOrObjectWithArray(), block, compensated);
  11588. }
  11589. #endif
  11590. const bool trackOldValueInfo =
  11591. oldValueInfo->IsArrayOrObjectWithArray() ||
  11592. (
  11593. oldValueInfo->IsOptimizedTypedArray() &&
  11594. oldValueInfo->IsArrayValueInfo() &&
  11595. oldValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
  11596. );
  11597. Assert(trackOldValueInfo == block->globOptData.valuesToKillOnCalls->ContainsKey(value));
  11598. #if DBG
  11599. if(newValueInfo->IsAnyOptimizedArray())
  11600. {
  11601. VerifyArrayValueInfoForTracking(newValueInfo, newValueInfo->IsArrayOrObjectWithArray(), block, compensated);
  11602. }
  11603. #endif
  11604. const bool trackNewValueInfo =
  11605. newValueInfo->IsArrayOrObjectWithArray() ||
  11606. (
  11607. newValueInfo->IsOptimizedTypedArray() &&
  11608. newValueInfo->IsArrayValueInfo() &&
  11609. newValueInfo->AsArrayValueInfo()->HeadSegmentLengthSym()
  11610. );
  11611. if(trackOldValueInfo == trackNewValueInfo)
  11612. {
  11613. return;
  11614. }
  11615. if(trackNewValueInfo)
  11616. {
  11617. block->globOptData.valuesToKillOnCalls->Add(value);
  11618. }
  11619. else
  11620. {
  11621. block->globOptData.valuesToKillOnCalls->Remove(value);
  11622. }
  11623. }
// Applies the array-related kill effects of 'instr' to every value currently tracked in
// the block's valuesToKillOnCalls set. Kills are computed by CheckJsArrayKills(instr).
// Depending on which kills apply, tracked values either have their definite array value
// types reverted to "likely" versions, or have killed syms (head segment, head segment
// length, array length) stripped from their ArrayValueInfo.
void
GlobOpt::ProcessValueKills(IR::Instr *const instr)
{
    Assert(instr);

    ValueSet *const valuesToKillOnCalls = CurrentBlockData()->valuesToKillOnCalls;
    // Outside the loop prepass there is nothing to do when no values are tracked. During the
    // prepass we continue anyway, because the kills must still be merged into the loop's
    // aggregate kill information below.
    if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    const JsArrayKills kills = CheckJsArrayKills(instr);
    // Killing head segments implies killing head segment lengths (asserted, and relied upon
    // by the removal logic at the end of this function).
    Assert(!kills.KillsArrayHeadSegments() || kills.KillsArrayHeadSegmentLengths());

    if(IsLoopPrePass())
    {
        // Accumulate this instruction's kills into the root prepass loop's aggregate, which
        // must remain a subset of the parent loop's aggregate.
        rootLoopPrePass->jsArrayKills = rootLoopPrePass->jsArrayKills.Merge(kills);
        Assert(
            !rootLoopPrePass->parent ||
            rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));

        if(kills.KillsAllArrays())
        {
            // Once everything in the loop is killed anyway, implicit-call bailout checks for
            // JS array check hoisting serve no purpose.
            rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = false;
        }

        if(valuesToKillOnCalls->Count() == 0)
        {
            return;
        }
    }

    if(kills.KillsAllArrays())
    {
        Assert(kills.KillsTypedArrayHeadSegmentLengths());

        // - Calls need to kill the value types of values in the following list. For instance, calls can transform a JS array
        //   into an ES5 array, so any definitely-array value types need to be killed. Also, VirtualTypeArrays do not have
        //   bounds checks; this can be problematic if the array is detached, so check to ensure that it is a virtual array.
        //   Update the value types to likely to ensure a bailout that asserts Array type is generated.
        // - Calls also need to kill typed array head segment lengths. A typed array's array buffer may be transferred to a web
        //   worker, in which case the typed array's length is set to zero.
        for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

            if (valueInfo->IsArrayOrObjectWithArray() || valueInfo->IsOptimizedVirtualTypedArray())
            {
                // JS arrays and virtual typed arrays: demote the definite type to likely.
                ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
                continue;
            }
            // Non-virtual typed arrays: keep the definite type but drop the head segment
            // length sym (second bool), preserving head segment and length syms.
            ChangeValueInfo(
                nullptr,
                value,
                valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
        }
        // Every tracked value has been reverted or stripped, so nothing remains to track.
        valuesToKillOnCalls->Clear();
        return;
    }

    if(kills.KillsArraysWithNoMissingValues())
    {
        // Some operations may kill arrays with no missing values in unlikely circumstances. Convert their value types to likely
        // versions so that the checks have to be redone.
        for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

            if(!valueInfo->IsArrayOrObjectWithArray() || !valueInfo->HasNoMissingValues())
            {
                continue;
            }
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            // Now only "likely", so the value no longer needs call-kill tracking.
            it.RemoveCurrent();
        }
    }

    if(kills.KillsNativeArrays())
    {
        // Some operations may kill native arrays in (what should be) unlikely circumstances. Convert their value types to
        // likely versions so that the checks have to be redone.
        for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
        {
            Value *const value = it.CurrentValue();
            ValueInfo *const valueInfo = value->GetValueInfo();
            Assert(
                valueInfo->IsArrayOrObjectWithArray() ||
                valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

            // Arrays with var elements are not native arrays, so they are unaffected.
            if(!valueInfo->IsArrayOrObjectWithArray() || valueInfo->HasVarElements())
            {
                continue;
            }
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            it.RemoveCurrent();
        }
    }

    const bool likelyKillsJsArraysWithNoMissingValues = IsOperationThatLikelyKillsJsArraysWithNoMissingValues(instr);
    if(!kills.KillsArrayHeadSegmentLengths())
    {
        Assert(!kills.KillsArrayHeadSegments());
        // With no head segment(-length), no-missing-values, or length kills remaining, the
        // final per-value pass below would be a no-op.
        if(!likelyKillsJsArraysWithNoMissingValues && !kills.KillsArrayLengths())
        {
            return;
        }
    }

    // Final pass over the remaining tracked JS array values: clear the no-missing-values bit
    // where likely killed, and strip any killed syms from the ArrayValueInfo.
    for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *valueInfo = value->GetValueInfo();
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        if(!valueInfo->IsArrayOrObjectWithArray())
        {
            continue;
        }

        if(likelyKillsJsArraysWithNoMissingValues && valueInfo->HasNoMissingValues())
        {
            ChangeValueType(nullptr, value, valueInfo->Type().SetHasNoMissingValues(false), true);
            // ChangeValueType replaced the value info; re-read it before inspecting further.
            valueInfo = value->GetValueInfo();
        }

        if(!valueInfo->IsArrayValueInfo())
        {
            continue;
        }

        ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
        const bool removeHeadSegment = kills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
        const bool removeHeadSegmentLength = kills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
        const bool removeLength = kills.KillsArrayLengths() && arrayValueInfo->LengthSym();
        if(removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            // Copy the value info, keeping only the syms that were not killed.
            ChangeValueInfo(
                nullptr,
                value,
                arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
            valueInfo = value->GetValueInfo();
        }
    }
}
// Block-level overload: when the given block (or its loop) allows implicit calls, kills all
// values tracked in blockData->valuesToKillOnCalls, since relying on their definite array
// value types would force implicit calls to be disabled and cause unnecessary bailouts.
void
GlobOpt::ProcessValueKills(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    // Outside the prepass, nothing to do if no values are tracked; during the prepass we may
    // still need to record loop-wide kills below.
    if(!IsLoopPrePass() && valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    // If the current block or loop has implicit calls, kill all definitely-array value types, as using that info will cause
    // implicit calls to be disabled, resulting in unnecessary bailouts
    const bool killValuesOnImplicitCalls =
        (block->loop ? !this->ImplicitCallFlagsAllowOpts(block->loop) : !this->ImplicitCallFlagsAllowOpts(func));
    if (!killValuesOnImplicitCalls)
    {
        return;
    }

    if(IsLoopPrePass() && block->loop == rootLoopPrePass)
    {
        AnalysisAssert(rootLoopPrePass);

        // Record that everything is killed in this loop and all of its ancestors, keeping the
        // subset invariant between a loop's kills and its parent's kills.
        for (Loop * loop = rootLoopPrePass; loop != nullptr; loop = loop->parent)
        {
            loop->jsArrayKills.SetKillsAllArrays();
        }
        Assert(!rootLoopPrePass->parent || rootLoopPrePass->jsArrayKills.AreSubsetOf(rootLoopPrePass->parent->jsArrayKills));

        if(valuesToKillOnCalls->Count() == 0)
        {
            return;
        }
    }

    // Kill every tracked value: JS arrays are demoted to likely value types; typed arrays keep
    // their definite type but lose the head segment length sym (the info a call can invalidate).
    for(auto it = valuesToKillOnCalls->GetIterator(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *const valueInfo = value->GetValueInfo();
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        if(valueInfo->IsArrayOrObjectWithArray())
        {
            ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            continue;
        }
        ChangeValueInfo(
            nullptr,
            value,
            valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
    }
    // All tracked values have been killed; reset the tracking set.
    valuesToKillOnCalls->Clear();
}
// After merging back-edge data into a loop header, demotes tracked array values whose value
// types (or tracked syms) are killed somewhere inside the loop. Compensation code is not added
// on back-edges, so such values cannot enter the loop with definite types or hoisted syms.
void
GlobOpt::ProcessValueKillsForLoopHeaderAfterBackEdgeMerge(BasicBlock *const block, GlobOptBlockData *const blockData)
{
    Assert(block);
    Assert(block->isLoopHeader);
    Assert(blockData);

    ValueSet *const valuesToKillOnCalls = blockData->valuesToKillOnCalls;
    if(valuesToKillOnCalls->Count() == 0)
    {
        return;
    }

    // Aggregate kills collected for the whole loop during the prepass.
    const JsArrayKills loopKills(block->loop->jsArrayKills);
    for(auto it = valuesToKillOnCalls->GetIteratorWithRemovalSupport(); it.IsValid(); it.MoveNext())
    {
        Value *const value = it.CurrentValue();
        ValueInfo *valueInfo = value->GetValueInfo();
        Assert(
            valueInfo->IsArrayOrObjectWithArray() ||
            valueInfo->IsOptimizedTypedArray() && valueInfo->AsArrayValueInfo()->HeadSegmentLengthSym());

        const bool isJsArray = valueInfo->IsArrayOrObjectWithArray();
        Assert(!isJsArray == valueInfo->IsOptimizedTypedArray());

        // JS arrays are killed when the loop kills their value type; typed arrays only when
        // the loop kills typed array head segment lengths.
        if(isJsArray ? loopKills.KillsValueType(valueInfo->Type()) : loopKills.KillsTypedArrayHeadSegmentLengths())
        {
            // Hoisting array checks and other related things for this type is disabled for the loop due to the kill, as
            // compensation code is currently not added on back-edges. When merging values from a back-edge, the array value
            // type cannot be definite, as that may require adding compensation code on the back-edge if the optimization pass
            // chooses to not optimize the array.
            if(isJsArray)
            {
                // Demote the definite JS array type to its likely version.
                ChangeValueType(nullptr, value, valueInfo->Type().ToLikely(), false);
            }
            else
            {
                // Typed array: keep the definite type but drop the head segment length sym.
                ChangeValueInfo(
                    nullptr,
                    value,
                    valueInfo->AsArrayValueInfo()->Copy(alloc, true, false /* copyHeadSegmentLength */, true));
            }
            // The value no longer carries info that calls can kill; stop tracking it.
            it.RemoveCurrent();
            continue;
        }

        if(!isJsArray || !valueInfo->IsArrayValueInfo())
        {
            continue;
        }

        // Similarly, if the loop contains an operation that kills JS array segments, don't make the segment or other related
        // syms available initially inside the loop
        ArrayValueInfo *const arrayValueInfo = valueInfo->AsArrayValueInfo();
        const bool removeHeadSegment = loopKills.KillsArrayHeadSegments() && arrayValueInfo->HeadSegmentSym();
        const bool removeHeadSegmentLength = loopKills.KillsArrayHeadSegmentLengths() && arrayValueInfo->HeadSegmentLengthSym();
        const bool removeLength = loopKills.KillsArrayLengths() && arrayValueInfo->LengthSym();
        if(removeHeadSegment || removeHeadSegmentLength || removeLength)
        {
            // Copy the value info, keeping only the syms the loop does not kill.
            ChangeValueInfo(
                nullptr,
                value,
                arrayValueInfo->Copy(alloc, !removeHeadSegment, !removeHeadSegmentLength, !removeLength));
            valueInfo = value->GetValueInfo();
        }
    }
}
  11871. bool
  11872. GlobOpt::NeedBailOnImplicitCallForLiveValues(BasicBlock const * const block, const bool isForwardPass) const
  11873. {
  11874. if(isForwardPass)
  11875. {
  11876. return block->globOptData.valuesToKillOnCalls->Count() != 0;
  11877. }
  11878. if(block->noImplicitCallUses->IsEmpty())
  11879. {
  11880. Assert(block->noImplicitCallNoMissingValuesUses->IsEmpty());
  11881. Assert(block->noImplicitCallNativeArrayUses->IsEmpty());
  11882. Assert(block->noImplicitCallJsArrayHeadSegmentSymUses->IsEmpty());
  11883. Assert(block->noImplicitCallArrayLengthSymUses->IsEmpty());
  11884. return false;
  11885. }
  11886. return true;
  11887. }
  11888. IR::Instr*
  11889. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, Func* func)
  11890. {
  11891. IR::Instr* instr = IR::Instr::New(Js::OpCode::BoundCheck, func);
  11892. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  11893. }
  11894. IR::Instr*
  11895. GlobOpt::CreateBoundsCheckInstr(IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset, IR::BailOutKind bailoutkind, BailOutInfo* bailoutInfo, Func * func)
  11896. {
  11897. IR::Instr* instr = IR::BailOutInstr::New(Js::OpCode::BoundCheck, bailoutkind, bailoutInfo, func);
  11898. return AttachBoundsCheckData(instr, lowerBound, upperBound, offset);
  11899. }
  11900. IR::Instr*
  11901. GlobOpt::AttachBoundsCheckData(IR::Instr* instr, IR::Opnd* lowerBound, IR::Opnd* upperBound, int offset)
  11902. {
  11903. instr->SetSrc1(lowerBound);
  11904. instr->SetSrc2(upperBound);
  11905. if (offset != 0)
  11906. {
  11907. instr->SetDst(IR::IntConstOpnd::New(offset, TyInt32, instr->m_func));
  11908. }
  11909. return instr;
  11910. }
  11911. void
  11912. GlobOpt::OptArraySrc(IR::Instr * *const instrRef)
  11913. {
  11914. Assert(instrRef);
  11915. IR::Instr *&instr = *instrRef;
  11916. Assert(instr);
  11917. IR::Instr *baseOwnerInstr;
  11918. IR::IndirOpnd *baseOwnerIndir;
  11919. IR::RegOpnd *baseOpnd;
  11920. bool isProfilableLdElem, isProfilableStElem;
  11921. bool isLoad, isStore;
  11922. bool needsHeadSegment, needsHeadSegmentLength, needsLength, needsBoundChecks;
  11923. switch(instr->m_opcode)
  11924. {
  11925. // SIMD_JS
  11926. case Js::OpCode::Simd128_LdArr_F4:
  11927. case Js::OpCode::Simd128_LdArr_I4:
  11928. // no type-spec for Asm.js
  11929. if (this->GetIsAsmJSFunc())
  11930. {
  11931. return;
  11932. }
  11933. // fall through
  11934. case Js::OpCode::LdElemI_A:
  11935. case Js::OpCode::LdMethodElem:
  11936. if(!instr->GetSrc1()->IsIndirOpnd())
  11937. {
  11938. return;
  11939. }
  11940. baseOwnerInstr = nullptr;
  11941. baseOwnerIndir = instr->GetSrc1()->AsIndirOpnd();
  11942. baseOpnd = baseOwnerIndir->GetBaseOpnd();
  11943. isProfilableLdElem = instr->m_opcode == Js::OpCode::LdElemI_A; // LdMethodElem is currently not profiled
  11944. isProfilableLdElem |= Js::IsSimd128Load(instr->m_opcode);
  11945. needsBoundChecks = needsHeadSegmentLength = needsHeadSegment = isLoad = true;
  11946. needsLength = isStore = isProfilableStElem = false;
  11947. break;
  11948. // SIMD_JS
  11949. case Js::OpCode::Simd128_StArr_F4:
  11950. case Js::OpCode::Simd128_StArr_I4:
  11951. if (this->GetIsAsmJSFunc())
  11952. {
  11953. return;
  11954. }
  11955. // fall through
  11956. case Js::OpCode::StElemI_A:
  11957. case Js::OpCode::StElemI_A_Strict:
  11958. case Js::OpCode::StElemC:
  11959. if(!instr->GetDst()->IsIndirOpnd())
  11960. {
  11961. return;
  11962. }
  11963. baseOwnerInstr = nullptr;
  11964. baseOwnerIndir = instr->GetDst()->AsIndirOpnd();
  11965. baseOpnd = baseOwnerIndir->GetBaseOpnd();
  11966. needsBoundChecks = isProfilableStElem = instr->m_opcode != Js::OpCode::StElemC;
  11967. isProfilableStElem |= Js::IsSimd128Store(instr->m_opcode);
  11968. needsHeadSegmentLength = needsHeadSegment = isStore = true;
  11969. needsLength = isLoad = isProfilableLdElem = false;
  11970. break;
  11971. case Js::OpCode::InlineArrayPush:
  11972. case Js::OpCode::InlineArrayPop:
  11973. {
  11974. baseOwnerInstr = instr;
  11975. baseOwnerIndir = nullptr;
  11976. IR::Opnd * thisOpnd = instr->GetSrc1();
  11977. // Return if it not a LikelyArray or Object with Array - No point in doing array check elimination.
  11978. if(!thisOpnd->IsRegOpnd() || !thisOpnd->GetValueType().IsLikelyArrayOrObjectWithArray())
  11979. {
  11980. return;
  11981. }
  11982. baseOpnd = thisOpnd->AsRegOpnd();
  11983. isLoad = instr->m_opcode == Js::OpCode::InlineArrayPop;
  11984. isStore = instr->m_opcode == Js::OpCode::InlineArrayPush;
  11985. needsLength = needsHeadSegmentLength = needsHeadSegment = true;
  11986. needsBoundChecks = isProfilableLdElem = isProfilableStElem = false;
  11987. break;
  11988. }
  11989. case Js::OpCode::LdLen_A:
  11990. if(!instr->GetSrc1()->IsRegOpnd())
  11991. {
  11992. return;
  11993. }
  11994. baseOwnerInstr = instr;
  11995. baseOwnerIndir = nullptr;
  11996. baseOpnd = instr->GetSrc1()->AsRegOpnd();
  11997. if(baseOpnd->GetValueType().IsLikelyObject() &&
  11998. baseOpnd->GetValueType().GetObjectType() == ObjectType::ObjectWithArray)
  11999. {
  12000. return;
  12001. }
  12002. needsLength = true;
  12003. needsBoundChecks =
  12004. needsHeadSegmentLength =
  12005. needsHeadSegment =
  12006. isStore =
  12007. isLoad =
  12008. isProfilableStElem =
  12009. isProfilableLdElem = false;
  12010. break;
  12011. default:
  12012. return;
  12013. }
  12014. Assert(!(baseOwnerInstr && baseOwnerIndir));
  12015. Assert(!needsHeadSegmentLength || needsHeadSegment);
  12016. if(baseOwnerIndir && !IsLoopPrePass())
  12017. {
  12018. // Since this happens before type specialization, make sure that any necessary conversions are done, and that the index
  12019. // is int-specialized if possible such that the const flags are correct.
  12020. ToVarUses(instr, baseOwnerIndir, baseOwnerIndir == instr->GetDst(), nullptr);
  12021. }
  12022. if(isProfilableStElem && !IsLoopPrePass())
  12023. {
  12024. // If the dead-store pass decides to add the bailout kind IR::BailOutInvalidatedArrayHeadSegment, and the fast path is
  12025. // generated, it may bail out before the operation is done, so this would need to be a pre-op bailout.
  12026. if(instr->HasBailOutInfo())
  12027. {
  12028. Assert(
  12029. instr->GetByteCodeOffset() != Js::Constants::NoByteCodeOffset &&
  12030. instr->GetBailOutInfo()->bailOutOffset <= instr->GetByteCodeOffset());
  12031. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  12032. Assert(
  12033. !(bailOutKind & ~IR::BailOutKindBits) ||
  12034. (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
  12035. if(!(bailOutKind & ~IR::BailOutKindBits))
  12036. {
  12037. instr->SetBailOutKind(bailOutKind + IR::BailOutOnImplicitCallsPreOp);
  12038. }
  12039. }
  12040. else
  12041. {
  12042. GenerateBailAtOperation(&instr, IR::BailOutOnImplicitCallsPreOp);
  12043. }
  12044. }
  12045. Value *const baseValue = CurrentBlockData()->FindValue(baseOpnd->m_sym);
  12046. if(!baseValue)
  12047. {
  12048. return;
  12049. }
  12050. ValueInfo *baseValueInfo = baseValue->GetValueInfo();
  12051. ValueType baseValueType(baseValueInfo->Type());
  12052. baseOpnd->SetValueType(baseValueType);
  12053. if(!baseValueType.IsLikelyAnyOptimizedArray() ||
  12054. !DoArrayCheckHoist(baseValueType, currentBlock->loop, instr) ||
  12055. (baseOwnerIndir && !ShouldExpectConventionalArrayIndexValue(baseOwnerIndir)))
  12056. {
  12057. return;
  12058. }
  12059. const bool isLikelyJsArray = !baseValueType.IsLikelyTypedArray();
  12060. Assert(isLikelyJsArray == baseValueType.IsLikelyArrayOrObjectWithArray());
  12061. Assert(!isLikelyJsArray == baseValueType.IsLikelyOptimizedTypedArray());
  12062. if(!isLikelyJsArray && instr->m_opcode == Js::OpCode::LdMethodElem)
  12063. {
  12064. // Fast path is not generated in this case since the subsequent call will throw
  12065. return;
  12066. }
  12067. ValueType newBaseValueType(baseValueType.ToDefiniteObject());
  12068. if(isLikelyJsArray && newBaseValueType.HasNoMissingValues() && !DoArrayMissingValueCheckHoist())
  12069. {
  12070. newBaseValueType = newBaseValueType.SetHasNoMissingValues(false);
  12071. }
  12072. Assert((newBaseValueType == baseValueType) == baseValueType.IsObject());
  12073. ArrayValueInfo *baseArrayValueInfo = nullptr;
  12074. const auto UpdateValue = [&](StackSym *newHeadSegmentSym, StackSym *newHeadSegmentLengthSym, StackSym *newLengthSym)
  12075. {
  12076. Assert(baseValueType.GetObjectType() == newBaseValueType.GetObjectType());
  12077. Assert(newBaseValueType.IsObject());
  12078. Assert(baseValueType.IsLikelyArray() || !newLengthSym);
  12079. if(!(newHeadSegmentSym || newHeadSegmentLengthSym || newLengthSym))
  12080. {
  12081. // We're not adding new information to the value other than changing the value type. Preserve any existing
  12082. // information and just change the value type.
  12083. ChangeValueType(currentBlock, baseValue, newBaseValueType, true);
  12084. return;
  12085. }
  12086. // Merge the new syms into the value while preserving any existing information, and change the value type
  12087. if(baseArrayValueInfo)
  12088. {
  12089. if(!newHeadSegmentSym)
  12090. {
  12091. newHeadSegmentSym = baseArrayValueInfo->HeadSegmentSym();
  12092. }
  12093. if(!newHeadSegmentLengthSym)
  12094. {
  12095. newHeadSegmentLengthSym = baseArrayValueInfo->HeadSegmentLengthSym();
  12096. }
  12097. if(!newLengthSym)
  12098. {
  12099. newLengthSym = baseArrayValueInfo->LengthSym();
  12100. }
  12101. Assert(
  12102. !baseArrayValueInfo->HeadSegmentSym() ||
  12103. newHeadSegmentSym == baseArrayValueInfo->HeadSegmentSym());
  12104. Assert(
  12105. !baseArrayValueInfo->HeadSegmentLengthSym() ||
  12106. newHeadSegmentLengthSym == baseArrayValueInfo->HeadSegmentLengthSym());
  12107. Assert(!baseArrayValueInfo->LengthSym() || newLengthSym == baseArrayValueInfo->LengthSym());
  12108. }
  12109. ArrayValueInfo *const newBaseArrayValueInfo =
  12110. ArrayValueInfo::New(
  12111. alloc,
  12112. newBaseValueType,
  12113. newHeadSegmentSym,
  12114. newHeadSegmentLengthSym,
  12115. newLengthSym,
  12116. baseValueInfo->GetSymStore());
  12117. ChangeValueInfo(currentBlock, baseValue, newBaseArrayValueInfo);
  12118. };
  12119. if(IsLoopPrePass())
  12120. {
  12121. if(newBaseValueType != baseValueType)
  12122. {
  12123. UpdateValue(nullptr, nullptr, nullptr);
  12124. }
  12125. // For javascript arrays and objects with javascript arrays:
  12126. // - Implicit calls need to be disabled and calls cannot be allowed in the loop since the array vtable may be changed
  12127. // into an ES5 array.
  12128. // For typed arrays:
  12129. // - A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the
  12130. // typed array's length is set to zero. Implicit calls need to be disabled if the typed array's head segment length
  12131. // is going to be loaded and used later.
  12132. // Since we don't know if the loop has kills after this instruction, the kill information may not be complete. If a kill
  12133. // is found later, this information will be updated to not require disabling implicit calls.
  12134. if(!(
  12135. isLikelyJsArray
  12136. ? rootLoopPrePass->jsArrayKills.KillsValueType(newBaseValueType)
  12137. : rootLoopPrePass->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
  12138. ))
  12139. {
  12140. rootLoopPrePass->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
  12141. }
  12142. return;
  12143. }
  12144. if(baseValueInfo->IsArrayValueInfo())
  12145. {
  12146. baseArrayValueInfo = baseValueInfo->AsArrayValueInfo();
  12147. }
  12148. const bool doArrayChecks = !baseValueType.IsObject();
  12149. const bool doArraySegmentHoist = DoArraySegmentHoist(baseValueType) && instr->m_opcode != Js::OpCode::StElemC;
  12150. const bool headSegmentIsAvailable = baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym();
  12151. const bool doHeadSegmentLoad = doArraySegmentHoist && needsHeadSegment && !headSegmentIsAvailable;
  12152. const bool doArraySegmentLengthHoist =
  12153. doArraySegmentHoist && (isLikelyJsArray || DoTypedArraySegmentLengthHoist(currentBlock->loop));
  12154. const bool headSegmentLengthIsAvailable = baseArrayValueInfo && baseArrayValueInfo->HeadSegmentLengthSym();
  12155. const bool doHeadSegmentLengthLoad =
  12156. doArraySegmentLengthHoist &&
  12157. (needsHeadSegmentLength || (!isLikelyJsArray && needsLength)) &&
  12158. !headSegmentLengthIsAvailable;
  12159. const bool lengthIsAvailable = baseArrayValueInfo && baseArrayValueInfo->LengthSym();
  12160. const bool doLengthLoad =
  12161. DoArrayLengthHoist() &&
  12162. needsLength &&
  12163. !lengthIsAvailable &&
  12164. baseValueType.IsLikelyArray() &&
  12165. DoLdLenIntSpec(instr->m_opcode == Js::OpCode::LdLen_A ? instr : nullptr, baseValueType);
  12166. StackSym *const newHeadSegmentSym = doHeadSegmentLoad ? StackSym::New(TyMachPtr, instr->m_func) : nullptr;
  12167. StackSym *const newHeadSegmentLengthSym = doHeadSegmentLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
  12168. StackSym *const newLengthSym = doLengthLoad ? StackSym::New(TyUint32, instr->m_func) : nullptr;
  12169. bool canBailOutOnArrayAccessHelperCall;
  12170. if (Js::IsSimd128LoadStore(instr->m_opcode))
  12171. {
  12172. // SIMD_JS
  12173. // simd load/store never call helper
  12174. canBailOutOnArrayAccessHelperCall = true;
  12175. }
  12176. else
  12177. {
  12178. canBailOutOnArrayAccessHelperCall = (isProfilableLdElem || isProfilableStElem) &&
  12179. DoEliminateArrayAccessHelperCall() &&
  12180. !(
  12181. instr->IsProfiledInstr() &&
  12182. (
  12183. isProfilableLdElem
  12184. ? instr->AsProfiledInstr()->u.ldElemInfo->LikelyNeedsHelperCall()
  12185. : instr->AsProfiledInstr()->u.stElemInfo->LikelyNeedsHelperCall()
  12186. )
  12187. );
  12188. }
  12189. bool doExtractBoundChecks = false, eliminatedLowerBoundCheck = false, eliminatedUpperBoundCheck = false;
  12190. StackSym *indexVarSym = nullptr;
  12191. Value *indexValue = nullptr;
  12192. IntConstantBounds indexConstantBounds;
  12193. Value *headSegmentLengthValue = nullptr;
  12194. IntConstantBounds headSegmentLengthConstantBounds;
  12195. #if ENABLE_FAST_ARRAYBUFFER
  12196. if (baseValueType.IsLikelyOptimizedVirtualTypedArray() && !Js::IsSimd128LoadStore(instr->m_opcode) /*Always extract bounds for SIMD */)
  12197. {
  12198. if (isProfilableStElem ||
  12199. !instr->IsDstNotAlwaysConvertedToInt32() ||
  12200. ( (baseValueType.GetObjectType() == ObjectType::Float32VirtualArray ||
  12201. baseValueType.GetObjectType() == ObjectType::Float64VirtualArray) &&
  12202. !instr->IsDstNotAlwaysConvertedToNumber()
  12203. )
  12204. )
  12205. {
  12206. // Unless we're in asm.js (where it is guaranteed that virtual typed array accesses cannot read/write beyond 4GB),
  12207. // check the range of the index to make sure we won't access beyond the reserved memory beforing eliminating bounds
  12208. // checks in jitted code.
  12209. if (!GetIsAsmJSFunc() && baseOwnerIndir)
  12210. {
  12211. IR::RegOpnd * idxOpnd = baseOwnerIndir->GetIndexOpnd();
  12212. if (idxOpnd)
  12213. {
  12214. StackSym * idxSym = idxOpnd->m_sym->IsTypeSpec() ? idxOpnd->m_sym->GetVarEquivSym(nullptr) : idxOpnd->m_sym;
  12215. Value * idxValue = CurrentBlockData()->FindValue(idxSym);
  12216. IntConstantBounds idxConstantBounds;
  12217. if (idxValue && idxValue->GetValueInfo()->TryGetIntConstantBounds(&idxConstantBounds))
  12218. {
  12219. BYTE indirScale = Lowerer::GetArrayIndirScale(baseValueType);
  12220. int32 upperBound = idxConstantBounds.UpperBound();
  12221. int32 lowerBound = idxConstantBounds.LowerBound();
  12222. if (lowerBound >= 0 && ((static_cast<uint64>(upperBound) << indirScale) < MAX_ASMJS_ARRAYBUFFER_LENGTH))
  12223. {
  12224. eliminatedLowerBoundCheck = true;
  12225. eliminatedUpperBoundCheck = true;
  12226. canBailOutOnArrayAccessHelperCall = false;
  12227. }
  12228. }
  12229. }
  12230. }
  12231. else
  12232. {
  12233. if (!baseOwnerIndir)
  12234. {
  12235. Assert(instr->m_opcode == Js::OpCode::InlineArrayPush ||
  12236. instr->m_opcode == Js::OpCode::InlineArrayPop ||
  12237. instr->m_opcode == Js::OpCode::LdLen_A);
  12238. }
  12239. eliminatedLowerBoundCheck = true;
  12240. eliminatedUpperBoundCheck = true;
  12241. canBailOutOnArrayAccessHelperCall = false;
  12242. }
  12243. }
  12244. }
  12245. #endif
  12246. if(needsBoundChecks && DoBoundCheckElimination())
  12247. {
  12248. AnalysisAssert(baseOwnerIndir);
  12249. Assert(needsHeadSegmentLength);
  12250. // Bound checks can be separated from the instruction only if it can bail out instead of making a helper call when a
  12251. // bound check fails. And only if it would bail out, can we use a bound check to eliminate redundant bound checks later
  12252. // on that path.
  12253. doExtractBoundChecks = (headSegmentLengthIsAvailable || doHeadSegmentLengthLoad) && canBailOutOnArrayAccessHelperCall;
  12254. do
  12255. {
  12256. // Get the index value
  12257. IR::RegOpnd *const indexOpnd = baseOwnerIndir->GetIndexOpnd();
  12258. if(indexOpnd)
  12259. {
  12260. StackSym *const indexSym = indexOpnd->m_sym;
  12261. if(indexSym->IsTypeSpec())
  12262. {
  12263. Assert(indexSym->IsInt32());
  12264. indexVarSym = indexSym->GetVarEquivSym(nullptr);
  12265. Assert(indexVarSym);
  12266. indexValue = CurrentBlockData()->FindValue(indexVarSym);
  12267. Assert(indexValue);
  12268. AssertVerify(indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds));
  12269. Assert(indexOpnd->GetType() == TyInt32 || indexOpnd->GetType() == TyUint32);
  12270. Assert(
  12271. (indexOpnd->GetType() == TyUint32) ==
  12272. ValueInfo::IsGreaterThanOrEqualTo(
  12273. indexValue,
  12274. indexConstantBounds.LowerBound(),
  12275. indexConstantBounds.UpperBound(),
  12276. nullptr,
  12277. 0,
  12278. 0));
  12279. if(indexOpnd->GetType() == TyUint32)
  12280. {
  12281. eliminatedLowerBoundCheck = true;
  12282. }
  12283. }
  12284. else
  12285. {
  12286. doExtractBoundChecks = false; // Bound check instruction operates only on int-specialized operands
  12287. indexValue = CurrentBlockData()->FindValue(indexSym);
  12288. if(!indexValue || !indexValue->GetValueInfo()->TryGetIntConstantBounds(&indexConstantBounds))
  12289. {
  12290. break;
  12291. }
  12292. if(ValueInfo::IsGreaterThanOrEqualTo(
  12293. indexValue,
  12294. indexConstantBounds.LowerBound(),
  12295. indexConstantBounds.UpperBound(),
  12296. nullptr,
  12297. 0,
  12298. 0))
  12299. {
  12300. eliminatedLowerBoundCheck = true;
  12301. }
  12302. }
  12303. if(!eliminatedLowerBoundCheck &&
  12304. ValueInfo::IsLessThan(
  12305. indexValue,
  12306. indexConstantBounds.LowerBound(),
  12307. indexConstantBounds.UpperBound(),
  12308. nullptr,
  12309. 0,
  12310. 0))
  12311. {
  12312. eliminatedUpperBoundCheck = true;
  12313. doExtractBoundChecks = false;
  12314. break;
  12315. }
  12316. }
  12317. else
  12318. {
  12319. const int32 indexConstantValue = baseOwnerIndir->GetOffset();
  12320. if(indexConstantValue < 0)
  12321. {
  12322. eliminatedUpperBoundCheck = true;
  12323. doExtractBoundChecks = false;
  12324. break;
  12325. }
  12326. if(indexConstantValue == INT32_MAX)
  12327. {
  12328. eliminatedLowerBoundCheck = true;
  12329. doExtractBoundChecks = false;
  12330. break;
  12331. }
  12332. indexConstantBounds = IntConstantBounds(indexConstantValue, indexConstantValue);
  12333. eliminatedLowerBoundCheck = true;
  12334. }
  12335. if(!headSegmentLengthIsAvailable)
  12336. {
  12337. break;
  12338. }
  12339. headSegmentLengthValue = CurrentBlockData()->FindValue(baseArrayValueInfo->HeadSegmentLengthSym());
  12340. if(!headSegmentLengthValue)
  12341. {
  12342. if(doExtractBoundChecks)
  12343. {
  12344. headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
  12345. }
  12346. break;
  12347. }
  12348. AssertVerify(headSegmentLengthValue->GetValueInfo()->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
  12349. if (ValueInfo::IsLessThanOrEqualTo(
  12350. indexValue,
  12351. indexConstantBounds.LowerBound(),
  12352. indexConstantBounds.UpperBound(),
  12353. headSegmentLengthValue,
  12354. headSegmentLengthConstantBounds.LowerBound(),
  12355. headSegmentLengthConstantBounds.UpperBound(),
  12356. GetBoundCheckOffsetForSimd(newBaseValueType, instr, -1)
  12357. ))
  12358. {
  12359. eliminatedUpperBoundCheck = true;
  12360. if(eliminatedLowerBoundCheck)
  12361. {
  12362. doExtractBoundChecks = false;
  12363. }
  12364. }
  12365. } while(false);
  12366. }
  12367. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad || doExtractBoundChecks)
  12368. {
  12369. // Find the loops out of which array checks and head segment loads need to be hoisted
  12370. Loop *hoistChecksOutOfLoop = nullptr;
  12371. Loop *hoistHeadSegmentLoadOutOfLoop = nullptr;
  12372. Loop *hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
  12373. Loop *hoistLengthLoadOutOfLoop = nullptr;
  12374. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
  12375. {
  12376. for(Loop *loop = currentBlock->loop; loop; loop = loop->parent)
  12377. {
  12378. const JsArrayKills loopKills(loop->jsArrayKills);
  12379. Value *baseValueInLoopLandingPad = nullptr;
  12380. if((isLikelyJsArray && loopKills.KillsValueType(newBaseValueType)) ||
  12381. !OptIsInvariant(baseOpnd->m_sym, currentBlock, loop, baseValue, true, true, &baseValueInLoopLandingPad) ||
  12382. !(doArrayChecks || baseValueInLoopLandingPad->GetValueInfo()->IsObject()))
  12383. {
  12384. break;
  12385. }
  12386. // The value types should be the same, except:
  12387. // - The value type in the landing pad is a type that can merge to a specific object type. Typically, these
  12388. // cases will use BailOnNoProfile, but that can be disabled due to excessive bailouts. Those value types
  12389. // merge aggressively to the other side's object type, so the value type may have started off as
  12390. // Uninitialized, [Likely]Undefined|Null, [Likely]UninitializedObject, etc., and changed in the loop to an
  12391. // array type during a prepass.
  12392. // - StElems in the loop can kill the no-missing-values info.
  12393. // - The native array type may be made more conservative based on profile data by an instruction in the loop.
  12394. #if DBG
  12395. if (!baseValueInLoopLandingPad->GetValueInfo()->CanMergeToSpecificObjectType())
  12396. {
  12397. ValueType landingPadValueType = baseValueInLoopLandingPad->GetValueInfo()->Type();
  12398. Assert(landingPadValueType.IsSimilar(baseValueType) ||
  12399. (
  12400. landingPadValueType.IsLikelyNativeArray() &&
  12401. landingPadValueType.Merge(baseValueType).IsSimilar(baseValueType)
  12402. )
  12403. );
  12404. }
  12405. #endif
  12406. if(doArrayChecks)
  12407. {
  12408. hoistChecksOutOfLoop = loop;
  12409. }
  12410. if(isLikelyJsArray && loopKills.KillsArrayHeadSegments())
  12411. {
  12412. Assert(loopKills.KillsArrayHeadSegmentLengths());
  12413. if(!(doArrayChecks || doLengthLoad))
  12414. {
  12415. break;
  12416. }
  12417. }
  12418. else
  12419. {
  12420. if(doHeadSegmentLoad || headSegmentIsAvailable)
  12421. {
  12422. // If the head segment is already available, we may need to rehoist the value including other
  12423. information. So, need to track the loop out of which the head segment can be hoisted even if
  12424. the head segment is not being loaded here.
  12425. hoistHeadSegmentLoadOutOfLoop = loop;
  12426. }
  12427. if(isLikelyJsArray
  12428. ? loopKills.KillsArrayHeadSegmentLengths()
  12429. : loopKills.KillsTypedArrayHeadSegmentLengths())
  12430. {
  12431. if(!(doArrayChecks || doHeadSegmentLoad || doLengthLoad))
  12432. {
  12433. break;
  12434. }
  12435. }
  12436. else if(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable)
  12437. {
  12438. // If the head segment length is already available, we may need to rehoist the value including other
  12439. // information. So, need to track the loop out of which the head segment length can be hoisted even if
  12440. // the head segment length is not being loaded here.
  12441. hoistHeadSegmentLengthLoadOutOfLoop = loop;
  12442. }
  12443. }
  12444. if(isLikelyJsArray && loopKills.KillsArrayLengths())
  12445. {
  12446. if(!(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad))
  12447. {
  12448. break;
  12449. }
  12450. }
  12451. else if(doLengthLoad || lengthIsAvailable)
  12452. {
  12453. // If the length is already available, we may need to rehoist the value including other information. So,
  12454. // need to track the loop out of which the length can be hoisted even if the length is not
  12455. being loaded here.
  12456. hoistLengthLoadOutOfLoop = loop;
  12457. }
  12458. }
  12459. }
  12460. IR::Instr *insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();
  // Helper: place 'instr' into the landing pad of 'hoistOutOfLoop'.
  // If the loop's landing pad already contains a shared bailout (bail target) instruction,
  // the hoisted instruction is inserted immediately before it, so it is covered by the
  // loop's bail target; otherwise it is inserted via the landing pad block itself
  // (after its last instruction — NOTE(review): per BasicBlock::InsertAfter semantics; confirm).
  // In both cases the byte code offset is copied from the chosen insertion anchor.
  12461. const auto InsertInstrInLandingPad = [&](IR::Instr *const instr, Loop *const hoistOutOfLoop)
  12462. {
  12463. if(hoistOutOfLoop->bailOutInfo->bailOutInstr)
  12464. {
  12465. instr->SetByteCodeOffset(hoistOutOfLoop->bailOutInfo->bailOutInstr);
  12466. hoistOutOfLoop->bailOutInfo->bailOutInstr->InsertBefore(instr);
  12467. }
  12468. else
  12469. {
  12470. instr->SetByteCodeOffset(hoistOutOfLoop->landingPad->GetLastInstr());
  12471. hoistOutOfLoop->landingPad->InsertAfter(instr);
  12472. }
  12473. };
  12474. BailOutInfo *shareableBailOutInfo = nullptr;
  12475. IR::Instr *shareableBailOutInfoOriginalOwner = nullptr;
  // Helper: make 'shareableBailOutInfo' usable by multiple instructions.
  // If the bailout info is still owned by its original instruction, split off a shared
  // bailout instruction (via ShareBailOut()) carrying the same BailOutInfo, move it to
  // just before 'insertBeforeInstr', and advance 'insertBeforeInstr' past it so that
  // subsequently separated instructions are inserted after the shared bail point.
  // No-op if the info has already been shared (bailOutInstr no longer the original owner).
  12476. const auto ShareBailOut = [&]()
  12477. {
  12478. Assert(shareableBailOutInfo);
  12479. if(shareableBailOutInfo->bailOutInstr != shareableBailOutInfoOriginalOwner)
  12480. {
  12481. return;
  12482. }
  12483. Assert(shareableBailOutInfoOriginalOwner->GetBailOutInfo() == shareableBailOutInfo);
  12484. IR::Instr *const sharedBailOut = shareableBailOutInfoOriginalOwner->ShareBailOut();
  12485. Assert(sharedBailOut->GetBailOutInfo() == shareableBailOutInfo);
  12486. shareableBailOutInfoOriginalOwner = nullptr;
  12487. sharedBailOut->Unlink();
  12488. insertBeforeInstr->InsertBefore(sharedBailOut);
  12489. insertBeforeInstr = sharedBailOut;
  12490. };
  12491. if(doArrayChecks)
  12492. {
  12493. TRACE_TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Separating array checks with bailout\n"));
  12494. IR::Instr *bailOnNotArray = IR::Instr::New(Js::OpCode::BailOnNotArray, instr->m_func);
  12495. bailOnNotArray->SetSrc1(baseOpnd);
  12496. bailOnNotArray->GetSrc1()->SetIsJITOptimizedReg(true);
  12497. const IR::BailOutKind bailOutKind =
  12498. newBaseValueType.IsLikelyNativeArray() ? IR::BailOutOnNotNativeArray : IR::BailOutOnNotArray;
  12499. if(hoistChecksOutOfLoop)
  12500. {
  12501. Assert(!(isLikelyJsArray && hoistChecksOutOfLoop->jsArrayKills.KillsValueType(newBaseValueType)));
  12502. TRACE_PHASE_INSTR(
  12503. Js::ArrayCheckHoistPhase,
  12504. instr,
  12505. _u("Hoisting array checks with bailout out of loop %u to landing pad block %u\n"),
  12506. hoistChecksOutOfLoop->GetLoopNumber(),
  12507. hoistChecksOutOfLoop->landingPad->GetBlockNum());
  12508. TESTTRACE_PHASE_INSTR(Js::ArrayCheckHoistPhase, instr, _u("Hoisting array checks with bailout out of loop\n"));
  12509. Assert(hoistChecksOutOfLoop->bailOutInfo);
  12510. EnsureBailTarget(hoistChecksOutOfLoop);
  12511. InsertInstrInLandingPad(bailOnNotArray, hoistChecksOutOfLoop);
  12512. bailOnNotArray = bailOnNotArray->ConvertToBailOutInstr(hoistChecksOutOfLoop->bailOutInfo, bailOutKind);
  12513. }
  12514. else
  12515. {
  12516. bailOnNotArray->SetByteCodeOffset(instr);
  12517. insertBeforeInstr->InsertBefore(bailOnNotArray);
  12518. GenerateBailAtOperation(&bailOnNotArray, bailOutKind);
  12519. shareableBailOutInfo = bailOnNotArray->GetBailOutInfo();
  12520. shareableBailOutInfoOriginalOwner = bailOnNotArray;
  12521. }
  12522. baseValueType = newBaseValueType;
  12523. baseOpnd->SetValueType(newBaseValueType);
  12524. }
  12525. if(doLengthLoad)
  12526. {
  12527. Assert(baseValueType.IsArray());
  12528. Assert(newLengthSym);
  12529. TRACE_TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Separating array length load\n"));
  12530. // Create an initial value for the length
  12531. CurrentBlockData()->liveVarSyms->Set(newLengthSym->m_id);
  12532. Value *const lengthValue = NewIntRangeValue(0, INT32_MAX, false);
  12533. CurrentBlockData()->SetValue(lengthValue, newLengthSym);
  12534. // SetValue above would have set the sym store to newLengthSym. This sym won't be used for copy-prop though, so
  12535. // remove it as the sym store.
  12536. this->SetSymStoreDirect(lengthValue->GetValueInfo(), nullptr);
  12537. // length = [array + offsetOf(length)]
  12538. IR::Instr *const loadLength =
  12539. IR::Instr::New(
  12540. Js::OpCode::LdIndir,
  12541. IR::RegOpnd::New(newLengthSym, newLengthSym->GetType(), instr->m_func),
  12542. IR::IndirOpnd::New(
  12543. baseOpnd,
  12544. Js::JavascriptArray::GetOffsetOfLength(),
  12545. newLengthSym->GetType(),
  12546. instr->m_func),
  12547. instr->m_func);
  12548. loadLength->GetDst()->SetIsJITOptimizedReg(true);
  12549. loadLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  12550. // BailOnNegative length (BailOutOnIrregularLength)
  12551. IR::Instr *bailOnIrregularLength = IR::Instr::New(Js::OpCode::BailOnNegative, instr->m_func);
  12552. bailOnIrregularLength->SetSrc1(loadLength->GetDst());
  12553. const IR::BailOutKind bailOutKind = IR::BailOutOnIrregularLength;
  12554. if(hoistLengthLoadOutOfLoop)
  12555. {
  12556. Assert(!hoistLengthLoadOutOfLoop->jsArrayKills.KillsArrayLengths());
  12557. TRACE_PHASE_INSTR(
  12558. Js::Phase::ArrayLengthHoistPhase,
  12559. instr,
  12560. _u("Hoisting array length load out of loop %u to landing pad block %u\n"),
  12561. hoistLengthLoadOutOfLoop->GetLoopNumber(),
  12562. hoistLengthLoadOutOfLoop->landingPad->GetBlockNum());
  12563. TESTTRACE_PHASE_INSTR(Js::Phase::ArrayLengthHoistPhase, instr, _u("Hoisting array length load out of loop\n"));
  12564. Assert(hoistLengthLoadOutOfLoop->bailOutInfo);
  12565. EnsureBailTarget(hoistLengthLoadOutOfLoop);
  12566. InsertInstrInLandingPad(loadLength, hoistLengthLoadOutOfLoop);
  12567. InsertInstrInLandingPad(bailOnIrregularLength, hoistLengthLoadOutOfLoop);
  12568. bailOnIrregularLength =
  12569. bailOnIrregularLength->ConvertToBailOutInstr(hoistLengthLoadOutOfLoop->bailOutInfo, bailOutKind);
  12570. // Hoist the length value
  12571. for(InvariantBlockBackwardIterator it(
  12572. this,
  12573. currentBlock,
  12574. hoistLengthLoadOutOfLoop->landingPad,
  12575. baseOpnd->m_sym,
  12576. baseValue->GetValueNumber());
  12577. it.IsValid();
  12578. it.MoveNext())
  12579. {
  12580. BasicBlock *const block = it.Block();
  12581. block->globOptData.liveVarSyms->Set(newLengthSym->m_id);
  12582. Assert(!block->globOptData.FindValue(newLengthSym));
  12583. Value *const lengthValueCopy = CopyValue(lengthValue, lengthValue->GetValueNumber());
  12584. block->globOptData.SetValue(lengthValueCopy, newLengthSym);
  12585. this->SetSymStoreDirect(lengthValueCopy->GetValueInfo(), nullptr);
  12586. }
  12587. }
  12588. else
  12589. {
  12590. loadLength->SetByteCodeOffset(instr);
  12591. insertBeforeInstr->InsertBefore(loadLength);
  12592. bailOnIrregularLength->SetByteCodeOffset(instr);
  12593. insertBeforeInstr->InsertBefore(bailOnIrregularLength);
  12594. if(shareableBailOutInfo)
  12595. {
  12596. ShareBailOut();
  12597. bailOnIrregularLength = bailOnIrregularLength->ConvertToBailOutInstr(shareableBailOutInfo, bailOutKind);
  12598. }
  12599. else
  12600. {
  12601. GenerateBailAtOperation(&bailOnIrregularLength, bailOutKind);
  12602. shareableBailOutInfo = bailOnIrregularLength->GetBailOutInfo();
  12603. shareableBailOutInfoOriginalOwner = bailOnIrregularLength;
  12604. }
  12605. }
  12606. }
  // Helper: emit the instruction(s) that load the array's head segment into
  // 'newHeadSegmentSym'. For ObjectType::ObjectWithArray the object-array pointer is
  // loaded first (into the same opnd), then the head segment is loaded through it;
  // otherwise the head segment is loaded directly off the base. If a hoist loop was
  // determined ('hoistHeadSegmentLoadOutOfLoop'), the load(s) go into that loop's
  // landing pad; otherwise they are inserted before the current instruction and
  // 'instr->loadedArrayHeadSegment' is set to record the in-place load.
  12607. const auto InsertHeadSegmentLoad = [&]()
  12608. {
  12609. TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment load\n"));
  12610. Assert(newHeadSegmentSym);
  12611. IR::RegOpnd *const headSegmentOpnd =
  12612. IR::RegOpnd::New(newHeadSegmentSym, newHeadSegmentSym->GetType(), instr->m_func);
  12613. headSegmentOpnd->SetIsJITOptimizedReg(true);
  12614. IR::RegOpnd *const jitOptimizedBaseOpnd = baseOpnd->Copy(instr->m_func)->AsRegOpnd();
  12615. jitOptimizedBaseOpnd->SetIsJITOptimizedReg(true);
  12616. IR::Instr *loadObjectArray;
  12617. if(baseValueType.GetObjectType() == ObjectType::ObjectWithArray)
  12618. {
  // objectArray = [base + offsetOf(objectArray)] — intermediate load reuses headSegmentOpnd
  12619. loadObjectArray =
  12620. IR::Instr::New(
  12621. Js::OpCode::LdIndir,
  12622. headSegmentOpnd,
  12623. IR::IndirOpnd::New(
  12624. jitOptimizedBaseOpnd,
  12625. Js::DynamicObject::GetOffsetOfObjectArray(),
  12626. jitOptimizedBaseOpnd->GetType(),
  12627. instr->m_func),
  12628. instr->m_func);
  12629. }
  12630. else
  12631. {
  12632. loadObjectArray = nullptr;
  12633. }
  // headSegment = [(objectArray | base) + offsetOf(head)]
  12634. IR::Instr *const loadHeadSegment =
  12635. IR::Instr::New(
  12636. Js::OpCode::LdIndir,
  12637. headSegmentOpnd,
  12638. IR::IndirOpnd::New(
  12639. loadObjectArray ? headSegmentOpnd : jitOptimizedBaseOpnd,
  12640. Lowerer::GetArrayOffsetOfHeadSegment(baseValueType),
  12641. headSegmentOpnd->GetType(),
  12642. instr->m_func),
  12643. instr->m_func);
  12644. if(hoistHeadSegmentLoadOutOfLoop)
  12645. {
  // Hoisting is only valid if the loop does not kill JS array head segments
  12646. Assert(!(isLikelyJsArray && hoistHeadSegmentLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegments()));
  12647. TRACE_PHASE_INSTR(
  12648. Js::ArraySegmentHoistPhase,
  12649. instr,
  12650. _u("Hoisting array segment load out of loop %u to landing pad block %u\n"),
  12651. hoistHeadSegmentLoadOutOfLoop->GetLoopNumber(),
  12652. hoistHeadSegmentLoadOutOfLoop->landingPad->GetBlockNum());
  12653. TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment load out of loop\n"));
  12654. if(loadObjectArray)
  12655. {
  12656. InsertInstrInLandingPad(loadObjectArray, hoistHeadSegmentLoadOutOfLoop);
  12657. }
  12658. InsertInstrInLandingPad(loadHeadSegment, hoistHeadSegmentLoadOutOfLoop);
  12659. }
  12660. else
  12661. {
  12662. if(loadObjectArray)
  12663. {
  12664. loadObjectArray->SetByteCodeOffset(instr);
  12665. insertBeforeInstr->InsertBefore(loadObjectArray);
  12666. }
  12667. loadHeadSegment->SetByteCodeOffset(instr);
  12668. insertBeforeInstr->InsertBefore(loadHeadSegment);
  12669. instr->loadedArrayHeadSegment = true;
  12670. }
  12671. };
  12672. if(doHeadSegmentLoad && isLikelyJsArray)
  12673. {
  12674. // For javascript arrays, the head segment is required to load the head segment length
  12675. InsertHeadSegmentLoad();
  12676. }
  12677. if(doHeadSegmentLengthLoad)
  12678. {
  12679. Assert(!isLikelyJsArray || newHeadSegmentSym || baseArrayValueInfo && baseArrayValueInfo->HeadSegmentSym());
  12680. Assert(newHeadSegmentLengthSym);
  12681. Assert(!headSegmentLengthValue);
  12682. TRACE_TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Separating array segment length load\n"));
  12683. // Create an initial value for the head segment length
  12684. CurrentBlockData()->liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
  12685. headSegmentLengthValue = NewIntRangeValue(0, Js::SparseArraySegmentBase::MaxLength, false);
  12686. headSegmentLengthConstantBounds = IntConstantBounds(0, Js::SparseArraySegmentBase::MaxLength);
  12687. CurrentBlockData()->SetValue(headSegmentLengthValue, newHeadSegmentLengthSym);
  12688. // SetValue above would have set the sym store to newHeadSegmentLengthSym. This sym won't be used for copy-prop
  12689. // though, so remove it as the sym store.
  12690. this->SetSymStoreDirect(headSegmentLengthValue->GetValueInfo(), nullptr);
  12691. StackSym *const headSegmentSym =
  12692. isLikelyJsArray
  12693. ? newHeadSegmentSym ? newHeadSegmentSym : baseArrayValueInfo->HeadSegmentSym()
  12694. : nullptr;
  12695. IR::Instr *const loadHeadSegmentLength =
  12696. IR::Instr::New(
  12697. Js::OpCode::LdIndir,
  12698. IR::RegOpnd::New(newHeadSegmentLengthSym, newHeadSegmentLengthSym->GetType(), instr->m_func),
  12699. IR::IndirOpnd::New(
  12700. isLikelyJsArray ? IR::RegOpnd::New(headSegmentSym, headSegmentSym->GetType(), instr->m_func) : baseOpnd,
  12701. isLikelyJsArray
  12702. ? Js::SparseArraySegmentBase::GetOffsetOfLength()
  12703. : Lowerer::GetArrayOffsetOfLength(baseValueType),
  12704. newHeadSegmentLengthSym->GetType(),
  12705. instr->m_func),
  12706. instr->m_func);
  12707. loadHeadSegmentLength->GetDst()->SetIsJITOptimizedReg(true);
  12708. loadHeadSegmentLength->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->SetIsJITOptimizedReg(true);
  12709. // We don't check the head segment length for negative (very large uint32) values. For JS arrays, the bound checks
  12710. // cover that. For typed arrays, we currently don't allocate array buffers with more than 1 GB elements.
  12711. if(hoistHeadSegmentLengthLoadOutOfLoop)
  12712. {
  12713. Assert(
  12714. !(
  12715. isLikelyJsArray
  12716. ? hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsArrayHeadSegmentLengths()
  12717. : hoistHeadSegmentLengthLoadOutOfLoop->jsArrayKills.KillsTypedArrayHeadSegmentLengths()
  12718. ));
  12719. TRACE_PHASE_INSTR(
  12720. Js::ArraySegmentHoistPhase,
  12721. instr,
  12722. _u("Hoisting array segment length load out of loop %u to landing pad block %u\n"),
  12723. hoistHeadSegmentLengthLoadOutOfLoop->GetLoopNumber(),
  12724. hoistHeadSegmentLengthLoadOutOfLoop->landingPad->GetBlockNum());
  12725. TESTTRACE_PHASE_INSTR(Js::ArraySegmentHoistPhase, instr, _u("Hoisting array segment length load out of loop\n"));
  12726. InsertInstrInLandingPad(loadHeadSegmentLength, hoistHeadSegmentLengthLoadOutOfLoop);
  12727. // Hoist the head segment length value
  12728. for(InvariantBlockBackwardIterator it(
  12729. this,
  12730. currentBlock,
  12731. hoistHeadSegmentLengthLoadOutOfLoop->landingPad,
  12732. baseOpnd->m_sym,
  12733. baseValue->GetValueNumber());
  12734. it.IsValid();
  12735. it.MoveNext())
  12736. {
  12737. BasicBlock *const block = it.Block();
  12738. block->globOptData.liveVarSyms->Set(newHeadSegmentLengthSym->m_id);
  12739. Assert(!block->globOptData.FindValue(newHeadSegmentLengthSym));
  12740. Value *const headSegmentLengthValueCopy =
  12741. CopyValue(headSegmentLengthValue, headSegmentLengthValue->GetValueNumber());
  12742. block->globOptData.SetValue(headSegmentLengthValueCopy, newHeadSegmentLengthSym);
  12743. this->SetSymStoreDirect(headSegmentLengthValueCopy->GetValueInfo(), nullptr);
  12744. }
  12745. }
  12746. else
  12747. {
  12748. loadHeadSegmentLength->SetByteCodeOffset(instr);
  12749. insertBeforeInstr->InsertBefore(loadHeadSegmentLength);
  12750. instr->loadedArrayHeadSegmentLength = true;
  12751. }
  12752. }
  12753. if(doExtractBoundChecks)
  12754. {
  12755. Assert(!(eliminatedLowerBoundCheck && eliminatedUpperBoundCheck));
  12756. Assert(baseOwnerIndir);
  12757. Assert(!baseOwnerIndir->GetIndexOpnd() || baseOwnerIndir->GetIndexOpnd()->m_sym->IsTypeSpec());
  12758. Assert(doHeadSegmentLengthLoad || headSegmentLengthIsAvailable);
  12759. Assert(canBailOutOnArrayAccessHelperCall);
  12760. Assert(!isStore || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict || Js::IsSimd128LoadStore(instr->m_opcode));
  12761. StackSym *const headSegmentLengthSym =
  12762. headSegmentLengthIsAvailable ? baseArrayValueInfo->HeadSegmentLengthSym() : newHeadSegmentLengthSym;
  12763. Assert(headSegmentLengthSym);
  12764. Assert(headSegmentLengthValue);
  12765. ArrayLowerBoundCheckHoistInfo lowerBoundCheckHoistInfo;
  12766. ArrayUpperBoundCheckHoistInfo upperBoundCheckHoistInfo;
  12767. bool failedToUpdateCompatibleLowerBoundCheck = false, failedToUpdateCompatibleUpperBoundCheck = false;
  12768. if(DoBoundCheckHoist())
  12769. {
  12770. if(indexVarSym)
  12771. {
  12772. TRACE_PHASE_INSTR_VERBOSE(
  12773. Js::Phase::BoundCheckHoistPhase,
  12774. instr,
  12775. _u("Determining array bound check hoistability for index s%u\n"),
  12776. indexVarSym->m_id);
  12777. }
  12778. else
  12779. {
  12780. TRACE_PHASE_INSTR_VERBOSE(
  12781. Js::Phase::BoundCheckHoistPhase,
  12782. instr,
  12783. _u("Determining array bound check hoistability for index %d\n"),
  12784. indexConstantBounds.LowerBound());
  12785. }
  12786. DetermineArrayBoundCheckHoistability(
  12787. !eliminatedLowerBoundCheck,
  12788. !eliminatedUpperBoundCheck,
  12789. lowerBoundCheckHoistInfo,
  12790. upperBoundCheckHoistInfo,
  12791. isLikelyJsArray,
  12792. indexVarSym,
  12793. indexValue,
  12794. indexConstantBounds,
  12795. headSegmentLengthSym,
  12796. headSegmentLengthValue,
  12797. headSegmentLengthConstantBounds,
  12798. hoistHeadSegmentLengthLoadOutOfLoop,
  12799. failedToUpdateCompatibleLowerBoundCheck,
  12800. failedToUpdateCompatibleUpperBoundCheck);
  12801. #ifdef ENABLE_SIMDJS
  12802. // SIMD_JS
  12803. UpdateBoundCheckHoistInfoForSimd(upperBoundCheckHoistInfo, newBaseValueType, instr);
  12804. #endif
  12805. }
  12806. if(!eliminatedLowerBoundCheck)
  12807. {
  12808. eliminatedLowerBoundCheck = true;
  12809. Assert(indexVarSym);
  12810. Assert(baseOwnerIndir->GetIndexOpnd());
  12811. Assert(indexValue);
  12812. ArrayLowerBoundCheckHoistInfo &hoistInfo = lowerBoundCheckHoistInfo;
  12813. if(hoistInfo.HasAnyInfo())
  12814. {
  12815. BasicBlock *hoistBlock;
  12816. if(hoistInfo.CompatibleBoundCheckBlock())
  12817. {
  12818. hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
  12819. TRACE_PHASE_INSTR(
  12820. Js::Phase::BoundCheckHoistPhase,
  12821. instr,
  12822. _u("Hoisting array lower bound check into existing bound check instruction in block %u\n"),
  12823. hoistBlock->GetBlockNum());
  12824. TESTTRACE_PHASE_INSTR(
  12825. Js::Phase::BoundCheckHoistPhase,
  12826. instr,
  12827. _u("Hoisting array lower bound check into existing bound check instruction\n"));
  12828. }
  12829. else
  12830. {
  12831. Assert(hoistInfo.Loop());
  12832. BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
  12833. hoistBlock = landingPad;
  12834. StackSym *indexIntSym;
  12835. if(hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
  12836. {
  12837. if(!landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()))
  12838. {
  12839. // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize
  12840. // it in this block if it is invariant, as the conversion will be hoisted along with value
  12841. // updates.
  12842. BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
  12843. IR::Instr *specializeBeforeInstr = nullptr;
  12844. if(!CurrentBlockData()->IsInt32TypeSpecialized(hoistInfo.IndexSym()) &&
  12845. OptIsInvariant(
  12846. hoistInfo.IndexSym(),
  12847. currentBlock,
  12848. hoistInfo.Loop(),
  12849. CurrentBlockData()->FindValue(hoistInfo.IndexSym()),
  12850. false,
  12851. true))
  12852. {
  12853. specializationBlock = currentBlock;
  12854. specializeBeforeInstr = insertBeforeInstr;
  12855. }
  12856. Assert(tempBv->IsEmpty());
  12857. tempBv->Set(hoistInfo.IndexSym()->m_id);
  12858. ToInt32(tempBv, specializationBlock, false, specializeBeforeInstr);
  12859. tempBv->ClearAll();
  12860. Assert(landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()));
  12861. }
  12862. indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
  12863. Assert(indexIntSym);
  12864. }
  12865. else
  12866. {
  12867. indexIntSym = hoistInfo.IndexSym();
  12868. Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
  12869. }
  12870. // The info in the landing pad may be better than the info in the current block due to changes made to
  12871. // the index sym inside the loop. Check if the bound check we intend to hoist is unnecessary in the
  12872. // landing pad.
  12873. if(!ValueInfo::IsLessThanOrEqualTo(
  12874. nullptr,
  12875. 0,
  12876. 0,
  12877. hoistInfo.IndexValue(),
  12878. hoistInfo.IndexConstantBounds().LowerBound(),
  12879. hoistInfo.IndexConstantBounds().UpperBound(),
  12880. hoistInfo.Offset()))
  12881. {
  12882. Assert(hoistInfo.IndexSym());
  12883. Assert(hoistInfo.Loop()->bailOutInfo);
  12884. EnsureBailTarget(hoistInfo.Loop());
  12885. if(hoistInfo.LoopCount())
  12886. {
  12887. // Generate the loop count and loop count based bound that will be used for the bound check
  12888. if(!hoistInfo.LoopCount()->HasBeenGenerated())
  12889. {
  12890. GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
  12891. }
  12892. GenerateSecondaryInductionVariableBound(
  12893. hoistInfo.Loop(),
  12894. indexVarSym->GetInt32EquivSym(nullptr),
  12895. hoistInfo.LoopCount(),
  12896. hoistInfo.MaxMagnitudeChange(),
  12897. hoistInfo.IndexSym());
  12898. }
  12899. IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
  12900. IR::Opnd* upperBound = IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func);
  12901. upperBound->SetIsJITOptimizedReg(true);
  12902. // 0 <= indexSym + offset (src1 <= src2 + dst)
  12903. IR::Instr *const boundCheck = CreateBoundsCheckInstr(
  12904. lowerBound,
  12905. upperBound,
  12906. hoistInfo.Offset(),
  12907. hoistInfo.IsLoopCountBasedBound()
  12908. ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
  12909. : IR::BailOutOnFailedHoistedBoundCheck,
  12910. hoistInfo.Loop()->bailOutInfo,
  12911. hoistInfo.Loop()->bailOutInfo->bailOutFunc);
  12912. InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
  12913. TRACE_PHASE_INSTR(
  12914. Js::Phase::BoundCheckHoistPhase,
  12915. instr,
  12916. _u("Hoisting array lower bound check out of loop %u to landing pad block %u, as (0 <= s%u + %d)\n"),
  12917. hoistInfo.Loop()->GetLoopNumber(),
  12918. landingPad->GetBlockNum(),
  12919. hoistInfo.IndexSym()->m_id,
  12920. hoistInfo.Offset());
  12921. TESTTRACE_PHASE_INSTR(
  12922. Js::Phase::BoundCheckHoistPhase,
  12923. instr,
  12924. _u("Hoisting array lower bound check out of loop\n"));
  12925. // Record the bound check instruction as available
  12926. const IntBoundCheck boundCheckInfo(
  12927. ZeroValueNumber,
  12928. hoistInfo.IndexValueNumber(),
  12929. boundCheck,
  12930. landingPad);
  12931. {
  12932. const bool added = CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  12933. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  12934. }
  12935. for(InvariantBlockBackwardIterator it(this, currentBlock, landingPad, nullptr);
  12936. it.IsValid();
  12937. it.MoveNext())
  12938. {
  12939. const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  12940. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  12941. }
  12942. }
  12943. }
  12944. // Update values of the syms involved in the bound check to reflect the bound check
  12945. if(hoistBlock != currentBlock && hoistInfo.IndexSym() && hoistInfo.Offset() != INT32_MIN)
  12946. {
  12947. for(InvariantBlockBackwardIterator it(
  12948. this,
  12949. currentBlock->next,
  12950. hoistBlock,
  12951. hoistInfo.IndexSym(),
  12952. hoistInfo.IndexValueNumber());
  12953. it.IsValid();
  12954. it.MoveNext())
  12955. {
  12956. Value *const value = it.InvariantSymValue();
  12957. IntConstantBounds constantBounds;
  12958. AssertVerify(value->GetValueInfo()->TryGetIntConstantBounds(&constantBounds, true));
  12959. ValueInfo *const newValueInfo =
  12960. UpdateIntBoundsForGreaterThanOrEqual(
  12961. value,
  12962. constantBounds,
  12963. nullptr,
  12964. IntConstantBounds(-hoistInfo.Offset(), -hoistInfo.Offset()),
  12965. false);
  12966. if(newValueInfo)
  12967. {
  12968. ChangeValueInfo(nullptr, value, newValueInfo);
  12969. if(it.Block() == currentBlock && value == indexValue)
  12970. {
  12971. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  12972. }
  12973. }
  12974. }
  12975. }
  12976. }
  12977. else
  12978. {
  12979. IR::Opnd* lowerBound = IR::IntConstOpnd::New(0, TyInt32, instr->m_func, true);
  12980. IR::Opnd* upperBound = baseOwnerIndir->GetIndexOpnd();
  12981. upperBound->SetIsJITOptimizedReg(true);
  12982. const int offset = 0;
  12983. IR::Instr *boundCheck;
  12984. if(shareableBailOutInfo)
  12985. {
  12986. ShareBailOut();
  12987. boundCheck = CreateBoundsCheckInstr(
  12988. lowerBound,
  12989. upperBound,
  12990. offset,
  12991. IR::BailOutOnArrayAccessHelperCall,
  12992. shareableBailOutInfo,
  12993. shareableBailOutInfo->bailOutFunc);
  12994. }
  12995. else
  12996. {
  12997. boundCheck = CreateBoundsCheckInstr(
  12998. lowerBound,
  12999. upperBound,
  13000. offset,
  13001. instr->m_func);
  13002. }
  13003. boundCheck->SetByteCodeOffset(instr);
  13004. insertBeforeInstr->InsertBefore(boundCheck);
  13005. if(!shareableBailOutInfo)
  13006. {
  13007. GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
  13008. shareableBailOutInfo = boundCheck->GetBailOutInfo();
  13009. shareableBailOutInfoOriginalOwner = boundCheck;
  13010. }
  13011. TRACE_PHASE_INSTR(
  13012. Js::Phase::BoundCheckEliminationPhase,
  13013. instr,
  13014. _u("Separating array lower bound check, as (0 <= s%u)\n"),
  13015. indexVarSym->m_id);
  13016. TESTTRACE_PHASE_INSTR(
  13017. Js::Phase::BoundCheckEliminationPhase,
  13018. instr,
  13019. _u("Separating array lower bound check\n"));
  13020. if(DoBoundCheckHoist())
  13021. {
  13022. // Record the bound check instruction as available
  13023. const bool added =
  13024. CurrentBlockData()->availableIntBoundChecks->AddNew(
  13025. IntBoundCheck(ZeroValueNumber, indexValue->GetValueNumber(), boundCheck, currentBlock)) >= 0;
  13026. Assert(added || failedToUpdateCompatibleLowerBoundCheck);
  13027. }
  13028. }
  13029. // Update the index value to reflect the bound check
  13030. ValueInfo *const newValueInfo =
  13031. UpdateIntBoundsForGreaterThanOrEqual(
  13032. indexValue,
  13033. indexConstantBounds,
  13034. nullptr,
  13035. IntConstantBounds(0, 0),
  13036. false);
  13037. if(newValueInfo)
  13038. {
  13039. ChangeValueInfo(nullptr, indexValue, newValueInfo);
  13040. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  13041. }
  13042. }
  13043. if(!eliminatedUpperBoundCheck)
  13044. {
  13045. eliminatedUpperBoundCheck = true;
  13046. ArrayUpperBoundCheckHoistInfo &hoistInfo = upperBoundCheckHoistInfo;
  13047. if(hoistInfo.HasAnyInfo())
  13048. {
  13049. BasicBlock *hoistBlock;
  13050. if(hoistInfo.CompatibleBoundCheckBlock())
  13051. {
  13052. hoistBlock = hoistInfo.CompatibleBoundCheckBlock();
  13053. TRACE_PHASE_INSTR(
  13054. Js::Phase::BoundCheckHoistPhase,
  13055. instr,
  13056. _u("Hoisting array upper bound check into existing bound check instruction in block %u\n"),
  13057. hoistBlock->GetBlockNum());
  13058. TESTTRACE_PHASE_INSTR(
  13059. Js::Phase::BoundCheckHoistPhase,
  13060. instr,
  13061. _u("Hoisting array upper bound check into existing bound check instruction\n"));
  13062. }
  13063. else
  13064. {
  13065. Assert(hoistInfo.Loop());
  13066. BasicBlock *const landingPad = hoistInfo.Loop()->landingPad;
  13067. hoistBlock = landingPad;
  13068. StackSym *indexIntSym;
  13069. if(hoistInfo.IndexSym() && hoistInfo.IndexSym()->IsVar())
  13070. {
  13071. if(!landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()))
  13072. {
  13073. // Int-specialize the index sym, as the BoundCheck instruction requires int operands. Specialize it
  13074. // in this block if it is invariant, as the conversion will be hoisted along with value updates.
  13075. BasicBlock *specializationBlock = hoistInfo.Loop()->landingPad;
  13076. IR::Instr *specializeBeforeInstr = nullptr;
  13077. if(!CurrentBlockData()->IsInt32TypeSpecialized(hoistInfo.IndexSym()) &&
  13078. OptIsInvariant(
  13079. hoistInfo.IndexSym(),
  13080. currentBlock,
  13081. hoistInfo.Loop(),
  13082. CurrentBlockData()->FindValue(hoistInfo.IndexSym()),
  13083. false,
  13084. true))
  13085. {
  13086. specializationBlock = currentBlock;
  13087. specializeBeforeInstr = insertBeforeInstr;
  13088. }
  13089. Assert(tempBv->IsEmpty());
  13090. tempBv->Set(hoistInfo.IndexSym()->m_id);
  13091. ToInt32(tempBv, specializationBlock, false, specializeBeforeInstr);
  13092. tempBv->ClearAll();
  13093. Assert(landingPad->globOptData.IsInt32TypeSpecialized(hoistInfo.IndexSym()));
  13094. }
  13095. indexIntSym = hoistInfo.IndexSym()->GetInt32EquivSym(nullptr);
  13096. Assert(indexIntSym);
  13097. }
  13098. else
  13099. {
  13100. indexIntSym = hoistInfo.IndexSym();
  13101. Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32);
  13102. }
  13103. // The info in the landing pad may be better than the info in the current block due to changes made to the
  13104. // index sym inside the loop. Check if the bound check we intend to hoist is unnecessary in the landing pad.
  13105. if(!ValueInfo::IsLessThanOrEqualTo(
  13106. hoistInfo.IndexValue(),
  13107. hoistInfo.IndexConstantBounds().LowerBound(),
  13108. hoistInfo.IndexConstantBounds().UpperBound(),
  13109. hoistInfo.HeadSegmentLengthValue(),
  13110. hoistInfo.HeadSegmentLengthConstantBounds().LowerBound(),
  13111. hoistInfo.HeadSegmentLengthConstantBounds().UpperBound(),
  13112. hoistInfo.Offset()))
  13113. {
  13114. Assert(hoistInfo.Loop()->bailOutInfo);
  13115. EnsureBailTarget(hoistInfo.Loop());
  13116. if(hoistInfo.LoopCount())
  13117. {
  13118. // Generate the loop count and loop count based bound that will be used for the bound check
  13119. if(!hoistInfo.LoopCount()->HasBeenGenerated())
  13120. {
  13121. GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount());
  13122. }
  13123. GenerateSecondaryInductionVariableBound(
  13124. hoistInfo.Loop(),
  13125. indexVarSym->GetInt32EquivSym(nullptr),
  13126. hoistInfo.LoopCount(),
  13127. hoistInfo.MaxMagnitudeChange(),
  13128. hoistInfo.IndexSym());
  13129. }
  13130. IR::Opnd* lowerBound = indexIntSym
  13131. ? static_cast<IR::Opnd *>(IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func))
  13132. : IR::IntConstOpnd::New(
  13133. hoistInfo.IndexConstantBounds().LowerBound(),
  13134. TyInt32,
  13135. instr->m_func);
  13136. lowerBound->SetIsJITOptimizedReg(true);
  13137. IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
  13138. upperBound->SetIsJITOptimizedReg(true);
  13139. // indexSym <= headSegmentLength + offset (src1 <= src2 + dst)
  13140. IR::Instr *const boundCheck = CreateBoundsCheckInstr(
  13141. lowerBound,
  13142. upperBound,
  13143. hoistInfo.Offset(),
  13144. hoistInfo.IsLoopCountBasedBound()
  13145. ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck
  13146. : IR::BailOutOnFailedHoistedBoundCheck,
  13147. hoistInfo.Loop()->bailOutInfo,
  13148. hoistInfo.Loop()->bailOutInfo->bailOutFunc);
  13149. InsertInstrInLandingPad(boundCheck, hoistInfo.Loop());
  13150. if(indexIntSym)
  13151. {
  13152. TRACE_PHASE_INSTR(
  13153. Js::Phase::BoundCheckHoistPhase,
  13154. instr,
  13155. _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (s%u <= s%u + %d)\n"),
  13156. hoistInfo.Loop()->GetLoopNumber(),
  13157. landingPad->GetBlockNum(),
  13158. hoistInfo.IndexSym()->m_id,
  13159. headSegmentLengthSym->m_id,
  13160. hoistInfo.Offset());
  13161. }
  13162. else
  13163. {
  13164. TRACE_PHASE_INSTR(
  13165. Js::Phase::BoundCheckHoistPhase,
  13166. instr,
  13167. _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (%d <= s%u + %d)\n"),
  13168. hoistInfo.Loop()->GetLoopNumber(),
  13169. landingPad->GetBlockNum(),
  13170. hoistInfo.IndexConstantBounds().LowerBound(),
  13171. headSegmentLengthSym->m_id,
  13172. hoistInfo.Offset());
  13173. }
  13174. TESTTRACE_PHASE_INSTR(
  13175. Js::Phase::BoundCheckHoistPhase,
  13176. instr,
  13177. _u("Hoisting array upper bound check out of loop\n"));
  13178. // Record the bound check instruction as available
  13179. const IntBoundCheck boundCheckInfo(
  13180. hoistInfo.IndexValue() ? hoistInfo.IndexValueNumber() : ZeroValueNumber,
  13181. hoistInfo.HeadSegmentLengthValue()->GetValueNumber(),
  13182. boundCheck,
  13183. landingPad);
  13184. {
  13185. const bool added = CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  13186. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  13187. }
  13188. for(InvariantBlockBackwardIterator it(this, currentBlock, landingPad, nullptr);
  13189. it.IsValid();
  13190. it.MoveNext())
  13191. {
  13192. const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0;
  13193. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  13194. }
  13195. }
  13196. }
  13197. // Update values of the syms involved in the bound check to reflect the bound check
  13198. Assert(!hoistInfo.Loop() || hoistBlock != currentBlock);
  13199. if(hoistBlock != currentBlock)
  13200. {
  13201. for(InvariantBlockBackwardIterator it(this, currentBlock->next, hoistBlock, nullptr);
  13202. it.IsValid();
  13203. it.MoveNext())
  13204. {
  13205. BasicBlock *const block = it.Block();
  13206. Value *leftValue;
  13207. IntConstantBounds leftConstantBounds;
  13208. if(hoistInfo.IndexSym())
  13209. {
  13210. leftValue = block->globOptData.FindValue(hoistInfo.IndexSym());
  13211. if(!leftValue || leftValue->GetValueNumber() != hoistInfo.IndexValueNumber())
  13212. {
  13213. continue;
  13214. }
  13215. AssertVerify(leftValue->GetValueInfo()->TryGetIntConstantBounds(&leftConstantBounds, true));
  13216. }
  13217. else
  13218. {
  13219. leftValue = nullptr;
  13220. leftConstantBounds = hoistInfo.IndexConstantBounds();
  13221. }
  13222. Value *const rightValue = block->globOptData.FindValue(headSegmentLengthSym);
  13223. if(!rightValue)
  13224. {
  13225. continue;
  13226. }
  13227. Assert(rightValue->GetValueNumber() == headSegmentLengthValue->GetValueNumber());
  13228. IntConstantBounds rightConstantBounds;
  13229. AssertVerify(rightValue->GetValueInfo()->TryGetIntConstantBounds(&rightConstantBounds));
  13230. ValueInfo *const newValueInfoForLessThanOrEqual =
  13231. UpdateIntBoundsForLessThanOrEqual(
  13232. leftValue,
  13233. leftConstantBounds,
  13234. rightValue,
  13235. rightConstantBounds,
  13236. hoistInfo.Offset(),
  13237. false);
  13238. if (newValueInfoForLessThanOrEqual)
  13239. {
  13240. ChangeValueInfo(nullptr, leftValue, newValueInfoForLessThanOrEqual);
  13241. AssertVerify(newValueInfoForLessThanOrEqual->TryGetIntConstantBounds(&leftConstantBounds, true));
  13242. if(block == currentBlock && leftValue == indexValue)
  13243. {
  13244. Assert(newValueInfoForLessThanOrEqual->IsInt());
  13245. indexConstantBounds = leftConstantBounds;
  13246. }
  13247. }
  13248. if(hoistInfo.Offset() != INT32_MIN)
  13249. {
  13250. ValueInfo *const newValueInfoForGreaterThanOrEqual =
  13251. UpdateIntBoundsForGreaterThanOrEqual(
  13252. rightValue,
  13253. rightConstantBounds,
  13254. leftValue,
  13255. leftConstantBounds,
  13256. -hoistInfo.Offset(),
  13257. false);
  13258. if (newValueInfoForGreaterThanOrEqual)
  13259. {
  13260. ChangeValueInfo(nullptr, rightValue, newValueInfoForGreaterThanOrEqual);
  13261. if(block == currentBlock)
  13262. {
  13263. Assert(rightValue == headSegmentLengthValue);
  13264. AssertVerify(newValueInfoForGreaterThanOrEqual->TryGetIntConstantBounds(&headSegmentLengthConstantBounds));
  13265. }
  13266. }
  13267. }
  13268. }
  13269. }
  13270. }
  13271. else
  13272. {
  13273. IR::Opnd* lowerBound = baseOwnerIndir->GetIndexOpnd()
  13274. ? static_cast<IR::Opnd *>(baseOwnerIndir->GetIndexOpnd())
  13275. : IR::IntConstOpnd::New(baseOwnerIndir->GetOffset(), TyInt32, instr->m_func);
  13276. lowerBound->SetIsJITOptimizedReg(true);
  13277. IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func);
  13278. upperBound->SetIsJITOptimizedReg(true);
  13279. const int offset = GetBoundCheckOffsetForSimd(newBaseValueType, instr, -1);
  13280. IR::Instr *boundCheck;
  13281. // index <= headSegmentLength - 1 (src1 <= src2 + dst)
  13282. if (shareableBailOutInfo)
  13283. {
  13284. ShareBailOut();
  13285. boundCheck = CreateBoundsCheckInstr(
  13286. lowerBound,
  13287. upperBound,
  13288. offset,
  13289. IR::BailOutOnArrayAccessHelperCall,
  13290. shareableBailOutInfo,
  13291. shareableBailOutInfo->bailOutFunc);
  13292. }
  13293. else
  13294. {
  13295. boundCheck = CreateBoundsCheckInstr(
  13296. lowerBound,
  13297. upperBound,
  13298. offset,
  13299. instr->m_func);
  13300. }
  13301. boundCheck->SetByteCodeOffset(instr);
  13302. insertBeforeInstr->InsertBefore(boundCheck);
  13303. if(!shareableBailOutInfo)
  13304. {
  13305. GenerateBailAtOperation(&boundCheck, IR::BailOutOnArrayAccessHelperCall);
  13306. shareableBailOutInfo = boundCheck->GetBailOutInfo();
  13307. shareableBailOutInfoOriginalOwner = boundCheck;
  13308. }
  13309. instr->extractedUpperBoundCheckWithoutHoisting = true;
  13310. if(baseOwnerIndir->GetIndexOpnd())
  13311. {
  13312. TRACE_PHASE_INSTR(
  13313. Js::Phase::BoundCheckEliminationPhase,
  13314. instr,
  13315. _u("Separating array upper bound check, as (s%u < s%u)\n"),
  13316. indexVarSym->m_id,
  13317. headSegmentLengthSym->m_id);
  13318. }
  13319. else
  13320. {
  13321. TRACE_PHASE_INSTR(
  13322. Js::Phase::BoundCheckEliminationPhase,
  13323. instr,
  13324. _u("Separating array upper bound check, as (%d < s%u)\n"),
  13325. baseOwnerIndir->GetOffset(),
  13326. headSegmentLengthSym->m_id);
  13327. }
  13328. TESTTRACE_PHASE_INSTR(
  13329. Js::Phase::BoundCheckEliminationPhase,
  13330. instr,
  13331. _u("Separating array upper bound check\n"));
  13332. if(DoBoundCheckHoist())
  13333. {
  13334. // Record the bound check instruction as available
  13335. const bool added =
  13336. CurrentBlockData()->availableIntBoundChecks->AddNew(
  13337. IntBoundCheck(
  13338. indexValue ? indexValue->GetValueNumber() : ZeroValueNumber,
  13339. headSegmentLengthValue->GetValueNumber(),
  13340. boundCheck,
  13341. currentBlock)) >= 0;
  13342. Assert(added || failedToUpdateCompatibleUpperBoundCheck);
  13343. }
  13344. }
  13345. // Update the index and head segment length values to reflect the bound check
  13346. ValueInfo *newValueInfo =
  13347. UpdateIntBoundsForLessThan(
  13348. indexValue,
  13349. indexConstantBounds,
  13350. headSegmentLengthValue,
  13351. headSegmentLengthConstantBounds,
  13352. false);
  13353. if(newValueInfo)
  13354. {
  13355. ChangeValueInfo(nullptr, indexValue, newValueInfo);
  13356. AssertVerify(newValueInfo->TryGetIntConstantBounds(&indexConstantBounds));
  13357. }
  13358. newValueInfo =
  13359. UpdateIntBoundsForGreaterThan(
  13360. headSegmentLengthValue,
  13361. headSegmentLengthConstantBounds,
  13362. indexValue,
  13363. indexConstantBounds,
  13364. false);
  13365. if(newValueInfo)
  13366. {
  13367. ChangeValueInfo(nullptr, headSegmentLengthValue, newValueInfo);
  13368. }
  13369. }
  13370. }
  13371. if(doHeadSegmentLoad && !isLikelyJsArray)
  13372. {
  13373. // For typed arrays, load the length first, followed by the bound checks, and then load the head segment. This
  13374. // allows the length sym to become dead by the time of the head segment load, freeing up the register for use by the
  13375. // head segment sym.
  13376. InsertHeadSegmentLoad();
  13377. }
  13378. if(doArrayChecks || doHeadSegmentLoad || doHeadSegmentLengthLoad || doLengthLoad)
  13379. {
  13380. UpdateValue(newHeadSegmentSym, newHeadSegmentLengthSym, newLengthSym);
  13381. baseValueInfo = baseValue->GetValueInfo();
  13382. baseArrayValueInfo = baseValueInfo->IsArrayValueInfo() ? baseValueInfo->AsArrayValueInfo() : nullptr;
  13383. // Iterate up to the root loop's landing pad until all necessary value info is updated
  13384. uint hoistItemCount =
  13385. static_cast<uint>(!!hoistChecksOutOfLoop) +
  13386. !!hoistHeadSegmentLoadOutOfLoop +
  13387. !!hoistHeadSegmentLengthLoadOutOfLoop +
  13388. !!hoistLengthLoadOutOfLoop;
  13389. if(hoistItemCount != 0)
  13390. {
  13391. Loop *rootLoop = nullptr;
  13392. for(Loop *loop = currentBlock->loop; loop; loop = loop->parent)
  13393. {
  13394. rootLoop = loop;
  13395. }
  13396. Assert(rootLoop);
  13397. ValueInfo *valueInfoToHoist = baseValueInfo;
  13398. bool removeHeadSegment, removeHeadSegmentLength, removeLength;
  13399. if(baseArrayValueInfo)
  13400. {
  13401. removeHeadSegment = baseArrayValueInfo->HeadSegmentSym() && !hoistHeadSegmentLoadOutOfLoop;
  13402. removeHeadSegmentLength =
  13403. baseArrayValueInfo->HeadSegmentLengthSym() && !hoistHeadSegmentLengthLoadOutOfLoop;
  13404. removeLength = baseArrayValueInfo->LengthSym() && !hoistLengthLoadOutOfLoop;
  13405. }
  13406. else
  13407. {
  13408. removeLength = removeHeadSegmentLength = removeHeadSegment = false;
  13409. }
  13410. for(InvariantBlockBackwardIterator it(
  13411. this,
  13412. currentBlock,
  13413. rootLoop->landingPad,
  13414. baseOpnd->m_sym,
  13415. baseValue->GetValueNumber());
  13416. it.IsValid();
  13417. it.MoveNext())
  13418. {
  13419. if(removeHeadSegment || removeHeadSegmentLength || removeLength)
  13420. {
  13421. // Remove information that shouldn't be there anymore, from the value info
  13422. valueInfoToHoist =
  13423. valueInfoToHoist->AsArrayValueInfo()->Copy(
  13424. alloc,
  13425. !removeHeadSegment,
  13426. !removeHeadSegmentLength,
  13427. !removeLength);
  13428. removeLength = removeHeadSegmentLength = removeHeadSegment = false;
  13429. }
  13430. BasicBlock *const block = it.Block();
  13431. Value *const blockBaseValue = it.InvariantSymValue();
  13432. HoistInvariantValueInfo(valueInfoToHoist, blockBaseValue, block);
  13433. // See if we have completed hoisting value info for one of the items
  13434. if(hoistChecksOutOfLoop && block == hoistChecksOutOfLoop->landingPad)
  13435. {
  13436. // All other items depend on array checks, so we can just stop here
  13437. hoistChecksOutOfLoop = nullptr;
  13438. break;
  13439. }
  13440. if(hoistHeadSegmentLoadOutOfLoop && block == hoistHeadSegmentLoadOutOfLoop->landingPad)
  13441. {
  13442. hoistHeadSegmentLoadOutOfLoop = nullptr;
  13443. if(--hoistItemCount == 0)
  13444. break;
  13445. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentSym())
  13446. removeHeadSegment = true;
  13447. }
  13448. if(hoistHeadSegmentLengthLoadOutOfLoop && block == hoistHeadSegmentLengthLoadOutOfLoop->landingPad)
  13449. {
  13450. hoistHeadSegmentLengthLoadOutOfLoop = nullptr;
  13451. if(--hoistItemCount == 0)
  13452. break;
  13453. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->HeadSegmentLengthSym())
  13454. removeHeadSegmentLength = true;
  13455. }
  13456. if(hoistLengthLoadOutOfLoop && block == hoistLengthLoadOutOfLoop->landingPad)
  13457. {
  13458. hoistLengthLoadOutOfLoop = nullptr;
  13459. if(--hoistItemCount == 0)
  13460. break;
  13461. if(valueInfoToHoist->IsArrayValueInfo() && valueInfoToHoist->AsArrayValueInfo()->LengthSym())
  13462. removeLength = true;
  13463. }
  13464. }
  13465. }
  13466. }
  13467. }
  13468. IR::ArrayRegOpnd *baseArrayOpnd;
  13469. if(baseArrayValueInfo)
  13470. {
  13471. // Update the opnd to include the associated syms
  13472. baseArrayOpnd =
  13473. baseArrayValueInfo->CreateOpnd(
  13474. baseOpnd,
  13475. needsHeadSegment,
  13476. needsHeadSegmentLength || (!isLikelyJsArray && needsLength),
  13477. needsLength,
  13478. eliminatedLowerBoundCheck,
  13479. eliminatedUpperBoundCheck,
  13480. instr->m_func);
  13481. if(baseOwnerInstr)
  13482. {
  13483. Assert(baseOwnerInstr->GetSrc1() == baseOpnd);
  13484. baseOwnerInstr->ReplaceSrc1(baseArrayOpnd);
  13485. }
  13486. else
  13487. {
  13488. Assert(baseOwnerIndir);
  13489. Assert(baseOwnerIndir->GetBaseOpnd() == baseOpnd);
  13490. baseOwnerIndir->ReplaceBaseOpnd(baseArrayOpnd);
  13491. }
  13492. baseOpnd = baseArrayOpnd;
  13493. }
  13494. else
  13495. {
  13496. baseArrayOpnd = nullptr;
  13497. }
  13498. if(isLikelyJsArray)
  13499. {
  13500. // Insert an instruction to indicate to the dead-store pass that implicit calls need to be kept disabled until this
  13501. // instruction. Operations other than LdElem and StElem don't benefit much from arrays having no missing values, so
  13502. // no need to ensure that the array still has no missing values. For a particular array, if none of the accesses
  13503. // benefit much from the no-missing-values information, it may be beneficial to avoid checking for no missing
  13504. // values, especially in the case for a single array access, where the cost of the check could be relatively
  13505. // significant. An StElem has to do additional checks in the common path if the array may have missing values, and
  13506. // a StElem that operates on an array that has no missing values is more likely to keep the no-missing-values info
  13507. // on the array more precise, so it still benefits a little from the no-missing-values info.
  13508. CaptureNoImplicitCallUses(baseOpnd, isLoad || isStore);
  13509. }
  13510. else if(baseArrayOpnd && baseArrayOpnd->HeadSegmentLengthSym())
  13511. {
  13512. // A typed array's array buffer may be transferred to a web worker as part of an implicit call, in which case the typed
  13513. // array's length is set to zero. Insert an instruction to indicate to the dead-store pass that implicit calls need to
  13514. // be disabled until this instruction.
  13515. IR::RegOpnd *const headSegmentLengthOpnd =
  13516. IR::RegOpnd::New(
  13517. baseArrayOpnd->HeadSegmentLengthSym(),
  13518. baseArrayOpnd->HeadSegmentLengthSym()->GetType(),
  13519. instr->m_func);
  13520. const IR::AutoReuseOpnd autoReuseHeadSegmentLengthOpnd(headSegmentLengthOpnd, instr->m_func);
  13521. CaptureNoImplicitCallUses(headSegmentLengthOpnd, false);
  13522. }
  13523. const auto OnEliminated = [&](const Js::Phase phase, const char *const eliminatedLoad)
  13524. {
  13525. TRACE_TESTTRACE_PHASE_INSTR(phase, instr, _u("Eliminating array %S\n"), eliminatedLoad);
  13526. };
  13527. OnEliminated(Js::Phase::ArrayCheckHoistPhase, "checks");
  13528. if(baseArrayOpnd)
  13529. {
  13530. if(baseArrayOpnd->HeadSegmentSym())
  13531. {
  13532. OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment load");
  13533. }
  13534. if(baseArrayOpnd->HeadSegmentLengthSym())
  13535. {
  13536. OnEliminated(Js::Phase::ArraySegmentHoistPhase, "head segment length load");
  13537. }
  13538. if(baseArrayOpnd->LengthSym())
  13539. {
  13540. OnEliminated(Js::Phase::ArrayLengthHoistPhase, "length load");
  13541. }
  13542. if(baseArrayOpnd->EliminatedLowerBoundCheck())
  13543. {
  13544. OnEliminated(Js::Phase::BoundCheckEliminationPhase, "lower bound check");
  13545. }
  13546. if(baseArrayOpnd->EliminatedUpperBoundCheck())
  13547. {
  13548. OnEliminated(Js::Phase::BoundCheckEliminationPhase, "upper bound check");
  13549. }
  13550. }
  13551. if(!canBailOutOnArrayAccessHelperCall)
  13552. {
  13553. return;
  13554. }
  13555. // Bail out instead of generating a helper call. This helps to remove the array reference when the head segment and head
  13556. // segment length are available, reduces code size, and allows bound checks to be separated.
  13557. if(instr->HasBailOutInfo())
  13558. {
  13559. const IR::BailOutKind bailOutKind = instr->GetBailOutKind();
  13560. Assert(
  13561. !(bailOutKind & ~IR::BailOutKindBits) ||
  13562. (bailOutKind & ~IR::BailOutKindBits) == IR::BailOutOnImplicitCallsPreOp);
  13563. instr->SetBailOutKind(bailOutKind & IR::BailOutKindBits | IR::BailOutOnArrayAccessHelperCall);
  13564. }
  13565. else
  13566. {
  13567. GenerateBailAtOperation(&instr, IR::BailOutOnArrayAccessHelperCall);
  13568. }
  13569. }
  13570. void
  13571. GlobOpt::CaptureNoImplicitCallUses(
  13572. IR::Opnd *opnd,
  13573. const bool usesNoMissingValuesInfo,
  13574. IR::Instr *const includeCurrentInstr)
  13575. {
  13576. Assert(!IsLoopPrePass());
  13577. Assert(noImplicitCallUsesToInsert);
  13578. Assert(opnd);
  13579. // The opnd may be deleted later, so make a copy to ensure it is alive for inserting NoImplicitCallUses later
  13580. opnd = opnd->Copy(func);
  13581. if(!usesNoMissingValuesInfo)
  13582. {
  13583. const ValueType valueType(opnd->GetValueType());
  13584. if(valueType.IsArrayOrObjectWithArray() && valueType.HasNoMissingValues())
  13585. {
  13586. // Inserting NoImplicitCallUses for an opnd with a definitely-array-with-no-missing-values value type means that the
  13587. // instruction following it uses the information that the array has no missing values in some way, for instance, it
  13588. // may omit missing value checks. Based on that, the dead-store phase in turn ensures that the necessary bailouts
  13589. // are inserted to ensure that the array still has no missing values until the following instruction. Since
  13590. // 'usesNoMissingValuesInfo' is false, change the value type to indicate to the dead-store phase that the following
  13591. // instruction does not use the no-missing-values information.
  13592. opnd->SetValueType(valueType.SetHasNoMissingValues(false));
  13593. }
  13594. }
  13595. if(includeCurrentInstr)
  13596. {
  13597. IR::Instr *const noImplicitCallUses =
  13598. IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, includeCurrentInstr->m_func);
  13599. noImplicitCallUses->SetSrc1(opnd);
  13600. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  13601. includeCurrentInstr->InsertAfter(noImplicitCallUses);
  13602. return;
  13603. }
  13604. noImplicitCallUsesToInsert->Add(opnd);
  13605. }
  13606. void
  13607. GlobOpt::InsertNoImplicitCallUses(IR::Instr *const instr)
  13608. {
  13609. Assert(noImplicitCallUsesToInsert);
  13610. const int n = noImplicitCallUsesToInsert->Count();
  13611. if(n == 0)
  13612. {
  13613. return;
  13614. }
  13615. IR::Instr *const insertBeforeInstr = instr->GetInsertBeforeByteCodeUsesInstr();
  13616. for(int i = 0; i < n;)
  13617. {
  13618. IR::Instr *const noImplicitCallUses = IR::PragmaInstr::New(Js::OpCode::NoImplicitCallUses, 0, instr->m_func);
  13619. noImplicitCallUses->SetSrc1(noImplicitCallUsesToInsert->Item(i));
  13620. noImplicitCallUses->GetSrc1()->SetIsJITOptimizedReg(true);
  13621. ++i;
  13622. if(i < n)
  13623. {
  13624. noImplicitCallUses->SetSrc2(noImplicitCallUsesToInsert->Item(i));
  13625. noImplicitCallUses->GetSrc2()->SetIsJITOptimizedReg(true);
  13626. ++i;
  13627. }
  13628. noImplicitCallUses->SetByteCodeOffset(instr);
  13629. insertBeforeInstr->InsertBefore(noImplicitCallUses);
  13630. }
  13631. noImplicitCallUsesToInsert->Clear();
  13632. }
  13633. void
  13634. GlobOpt::PrepareLoopArrayCheckHoist()
  13635. {
  13636. if(IsLoopPrePass() || !currentBlock->loop || !currentBlock->isLoopHeader || !currentBlock->loop->parent)
  13637. {
  13638. return;
  13639. }
  13640. if(currentBlock->loop->parent->needImplicitCallBailoutChecksForJsArrayCheckHoist)
  13641. {
  13642. // If the parent loop is an array check elimination candidate, so is the current loop. Even though the current loop may
  13643. // not have array accesses, if the parent loop hoists array checks, the current loop also needs implicit call checks.
  13644. currentBlock->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist = true;
  13645. }
  13646. }
  13647. JsArrayKills
  13648. GlobOpt::CheckJsArrayKills(IR::Instr *const instr)
  13649. {
  13650. Assert(instr);
  13651. JsArrayKills kills;
  13652. if(instr->UsesAllFields())
  13653. {
  13654. // Calls can (but are unlikely to) change a javascript array into an ES5 array, which may have different behavior for
  13655. // index properties.
  13656. kills.SetKillsAllArrays();
  13657. return kills;
  13658. }
  13659. const bool doArrayMissingValueCheckHoist = DoArrayMissingValueCheckHoist();
  13660. const bool doNativeArrayTypeSpec = DoNativeArrayTypeSpec();
  13661. const bool doArraySegmentHoist = DoArraySegmentHoist(ValueType::GetObject(ObjectType::Array));
  13662. Assert(doArraySegmentHoist == DoArraySegmentHoist(ValueType::GetObject(ObjectType::ObjectWithArray)));
  13663. const bool doArrayLengthHoist = DoArrayLengthHoist();
  13664. if(!doArrayMissingValueCheckHoist && !doNativeArrayTypeSpec && !doArraySegmentHoist && !doArrayLengthHoist)
  13665. {
  13666. return kills;
  13667. }
  13668. // The following operations may create missing values in an array in an unlikely circumstance. Even though they don't kill
  13669. // the fact that the 'this' parameter is an array (when implicit calls are disabled), we don't have a way to say the value
  13670. // type is definitely array but it likely has no missing values. So, these will kill the definite value type as well, making
  13671. // it likely array, such that the array checks will have to be redone.
  13672. const bool useValueTypes = !IsLoopPrePass(); // Source value types are not guaranteed to be correct in a loop prepass
  13673. switch(instr->m_opcode)
  13674. {
  13675. case Js::OpCode::StElemI_A:
  13676. case Js::OpCode::StElemI_A_Strict:
  13677. {
  13678. Assert(instr->GetDst());
  13679. if(!instr->GetDst()->IsIndirOpnd())
  13680. {
  13681. break;
  13682. }
  13683. const ValueType baseValueType =
  13684. useValueTypes ? instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType() : ValueType::Uninitialized;
  13685. if(useValueTypes && baseValueType.IsNotArrayOrObjectWithArray())
  13686. {
  13687. break;
  13688. }
  13689. if(instr->IsProfiledInstr())
  13690. {
  13691. const Js::StElemInfo *const stElemInfo = instr->AsProfiledInstr()->u.stElemInfo;
  13692. if(doArraySegmentHoist && stElemInfo->LikelyStoresOutsideHeadSegmentBounds())
  13693. {
  13694. kills.SetKillsArrayHeadSegments();
  13695. kills.SetKillsArrayHeadSegmentLengths();
  13696. }
  13697. if(doArrayLengthHoist &&
  13698. !(useValueTypes && baseValueType.IsNotArray()) &&
  13699. stElemInfo->LikelyStoresOutsideArrayBounds())
  13700. {
  13701. kills.SetKillsArrayLengths();
  13702. }
  13703. }
  13704. break;
  13705. }
  13706. case Js::OpCode::DeleteElemI_A:
  13707. case Js::OpCode::DeleteElemIStrict_A:
  13708. Assert(instr->GetSrc1());
  13709. if(!instr->GetSrc1()->IsIndirOpnd() ||
  13710. (useValueTypes && instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType().IsNotArrayOrObjectWithArray()))
  13711. {
  13712. break;
  13713. }
  13714. if(doArrayMissingValueCheckHoist)
  13715. {
  13716. kills.SetKillsArraysWithNoMissingValues();
  13717. }
  13718. if(doArraySegmentHoist)
  13719. {
  13720. kills.SetKillsArrayHeadSegmentLengths();
  13721. }
  13722. break;
  13723. case Js::OpCode::StFld:
  13724. case Js::OpCode::StFldStrict:
  13725. {
  13726. Assert(instr->GetDst());
  13727. if(!doArraySegmentHoist && !doArrayLengthHoist)
  13728. {
  13729. break;
  13730. }
  13731. IR::SymOpnd *const symDst = instr->GetDst()->AsSymOpnd();
  13732. if(!symDst->IsPropertySymOpnd())
  13733. {
  13734. break;
  13735. }
  13736. IR::PropertySymOpnd *const dst = symDst->AsPropertySymOpnd();
  13737. if(dst->m_sym->AsPropertySym()->m_propertyId != Js::PropertyIds::length)
  13738. {
  13739. break;
  13740. }
  13741. if(useValueTypes && dst->GetPropertyOwnerValueType().IsNotArray())
  13742. {
  13743. // Setting the 'length' property of an object that is not an array, even if it has an internal array, does
  13744. // not kill the head segment or head segment length of any arrays.
  13745. break;
  13746. }
  13747. if(doArraySegmentHoist)
  13748. {
  13749. kills.SetKillsArrayHeadSegmentLengths();
  13750. }
  13751. if(doArrayLengthHoist)
  13752. {
  13753. kills.SetKillsArrayLengths();
  13754. }
  13755. break;
  13756. }
  13757. case Js::OpCode::InlineArrayPush:
  13758. {
  13759. Assert(instr->GetSrc2());
  13760. IR::Opnd *const arrayOpnd = instr->GetSrc1();
  13761. Assert(arrayOpnd);
  13762. const ValueType arrayValueType(arrayOpnd->GetValueType());
  13763. if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
  13764. {
  13765. break;
  13766. }
  13767. if(doArrayMissingValueCheckHoist)
  13768. {
  13769. kills.SetKillsArraysWithNoMissingValues();
  13770. }
  13771. if(doArraySegmentHoist)
  13772. {
  13773. kills.SetKillsArrayHeadSegments();
  13774. kills.SetKillsArrayHeadSegmentLengths();
  13775. }
  13776. if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
  13777. {
  13778. kills.SetKillsArrayLengths();
  13779. }
  13780. // Don't kill NativeArray, if there is no mismatch between array's type and element's type.
  13781. if(doNativeArrayTypeSpec &&
  13782. !(useValueTypes && arrayValueType.IsNativeArray() &&
  13783. ((arrayValueType.IsLikelyNativeIntArray() && instr->GetSrc2()->IsInt32()) ||
  13784. (arrayValueType.IsLikelyNativeFloatArray() && instr->GetSrc2()->IsFloat()))
  13785. ) &&
  13786. !(useValueTypes && arrayValueType.IsNotNativeArray()))
  13787. {
  13788. kills.SetKillsNativeArrays();
  13789. }
  13790. break;
  13791. }
  13792. case Js::OpCode::InlineArrayPop:
  13793. {
  13794. IR::Opnd *const arrayOpnd = instr->GetSrc1();
  13795. Assert(arrayOpnd);
  13796. const ValueType arrayValueType(arrayOpnd->GetValueType());
  13797. if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
  13798. {
  13799. break;
  13800. }
  13801. if(doArraySegmentHoist)
  13802. {
  13803. kills.SetKillsArrayHeadSegmentLengths();
  13804. }
  13805. if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
  13806. {
  13807. kills.SetKillsArrayLengths();
  13808. }
  13809. break;
  13810. }
  13811. case Js::OpCode::CallDirect:
  13812. {
  13813. Assert(instr->GetSrc1());
  13814. // Find the 'this' parameter and check if it's possible for it to be an array
  13815. IR::Opnd *const arrayOpnd = instr->FindCallArgumentOpnd(1);
  13816. Assert(arrayOpnd);
  13817. const ValueType arrayValueType(arrayOpnd->GetValueType());
  13818. if(!arrayOpnd->IsRegOpnd() || (useValueTypes && arrayValueType.IsNotArrayOrObjectWithArray()))
  13819. {
  13820. break;
  13821. }
  13822. const IR::JnHelperMethod helperMethod = instr->GetSrc1()->AsHelperCallOpnd()->m_fnHelper;
  13823. if(doArrayMissingValueCheckHoist)
  13824. {
  13825. switch(helperMethod)
  13826. {
  13827. case IR::HelperArray_Reverse:
  13828. case IR::HelperArray_Shift:
  13829. case IR::HelperArray_Splice:
  13830. case IR::HelperArray_Unshift:
  13831. kills.SetKillsArraysWithNoMissingValues();
  13832. break;
  13833. }
  13834. }
  13835. if(doArraySegmentHoist)
  13836. {
  13837. switch(helperMethod)
  13838. {
  13839. case IR::HelperArray_Reverse:
  13840. case IR::HelperArray_Shift:
  13841. case IR::HelperArray_Splice:
  13842. case IR::HelperArray_Unshift:
  13843. kills.SetKillsArrayHeadSegments();
  13844. kills.SetKillsArrayHeadSegmentLengths();
  13845. break;
  13846. }
  13847. }
  13848. if(doArrayLengthHoist && !(useValueTypes && arrayValueType.IsNotArray()))
  13849. {
  13850. switch(helperMethod)
  13851. {
  13852. case IR::HelperArray_Shift:
  13853. case IR::HelperArray_Splice:
  13854. case IR::HelperArray_Unshift:
  13855. kills.SetKillsArrayLengths();
  13856. break;
  13857. }
  13858. }
  13859. if(doNativeArrayTypeSpec && !(useValueTypes && arrayValueType.IsNotNativeArray()))
  13860. {
  13861. switch(helperMethod)
  13862. {
  13863. case IR::HelperArray_Reverse:
  13864. case IR::HelperArray_Shift:
  13865. case IR::HelperArray_Slice:
  13866. // Currently not inlined.
  13867. //case IR::HelperArray_Sort:
  13868. case IR::HelperArray_Splice:
  13869. case IR::HelperArray_Unshift:
  13870. kills.SetKillsNativeArrays();
  13871. break;
  13872. }
  13873. }
  13874. break;
  13875. }
  13876. }
  13877. return kills;
  13878. }
  13879. GlobOptBlockData const * GlobOpt::CurrentBlockData() const
  13880. {
  13881. return &this->currentBlock->globOptData;
  13882. }
  13883. GlobOptBlockData * GlobOpt::CurrentBlockData()
  13884. {
  13885. return &this->currentBlock->globOptData;
  13886. }
  13887. bool
  13888. GlobOpt::IsOperationThatLikelyKillsJsArraysWithNoMissingValues(IR::Instr *const instr)
  13889. {
  13890. // StElem is profiled with information indicating whether it will likely create a missing value in the array. In that case,
  13891. // we prefer to kill the no-missing-values information in the value so that we don't bail out in a likely circumstance.
  13892. return
  13893. (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict) &&
  13894. DoArrayMissingValueCheckHoist() &&
  13895. instr->IsProfiledInstr() &&
  13896. instr->AsProfiledInstr()->u.stElemInfo->LikelyCreatesMissingValue();
  13897. }
  13898. bool
  13899. GlobOpt::NeedBailOnImplicitCallForArrayCheckHoist(BasicBlock const * const block, const bool isForwardPass) const
  13900. {
  13901. Assert(block);
  13902. return isForwardPass && block->loop && block->loop->needImplicitCallBailoutChecksForJsArrayCheckHoist;
  13903. }
// Called per instruction during the forward pass to track entry into and exit out of ranges of instructions
// in which int overflow does not matter (delimited by NoIntOverflowBoundary markers computed by the backward
// pass). On entering such a range, int-specializes all of the range's input syms (or disables the optimization
// for the range if that cannot be done).
// Returns true iff 'instr' is a NoIntOverflowBoundary marker instruction.
bool
GlobOpt::PrepareForIgnoringIntOverflow(IR::Instr *const instr)
{
    Assert(instr);

    const bool isBoundary = instr->m_opcode == Js::OpCode::NoIntOverflowBoundary;

    // Update the instruction's "int overflow matters" flag based on whether we are currently allowing ignoring int overflows.
    // Some operations convert their srcs to int32s, those can still ignore int overflow.
    if(instr->ignoreIntOverflowInRange)
    {
        instr->ignoreIntOverflowInRange = !intOverflowCurrentlyMattersInRange || OpCodeAttr::IsInt32(instr->m_opcode);
    }

    if(!intOverflowDoesNotMatterRange)
    {
        Assert(intOverflowCurrentlyMattersInRange);

        // There are no more ranges of instructions where int overflow does not matter, in this block.
        return isBoundary;
    }

    if(instr == intOverflowDoesNotMatterRange->LastInstr())
    {
        Assert(isBoundary);

        // Reached the last instruction in the range; overflow matters again until the next range begins.
        intOverflowCurrentlyMattersInRange = true;
        intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
        return isBoundary;
    }

    if(!intOverflowCurrentlyMattersInRange)
    {
        // Currently inside a range where overflow is being ignored; nothing more to do per instruction.
        return isBoundary;
    }

    if(instr != intOverflowDoesNotMatterRange->FirstInstr())
    {
        // Have not reached the next range
        return isBoundary;
    }

    Assert(isBoundary);

    // This is the first instruction in a range of instructions where int overflow does not matter. There can be many inputs to
    // instructions in the range, some of which are inputs to the range itself (that is, the values are not defined in the
    // range). Ignoring int overflow is only valid for int operations, so we need to ensure that all inputs to the range are
    // int (not "likely int") before ignoring any overflows in the range. Ensuring that a sym with a "likely int" value is an
    // int requires a bail-out. These bail-out check need to happen before any overflows are ignored, otherwise it's too late.
    // The backward pass tracked all inputs into the range. Iterate over them and verify the values, and insert lossless
    // conversions to int as necessary, before the first instruction in the range. If for any reason all values cannot be
    // guaranteed to be ints, the optimization will be disabled for this range.
    intOverflowCurrentlyMattersInRange = false;

    {
        BVSparse<JitArenaAllocator> tempBv1(tempAlloc);
        BVSparse<JitArenaAllocator> tempBv2(tempAlloc);

        {
            // Just renaming the temp BVs for this section to indicate how they're used so that it makes sense
            BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
            BVSparse<JitArenaAllocator> &symsToInclude = tempBv2;
#if DBG_DUMP
            SymID couldNotConvertSymId = 0;
#endif
            FOREACH_BITSET_IN_SPARSEBV(id, intOverflowDoesNotMatterRange->SymsRequiredToBeInt())
            {
                Sym *const sym = func->m_symTable->Find(id);
                Assert(sym);

                // Some instructions with property syms are also tracked by the backward pass, and may be included in the range
                // (LdSlot for instance). These property syms don't get their values until either copy-prop resolves a value for
                // them, or a new value is created once the use of the property sym is reached. In either case, we're not that
                // far yet, so we need to find the future value of the property sym by evaluating copy-prop in reverse.
                Value *const value = sym->IsStackSym() ? CurrentBlockData()->FindValue(sym) : CurrentBlockData()->FindFuturePropertyValue(sym->AsPropertySym());
                if(!value)
                {
                    // No value could be determined for this input sym; disable the optimization for this range.
#if DBG_DUMP
                    couldNotConvertSymId = id;
#endif
                    intOverflowCurrentlyMattersInRange = true;
                    BREAK_BITSET_IN_SPARSEBV;
                }

                const bool isInt32OrUInt32Float =
                    value->GetValueInfo()->IsFloatConstant() &&
                    Js::JavascriptNumber::IsInt32OrUInt32(value->GetValueInfo()->AsFloatConstant()->FloatValue());
                if(value->GetValueInfo()->IsInt() || isInt32OrUInt32Float)
                {
                    if(!IsLoopPrePass())
                    {
                        // Input values that are already int can be excluded from int-specialization. We can treat unsigned
                        // int32 values as int32 values (ignoring the overflow), since the values will only be used inside the
                        // range where overflow does not matter.
                        symsToExclude.Set(sym->m_id);
                    }
                    continue;
                }

                if(!DoAggressiveIntTypeSpec() || !value->GetValueInfo()->IsLikelyInt())
                {
                    // When aggressive int specialization is off, syms with "likely int" values cannot be forced to int since
                    // int bail-out checks are not allowed in that mode. Similarly, with aggressive int specialization on, it
                    // wouldn't make sense to force non-"likely int" values to int since it would almost guarantee a bail-out at
                    // runtime. In both cases, just disable ignoring overflow for this range.
#if DBG_DUMP
                    couldNotConvertSymId = id;
#endif
                    intOverflowCurrentlyMattersInRange = true;
                    BREAK_BITSET_IN_SPARSEBV;
                }

                if(IsLoopPrePass())
                {
                    // The loop prepass does not modify bit-vectors. Since it doesn't add bail-out checks, it also does not need
                    // to specialize anything up-front. It only needs to be consistent in how it determines whether to allow
                    // ignoring overflow for a range, based on the values of inputs into the range.
                    continue;
                }

                // Since input syms are tracked in the backward pass, where there is no value tracking, it will not be aware of
                // copy-prop. If a copy-prop sym is available, it will be used instead, so exclude the original sym and include
                // the copy-prop sym for specialization.
                StackSym *const copyPropSym = CurrentBlockData()->GetCopyPropSym(sym, value);
                if(copyPropSym)
                {
                    symsToExclude.Set(sym->m_id);
                    Assert(!symsToExclude.Test(copyPropSym->m_id));

                    const bool needsToBeLossless =
                        !intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Test(sym->m_id);
                    if(intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Test(copyPropSym->m_id) ||
                        symsToInclude.TestAndSet(copyPropSym->m_id))
                    {
                        // The copy-prop sym is already included
                        if(needsToBeLossless)
                        {
                            // The original sym needs to be lossless, so make the copy-prop sym lossless as well.
                            intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Clear(copyPropSym->m_id);
                        }
                    }
                    else if(!needsToBeLossless)
                    {
                        // The copy-prop sym was not included before, and the original sym can be lossy, so make it lossy.
                        intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Set(copyPropSym->m_id);
                    }
                }
                else if(!sym->IsStackSym())
                {
                    // Only stack syms can be converted to int, and copy-prop syms are stack syms. If a copy-prop sym was not
                    // found for the property sym, we can't ignore overflows in this range.
#if DBG_DUMP
                    couldNotConvertSymId = id;
#endif
                    intOverflowCurrentlyMattersInRange = true;
                    BREAK_BITSET_IN_SPARSEBV;
                }
            } NEXT_BITSET_IN_SPARSEBV;

            if(intOverflowCurrentlyMattersInRange)
            {
                // The optimization was disabled for this range above; skip past the range and bail out.
#if DBG_DUMP
                if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func) && !IsLoopPrePass())
                {
                    char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                    Output::Print(
                        _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u, Disabled ignoring overflows\n"),
                        func->GetJITFunctionBody()->GetDisplayName(),
                        func->GetDebugNumberSet(debugStringBuffer),
                        Js::PhaseNames[Js::ForwardPhase],
                        currentBlock->GetBlockNum());
                    Output::Print(_u("    Input sym could not be turned into an int: %u\n"), couldNotConvertSymId);
                    Output::Print(_u("    First instr: "));
                    instr->m_next->Dump();
                    Output::Flush();
                }
#endif

                intOverflowDoesNotMatterRange = intOverflowDoesNotMatterRange->Next();
                return isBoundary;
            }

            if(IsLoopPrePass())
            {
                return isBoundary;
            }

            // Update the syms to specialize after enumeration
            intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
            intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
            intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Or(&symsToInclude);
        }

        {
            // Exclude syms that are already live as lossless int32, and exclude lossy conversions of syms that are already live
            // as lossy int32.
            //     symsToExclude = liveInt32Syms - liveLossyInt32Syms                   // syms live as lossless int
            //     lossySymsToExclude = symsRequiredToBeLossyInt & liveLossyInt32Syms;  // syms we want as lossy int that are already live as lossy int
            //     symsToExclude |= lossySymsToExclude
            //     symsRequiredToBeInt -= symsToExclude
            //     symsRequiredToBeLossyInt -= symsToExclude
            BVSparse<JitArenaAllocator> &symsToExclude = tempBv1;
            BVSparse<JitArenaAllocator> &lossySymsToExclude = tempBv2;
            symsToExclude.Minus(CurrentBlockData()->liveInt32Syms, CurrentBlockData()->liveLossyInt32Syms);
            lossySymsToExclude.And(
                intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(),
                CurrentBlockData()->liveLossyInt32Syms);
            symsToExclude.Or(&lossySymsToExclude);
            intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(&symsToExclude);
            intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Minus(&symsToExclude);
        }

#if DBG
        {
            // Verify that the syms to be converted are live
            //     liveSyms = liveInt32Syms | liveFloat64Syms | liveVarSyms
            //     deadSymsRequiredToBeInt = symsRequiredToBeInt - liveSyms
            BVSparse<JitArenaAllocator> &liveSyms = tempBv1;
            BVSparse<JitArenaAllocator> &deadSymsRequiredToBeInt = tempBv2;
            liveSyms.Or(CurrentBlockData()->liveInt32Syms, CurrentBlockData()->liveFloat64Syms);
            liveSyms.Or(CurrentBlockData()->liveVarSyms);
            deadSymsRequiredToBeInt.Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), &liveSyms);
            Assert(deadSymsRequiredToBeInt.IsEmpty());
        }
#endif
    }

    // Int-specialize the syms before the first instruction of the range (the current instruction)
    intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Minus(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt());

#if DBG_DUMP
    if(PHASE_TRACE(Js::TrackCompoundedIntOverflowPhase, func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(
            _u("TrackCompoundedIntOverflow - Top function: %s (%s), Phase: %s, Block: %u\n"),
            func->GetJITFunctionBody()->GetDisplayName(),
            func->GetDebugNumberSet(debugStringBuffer),
            Js::PhaseNames[Js::ForwardPhase],
            currentBlock->GetBlockNum());
        Output::Print(_u("    Input syms to be int-specialized (lossless): "));
        intOverflowDoesNotMatterRange->SymsRequiredToBeInt()->Dump();
        Output::Print(_u("    Input syms to be converted to int (lossy): "));
        intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt()->Dump();
        Output::Print(_u("    First instr: "));
        instr->m_next->Dump();
        Output::Flush();
    }
#endif

    ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeInt(), currentBlock, false /* lossy */, instr);
    ToInt32(intOverflowDoesNotMatterRange->SymsRequiredToBeLossyInt(), currentBlock, true /* lossy */, instr);
    return isBoundary;
}
// Verifies, for an instruction inside a range where int overflow is currently being ignored, that the
// instruction actually operates on int32 operands (or is otherwise harmless). If it doesn't, it is too late to
// recover during this JIT: the function either rejits with int-overflow tracking disabled, or aborts the JIT
// entirely if tracking was already disabled.
void
GlobOpt::VerifyIntSpecForIgnoringIntOverflow(IR::Instr *const instr)
{
    // Only relevant when inside an overflow-doesn't-matter range, and not during the loop prepass.
    if(intOverflowCurrentlyMattersInRange || IsLoopPrePass())
    {
        return;
    }

    Assert(instr->m_opcode != Js::OpCode::Mul_I4 ||
        (instr->m_opcode == Js::OpCode::Mul_I4 && !instr->ShouldCheckFor32BitOverflow() && instr->ShouldCheckForNon32BitOverflow() ));

    // Instructions that are marked as "overflow doesn't matter" in the range must guarantee that they operate on int values and
    // result in int values, for ignoring overflow to be valid. So, int-specialization is required for such instructions in the
    // range. Ld_A is an exception because it only specializes if the src sym is available as a required specialized sym, and it
    // doesn't generate bailouts or cause ignoring int overflow to be invalid.
    // MULs are allowed to start a region and have BailOutInfo since they will bailout on non-32 bit overflow.
    if(instr->m_opcode == Js::OpCode::Ld_A ||
       ((!instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::Mul_I4) &&
        (!instr->GetDst() || instr->GetDst()->IsInt32()) &&
        (!instr->GetSrc1() || instr->GetSrc1()->IsInt32()) &&
        (!instr->GetSrc2() || instr->GetSrc2()->IsInt32())))
    {
        return;
    }

    // An instruction without bailout info and without side effects is also acceptable.
    if (!instr->HasBailOutInfo() && !instr->HasAnySideEffects())
    {
        return;
    }

    // This can happen for Neg_A if it needs to bail out on negative zero, and perhaps other cases as well. It's too late to fix
    // the problem (overflows may already be ignored), so handle it by bailing out at compile-time and disabling tracking int
    // overflow.
    Assert(!func->IsTrackCompoundedIntOverflowDisabled());

    if(PHASE_TRACE(Js::BailOutPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(
            _u("BailOut (compile-time): function: %s (%s) instr: "),
            func->GetJITFunctionBody()->GetDisplayName(),
            func->GetDebugNumberSet(debugStringBuffer));
#if DBG_DUMP
        instr->Dump();
#else
        Output::Print(_u("%s "), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
#endif
        Output::Print(_u("(overflow does not matter but could not int-spec or needed bailout)\n"));
        Output::Flush();
    }

    if(func->IsTrackCompoundedIntOverflowDisabled())
    {
        // Tracking int overflows is already off for some reason. Prevent trying to rejit again because it won't help and the
        // same thing will happen again and cause an infinite loop. Just abort jitting this function.
        if(PHASE_TRACE(Js::BailOutPhase, this->func))
        {
            Output::Print(_u("    Aborting JIT because TrackIntOverflow is already off\n"));
            Output::Flush();
        }
        throw Js::OperationAbortedException();
    }

    // Rejit with int-overflow tracking disabled so this situation does not recur.
    throw Js::RejitException(RejitReason::TrackIntOverflowDisabled);
}
// It makes lowering easier if it can assume that the first src is never a constant,
// at least for commutative operators. For non-commutative, just hoist the constant.
// For comparison branches/compares, swapping the operands also inverts the opcode (e.g. BrGe -> BrLe).
// If the srcs cannot be swapped and src1 is still an immediate, the immediate is hoisted into a separate
// load instruction so that src1 becomes a register operand.
void
GlobOpt::PreLowerCanonicalize(IR::Instr *instr, Value **pSrc1Val, Value **pSrc2Val)
{
    IR::Opnd *dst = instr->GetDst();
    IR::Opnd *src1 = instr->GetSrc1();
    IR::Opnd *src2 = instr->GetSrc2();

    if (src1->IsImmediateOpnd())
    {
        // Swap for dst, src
    }
    else if (src2 && dst && src2->IsRegOpnd())
    {
        if (src2->GetIsDead() && !src1->GetIsDead() && !src1->IsEqual(dst))
        {
            // Swap if src2 is dead, as the reg can be reused for the dst for opEqs like on x86 (ADD r1, r2)
        }
        else if (src2->IsEqual(dst))
        {
            // Helps lowering of opEqs
        }
        else
        {
            return;
        }
        // Make sure we don't swap 2 srcs with valueOf calls.
        if (OpCodeAttr::OpndHasImplicitCall(instr->m_opcode))
        {
            if (instr->IsBranchInstr())
            {
                if (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive())
                {
                    return;
                }
            }
            else if (!src1->GetValueType().IsPrimitive() && !src2->GetValueType().IsPrimitive())
            {
                return;
            }
        }
    }
    else
    {
        return;
    }

    // 'opcode' may be replaced with the inverted comparison opcode before jumping to swap_srcs,
    // which writes it back to the instruction.
    Js::OpCode opcode = instr->m_opcode;
    switch (opcode)
    {
    // Commutative operators: safe to swap srcs without changing the opcode.
    case Js::OpCode::And_A:
    case Js::OpCode::Mul_A:
    case Js::OpCode::Or_A:
    case Js::OpCode::Xor_A:
    case Js::OpCode::And_I4:
    case Js::OpCode::Mul_I4:
    case Js::OpCode::Or_I4:
    case Js::OpCode::Xor_I4:
    case Js::OpCode::Add_I4:
swap_srcs:
        if (!instr->GetSrc2()->IsImmediateOpnd())
        {
            instr->m_opcode = opcode;
            instr->SwapOpnds();
            // Keep the tracked values in sync with the swapped operands.
            Value *tempVal = *pSrc1Val;
            *pSrc1Val = *pSrc2Val;
            *pSrc2Val = tempVal;
            return;
        }
        break;

    case Js::OpCode::BrSrEq_A:
    case Js::OpCode::BrSrNotNeq_A:
    case Js::OpCode::BrEq_I4:
        goto swap_srcs;

    case Js::OpCode::BrSrNeq_A:
    case Js::OpCode::BrNeq_A:
    case Js::OpCode::BrSrNotEq_A:
    case Js::OpCode::BrNotEq_A:
    case Js::OpCode::BrNeq_I4:
        goto swap_srcs;

    // Ordered comparisons: swapping the srcs requires inverting the comparison direction.
    case Js::OpCode::BrGe_A:
        opcode = Js::OpCode::BrLe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGe_A:
        opcode = Js::OpCode::BrNotLe_A;
        goto swap_srcs;

    case Js::OpCode::BrGe_I4:
        opcode = Js::OpCode::BrLe_I4;
        goto swap_srcs;

    case Js::OpCode::BrGt_A:
        opcode = Js::OpCode::BrLt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotGt_A:
        opcode = Js::OpCode::BrNotLt_A;
        goto swap_srcs;

    case Js::OpCode::BrGt_I4:
        opcode = Js::OpCode::BrLt_I4;
        goto swap_srcs;

    case Js::OpCode::BrLe_A:
        opcode = Js::OpCode::BrGe_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLe_A:
        opcode = Js::OpCode::BrNotGe_A;
        goto swap_srcs;

    case Js::OpCode::BrLe_I4:
        opcode = Js::OpCode::BrGe_I4;
        goto swap_srcs;

    case Js::OpCode::BrLt_A:
        opcode = Js::OpCode::BrGt_A;
        goto swap_srcs;

    case Js::OpCode::BrNotLt_A:
        opcode = Js::OpCode::BrNotGt_A;
        goto swap_srcs;

    case Js::OpCode::BrLt_I4:
        opcode = Js::OpCode::BrGt_I4;
        goto swap_srcs;

    case Js::OpCode::BrEq_A:
    case Js::OpCode::BrNotNeq_A:
    case Js::OpCode::CmEq_A:
    case Js::OpCode::CmNeq_A:
        // this == "" not the same as "" == this...
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        goto swap_srcs;

    case Js::OpCode::CmGe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLe_A;
        goto swap_srcs;

    case Js::OpCode::CmGt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmLt_A;
        goto swap_srcs;

    case Js::OpCode::CmLe_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGe_A;
        goto swap_srcs;

    case Js::OpCode::CmLt_A:
        if (!src1->IsImmediateOpnd() && (!src1->GetValueType().IsPrimitive() || !src2->GetValueType().IsPrimitive()))
        {
            return;
        }
        opcode = Js::OpCode::CmGt_A;
        goto swap_srcs;

    case Js::OpCode::CallI:
    case Js::OpCode::CallIFixed:
    case Js::OpCode::NewScObject:
    case Js::OpCode::NewScObjectSpread:
    case Js::OpCode::NewScObjArray:
    case Js::OpCode::NewScObjArraySpread:
    case Js::OpCode::NewScObjectNoCtor:
        // Don't insert load to register if the function operand is a fixed function.
        if (instr->HasFixedFunctionAddressTarget())
        {
            return;
        }
        break;

        // Can't do add because <32 + "Hello"> isn't equal to <"Hello" + 32>
        // Lower can do the swap. Other op-codes listed below don't need immediate source hoisting, as the fast paths handle it,
        // or the lowering handles the hoisting.
    case Js::OpCode::Add_A:
        if (src1->IsFloat())
        {
            goto swap_srcs;
        }
        return;

    case Js::OpCode::Sub_I4:
    case Js::OpCode::Neg_I4:
    case Js::OpCode::Not_I4:
    case Js::OpCode::NewScFunc:
    case Js::OpCode::NewScGenFunc:
    case Js::OpCode::NewScArray:
    case Js::OpCode::NewScIntArray:
    case Js::OpCode::NewScFltArray:
    case Js::OpCode::NewScArrayWithMissingValues:
    case Js::OpCode::NewRegEx:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::ThrowRuntimeError:
    case Js::OpCode::TrapIfMinIntOverNegOne:
    case Js::OpCode::TrapIfTruncOverflow:
    case Js::OpCode::TrapIfZero:
    case Js::OpCode::FromVar:
    case Js::OpCode::Conv_Prim:
    case Js::OpCode::LdC_A_I4:
    case Js::OpCode::LdStr:
    case Js::OpCode::InitFld:
    case Js::OpCode::InitRootFld:
    case Js::OpCode::StartCall:
    case Js::OpCode::ArgOut_A:
    case Js::OpCode::ArgOut_A_Inline:
    case Js::OpCode::ArgOut_A_Dynamic:
    case Js::OpCode::ArgOut_A_FromStackArgs:
    case Js::OpCode::ArgOut_A_InlineBuiltIn:
    case Js::OpCode::ArgOut_A_InlineSpecialized:
    case Js::OpCode::ArgOut_A_SpreadArg:
    case Js::OpCode::InlineeEnd:
    case Js::OpCode::EndCallForPolymorphicInlinee:
    case Js::OpCode::InlineeMetaArg:
    case Js::OpCode::InlineBuiltInEnd:
    case Js::OpCode::InlineNonTrackingBuiltInEnd:
    case Js::OpCode::CallHelper:
    case Js::OpCode::LdElemUndef:
    case Js::OpCode::LdElemUndefScoped:
    case Js::OpCode::RuntimeTypeError:
    case Js::OpCode::RuntimeReferenceError:
    case Js::OpCode::Ret:
    case Js::OpCode::NewScObjectSimple:
    case Js::OpCode::NewScObjectLiteral:
    case Js::OpCode::StFld:
    case Js::OpCode::StRootFld:
    case Js::OpCode::StSlot:
    case Js::OpCode::StSlotChkUndecl:
    case Js::OpCode::StElemC:
    case Js::OpCode::StArrSegElemC:
    case Js::OpCode::StElemI_A:
    case Js::OpCode::StElemI_A_Strict:
    case Js::OpCode::CallDirect:
    case Js::OpCode::BrNotHasSideEffects:
    case Js::OpCode::NewConcatStrMulti:
    case Js::OpCode::NewConcatStrMultiBE:
    case Js::OpCode::ExtendArg_A:
#ifdef ENABLE_DOM_FAST_PATH
    case Js::OpCode::DOMFastPathGetter:
    case Js::OpCode::DOMFastPathSetter:
#endif
    case Js::OpCode::NewScopeSlots:
    case Js::OpCode::NewScopeSlotsWithoutPropIds:
    case Js::OpCode::NewStackScopeSlots:
    case Js::OpCode::IsInst:
    case Js::OpCode::BailOnEqual:
    case Js::OpCode::BailOnNotEqual:
    case Js::OpCode::StArrViewElem:
        return;
    }

    if (!src1->IsImmediateOpnd())
    {
        return;
    }

    // The fast paths or lowering of the remaining instructions may not support handling immediate opnds for the first src. The
    // immediate src1 is hoisted here into a separate instruction.
    if (src1->IsIntConstOpnd())
    {
        IR::Instr *newInstr = instr->HoistSrc1(Js::OpCode::Ld_I4);
        ToInt32Dst(newInstr, newInstr->GetDst()->AsRegOpnd(), this->currentBlock);
    }
    else if (src1->IsInt64ConstOpnd())
    {
        instr->HoistSrc1(Js::OpCode::Ld_I4);
    }
    else
    {
        instr->HoistSrc1(Js::OpCode::Ld_A);
    }
    src1 = instr->GetSrc1();
    src1->AsRegOpnd()->m_sym->SetIsConst();
}
  14456. // Clear the ValueMap pf the values invalidated by this instr.
  14457. void
  14458. GlobOpt::ProcessKills(IR::Instr *instr)
  14459. {
  14460. this->ProcessFieldKills(instr);
  14461. this->ProcessValueKills(instr);
  14462. this->ProcessArrayValueKills(instr);
  14463. }
  14464. bool
  14465. GlobOpt::OptIsInvariant(IR::Opnd *src, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives)
  14466. {
  14467. if(!loop->CanHoistInvariants())
  14468. {
  14469. return false;
  14470. }
  14471. Sym *sym;
  14472. switch(src->GetKind())
  14473. {
  14474. case IR::OpndKindAddr:
  14475. case IR::OpndKindFloatConst:
  14476. case IR::OpndKindIntConst:
  14477. return true;
  14478. case IR::OpndKindReg:
  14479. sym = src->AsRegOpnd()->m_sym;
  14480. break;
  14481. case IR::OpndKindSym:
  14482. sym = src->AsSymOpnd()->m_sym;
  14483. if (src->AsSymOpnd()->IsPropertySymOpnd())
  14484. {
  14485. if (src->AsSymOpnd()->AsPropertySymOpnd()->IsTypeChecked())
  14486. {
  14487. // We do not handle hoisting these yet. We might be hoisting this across the instr with the type check protecting this one.
  14488. // And somehow, the dead-store pass now removes the type check on that instr later on...
  14489. // For CheckFixedFld, there is no benefit hoisting these if they don't have a type check as they won't generate code.
  14490. return false;
  14491. }
  14492. }
  14493. break;
  14494. case IR::OpndKindHelperCall:
  14495. // Helper calls, like the private slot getter, can be invariant.
  14496. // Consider moving more math builtin to invariant?
  14497. return HelperMethodAttributes::IsInVariant(src->AsHelperCallOpnd()->m_fnHelper);
  14498. default:
  14499. return false;
  14500. }
  14501. return OptIsInvariant(sym, block, loop, srcVal, isNotTypeSpecConv, allowNonPrimitives);
  14502. }
// Sym-level invariance check: returns true when 'sym' holds the same value (by value
// number) in 'block' as it does in the loop's landing pad, and the required
// type-specialized/var version of the sym is live in the landing pad. On success the
// landing-pad value is returned through 'loopHeadValRef' (if provided).
// 'isNotTypeSpecConv' is false when hoisting a type-spec conversion, which adds extra
// restrictions (see the symsDefInLoop and int-bounds checks below).
bool
GlobOpt::OptIsInvariant(Sym *sym, BasicBlock *block, Loop *loop, Value *srcVal, bool isNotTypeSpecConv, bool allowNonPrimitives, Value **loopHeadValRef)
{
    // If the caller didn't ask for the landing-pad value, point at a local so the
    // code below can assign through the reference unconditionally.
    Value *localLoopHeadVal;
    if(!loopHeadValRef)
    {
        loopHeadValRef = &localLoopHeadVal;
    }
    Value *&loopHeadVal = *loopHeadValRef;
    loopHeadVal = nullptr;

    if(!loop->CanHoistInvariants())
    {
        return false;
    }

    if (sym->IsStackSym())
    {
        if (sym->AsStackSym()->IsTypeSpec())
        {
            StackSym *varSym = sym->AsStackSym()->GetVarEquivSym(this->func);
            // Make sure the int32/float64 version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (sym->AsStackSym()->IsInt32())
            {
                Assert(block->globOptData.liveInt32Syms->Test(varSym->m_id));
                if (!loop->landingPad->globOptData.liveInt32Syms->Test(varSym->m_id) ||
                    (loop->landingPad->globOptData.liveLossyInt32Syms->Test(varSym->m_id) &&
                        !block->globOptData.liveLossyInt32Syms->Test(varSym->m_id)))
                {
                    // Either the int32 sym is not live in the landing pad, or it's lossy in the landing pad and the
                    // instruction's block is using the lossless version. In either case, the instruction cannot be hoisted
                    // without doing a conversion of this operand.
                    return false;
                }
            }
            else if (sym->AsStackSym()->IsFloat64())
            {
                if (!loop->landingPad->globOptData.liveFloat64Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }
#ifdef ENABLE_SIMDJS
            else
            {
                Assert(sym->AsStackSym()->IsSimd128());
                if (!loop->landingPad->globOptData.liveSimd128F4Syms->Test(varSym->m_id) && !loop->landingPad->globOptData.liveSimd128I4Syms->Test(varSym->m_id))
                {
                    return false;
                }
            }
#endif
            // Continue the value-number comparison below on the var version of the sym.
            sym = sym->AsStackSym()->GetVarEquivSym(this->func);
        }
        else
        {
            // Make sure the var version of this is available.
            // Note: We could handle this by converting the src, but usually the
            // conversion is hoistable if this is hoistable anyway.
            // In some weird cases it may not be however, so we'll bail out.
            if (!loop->landingPad->globOptData.liveVarSyms->Test(sym->m_id))
            {
                return false;
            }
        }
    }
    else if (sym->IsPropertySym())
    {
        // A property sym must be live as a field in the landing pad to be hoistable.
        if (!loop->landingPad->globOptData.liveFields->Test(sym->m_id))
        {
            return false;
        }
    }
    else
    {
        return false;
    }

    // We rely on having a value.
    if (srcVal == NULL)
    {
        return false;
    }

    // A symbol is invariant if its current value is the same as it was upon entering the loop.
    loopHeadVal = loop->landingPad->globOptData.FindValue(sym);
    if (loopHeadVal == NULL || loopHeadVal->GetValueNumber() != srcVal->GetValueNumber())
    {
        return false;
    }

    // Can't hoist non-primitives, unless we have safeguards against valueof/tostring. Additionally, we need to consider
    // the value annotations on the source *before* the loop: if we hoist this instruction outside the loop, we can't
    // necessarily rely on type annotations added (and enforced) earlier in the loop's body.
    //
    // It might look as though !loopHeadVal->GetValueInfo()->IsPrimitive() implies
    // !loop->landingPad->globOptData.IsTypeSpecialized(sym), but it turns out that this is not always the case. We
    // encountered a test case in which we had previously hoisted a FromVar (to float 64) instruction, but its bailout code was
    // BailoutPrimitiveButString, rather than BailoutNumberOnly, which would have allowed us to conclude that the dest was
    // definitely a float64. Instead, it was only *likely* a float64, causing IsPrimitive to return false.
    if (!allowNonPrimitives && !loopHeadVal->GetValueInfo()->IsPrimitive() && !loop->landingPad->globOptData.IsTypeSpecialized(sym))
    {
        return false;
    }

    if(!isNotTypeSpecConv && loop->symsDefInLoop->Test(sym->m_id))
    {
        // Typically, a sym is considered invariant if it has the same value in the current block and in the loop landing pad.
        // The sym may have had a different value earlier in the loop or on the back-edge, but as long as it's reassigned to its
        // value outside the loop, it would be considered invariant in this block. Consider that case:
        //     s1 = s2[invariant]
        //     <loop start>
        //         s1 = s2[invariant]
        //         // s1 now has the same value as in the landing pad, and is considered invariant
        //         s1 += s3
        //         // s1 is not invariant here, or on the back-edge
        //         ++s3 // s3 is not invariant, so the add above cannot be hoisted
        //     <loop end>
        //
        // A problem occurs at the point of (s1 += s3) when:
        //     - At (s1 = s2) inside the loop, s1 was made to be the sym store of that value. This by itself is legal, because
        //       after that transfer, s1 and s2 have the same value.
        //     - (s1 += s3) is type-specialized but s1 is not specialized in the loop header. This happens when s1 is not
        //       specialized entering the loop, and since s1 is not used before it's defined in the loop, it's not specialized
        //       on back-edges.
        //
        // With that, at (s1 += s3), the conversion of s1 to the type-specialized version would be hoisted because s1 is
        // invariant just before that instruction. Since this add is specialized, the specialized version of the sym is modified
        // in the loop without a reassignment at (s1 = s2) inside the loop, and (s1 += s3) would then use an incorrect value of
        // s1 (it would use the value of s1 from the previous loop iteration, instead of using the value of s2).
        //
        // The problem here, is that we cannot hoist the conversion of s1 into its specialized version across the assignment
        // (s1 = s2) inside the loop. So for the purposes of type specialization, don't consider a sym invariant if it has a def
        // inside the loop.
        return false;
    }

    // For values with an int range, require additionally that the range is the same as in the landing pad, as the range may
    // have been changed on this path based on branches, and int specialization and invariant hoisting may rely on the range
    // being the same. For type spec conversions, only require that if the value is an int constant in the current block, that
    // it is also an int constant with the same value in the landing pad. Other range differences don't matter for type spec.
    IntConstantBounds srcIntConstantBounds, loopHeadIntConstantBounds;
    if(srcVal->GetValueInfo()->TryGetIntConstantBounds(&srcIntConstantBounds) &&
        (isNotTypeSpecConv || srcIntConstantBounds.IsConstant()) &&
        (
            !loopHeadVal->GetValueInfo()->TryGetIntConstantBounds(&loopHeadIntConstantBounds) ||
            loopHeadIntConstantBounds.LowerBound() != srcIntConstantBounds.LowerBound() ||
            loopHeadIntConstantBounds.UpperBound() != srcIntConstantBounds.UpperBound()
        ))
    {
        return false;
    }

    // If the loopHeadVal is primitive, the current value should be as well. This really should be
    // srcVal->GetValueInfo()->IsPrimitive() instead of IsLikelyPrimitive, but this stronger assertion
    // doesn't hold in some cases when this method is called out of the array code.
    Assert((!loopHeadVal->GetValueInfo()->IsPrimitive()) || srcVal->GetValueInfo()->IsLikelyPrimitive());

    return true;
}
// Instruction-level invariance check: returns true when 'instr' is legal and
// profitable to hoist out of 'loop'. Requires the opcode to be CSE-able, filters out
// opcodes that can't or shouldn't be hoisted, requires a reg dst (if any), and
// requires both sources (via their values 'src1Val'/'src2Val') to be invariant.
bool
GlobOpt::OptIsInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *src1Val,
    Value *src2Val,
    bool isNotTypeSpecConv,
    const bool forceInvariantHoisting)
{
    if (!loop->CanHoistInvariants())
    {
        return false;
    }
    if (!OpCodeAttr::CanCSE(instr->m_opcode))
    {
        return false;
    }

    // Operands of opcodes that may trigger implicit calls (valueof/tostring) must be
    // primitive to be safely hoisted; see the opnd-level overload.
    bool allowNonPrimitives = !OpCodeAttr::OpndHasImplicitCall(instr->m_opcode);

    switch(instr->m_opcode)
    {
        // Can't legally hoist these
    case Js::OpCode::LdLen_A:
        return false;

        //Can't Hoist BailOnNotStackArgs, as it is necessary as InlineArgsOptimization relies on this opcode
        //to decide whether to throw rejit exception or not.
    case Js::OpCode::BailOnNotStackArgs:
        return false;

        // Usually not worth hoisting these
    case Js::OpCode::LdStr:
    case Js::OpCode::Ld_A:
    case Js::OpCode::Ld_I4:
    case Js::OpCode::LdC_A_I4:
        if(!forceInvariantHoisting)
        {
            return false;
        }
        break;

        // Can't hoist these outside the function it's for. The LdArgumentsFromFrame for an inlinee depends on the inlinee meta arg
        // that holds the arguments object, which is only initialized at the start of the inlinee. So, can't hoist this outside the
        // inlinee.
    case Js::OpCode::LdArgumentsFromFrame:
        if(instr->m_func != loop->GetFunc())
        {
            return false;
        }
        break;

    case Js::OpCode::FromVar:
        if (instr->HasBailOutInfo())
        {
            // The bailout guards against non-primitive inputs, so they are allowed here.
            allowNonPrimitives = true;
        }
        break;

    case Js::OpCode::CheckObjType:
        // Bug 11712101: If the operand is a field, ensure that its containing object type is invariant
        // before hoisting -- that is, don't hoist a CheckObjType over a DeleteFld on that object.
        // (CheckObjType only checks the operand and its immediate parent, so we don't need to go
        // any farther up the object graph.)
        Assert(instr->GetSrc1());
        PropertySym *propertySym = instr->GetSrc1()->AsPropertySymOpnd()->GetPropertySym();
        if (propertySym->HasObjectTypeSym()) {
            StackSym *objectTypeSym = propertySym->GetObjectTypeSym();
            if (!this->OptIsInvariant(objectTypeSym, block, loop, this->CurrentBlockData()->FindValue(objectTypeSym), true, true)) {
                return false;
            }
        }
        break;
    }

    // Only reg dsts (or no dst) are supported for hoisting.
    IR::Opnd *dst = instr->GetDst();
    if (dst && !dst->IsRegOpnd())
    {
        return false;
    }

    IR::Opnd *src1 = instr->GetSrc1();
    if (src1)
    {
        if (!this->OptIsInvariant(src1, block, loop, src1Val, isNotTypeSpecConv, allowNonPrimitives))
        {
            return false;
        }

        IR::Opnd *src2 = instr->GetSrc2();
        if (src2)
        {
            if (!this->OptIsInvariant(src2, block, loop, src2Val, isNotTypeSpecConv, allowNonPrimitives))
            {
                return false;
            }
        }
    }

    return true;
}
  14748. bool
  14749. GlobOpt::OptDstIsInvariant(IR::RegOpnd *dst)
  14750. {
  14751. StackSym *dstSym = dst->m_sym;
  14752. if (dstSym->IsTypeSpec())
  14753. {
  14754. // The type-specialized sym may be single def, but not the original...
  14755. dstSym = dstSym->GetVarEquivSym(this->func);
  14756. }
  14757. return (dstSym->m_isSingleDef);
  14758. }
  14759. void
  14760. GlobOpt::OptHoistUpdateValueType(
  14761. Loop* loop,
  14762. IR::Instr* instr,
  14763. IR::Opnd* srcOpnd,
  14764. Value* opndVal)
  14765. {
  14766. if (opndVal == nullptr || instr->m_opcode == Js::OpCode::FromVar)
  14767. {
  14768. return;
  14769. }
  14770. Sym* opndSym = srcOpnd->GetSym();;
  14771. if (opndSym)
  14772. {
  14773. BasicBlock* landingPad = loop->landingPad;
  14774. Value* opndValueInLandingPad = landingPad->globOptData.FindValue(opndSym);
  14775. Assert(opndVal->GetValueNumber() == opndValueInLandingPad->GetValueNumber());
  14776. ValueType opndValueTypeInLandingPad = opndValueInLandingPad->GetValueInfo()->Type();
  14777. if (srcOpnd->GetValueType() != opndValueTypeInLandingPad)
  14778. {
  14779. if (instr->m_opcode == Js::OpCode::SetConcatStrMultiItemBE)
  14780. {
  14781. Assert(!opndValueTypeInLandingPad.IsString());
  14782. Assert(instr->GetDst());
  14783. IR::RegOpnd* strOpnd = IR::RegOpnd::New(TyVar, instr->m_func);
  14784. strOpnd->SetValueType(ValueType::String);
  14785. strOpnd->SetValueTypeFixed();
  14786. IR::Instr* convPrimStrInstr =
  14787. IR::Instr::New(Js::OpCode::Conv_PrimStr, strOpnd, srcOpnd->Use(instr->m_func), instr->m_func);
  14788. instr->ReplaceSrc(srcOpnd, strOpnd);
  14789. if (loop->bailOutInfo->bailOutInstr)
  14790. {
  14791. loop->bailOutInfo->bailOutInstr->InsertBefore(convPrimStrInstr);
  14792. }
  14793. else
  14794. {
  14795. landingPad->InsertAfter(convPrimStrInstr);
  14796. }
  14797. }
  14798. srcOpnd->SetValueType(opndValueTypeInLandingPad);
  14799. }
  14800. if (opndSym->IsPropertySym())
  14801. {
  14802. // Also fix valueInfo on objPtr
  14803. StackSym* opndObjPtrSym = opndSym->AsPropertySym()->m_stackSym;
  14804. Value* opndObjPtrSymValInLandingPad = landingPad->globOptData.FindValue(opndObjPtrSym);
  14805. ValueInfo* opndObjPtrSymValueInfoInLandingPad = opndObjPtrSymValInLandingPad->GetValueInfo();
  14806. srcOpnd->AsSymOpnd()->SetPropertyOwnerValueType(opndObjPtrSymValueInfoInLandingPad->Type());
  14807. }
  14808. }
  14809. }
// Hoists 'instr' (already determined invariant with respect to 'loop') out of the
// loop: the instruction is moved into the loop's landing pad, or just before the
// loop's shared bailout instruction if one exists. Along the way this:
//   - refreshes the sources' value types to their landing-pad state and clears
//     temp-last-use info (the hoist may move the use past other uses),
//   - for a hoisted FromVar, adds a bailout ('bailoutKind') when the landing-pad
//     value of the source is weaker than it was at the original position,
//   - gives the instruction a fresh dst (leaving a compensating Ld_A in place) when
//     the original dst cannot legally be defined in the landing pad,
//   - replicates liveness and value info for the dst backwards through all blocks
//     between 'block' and the landing pad.
// 'lossy' indicates a lossy int32 type-spec dst; 'isNotTypeSpecConv' is false when
// the hoisted instruction is a type-spec conversion of an existing sym.
void
GlobOpt::OptHoistInvariant(
    IR::Instr *instr,
    BasicBlock *block,
    Loop *loop,
    Value *dstVal,
    Value *const src1Val,
    Value *const src2Val,
    bool isNotTypeSpecConv,
    bool lossy,
    IR::BailOutKind bailoutKind)
{
    BasicBlock *landingPad = loop->landingPad;

    IR::Opnd* src1 = instr->GetSrc1();
    if (src1)
    {
        // We are hoisting this instruction possibly past other uses, which might invalidate the last use info. Clear it.
        OptHoistUpdateValueType(loop, instr, src1, src1Val);

        if (src1->IsRegOpnd())
        {
            src1->AsRegOpnd()->m_isTempLastUse = false;
        }

        IR::Opnd* src2 = instr->GetSrc2();
        if (src2)
        {
            OptHoistUpdateValueType(loop, instr, src2, src2Val);

            if (src2->IsRegOpnd())
            {
                src2->AsRegOpnd()->m_isTempLastUse = false;
            }
        }
    }

    IR::RegOpnd *dst = instr->GetDst() ? instr->GetDst()->AsRegOpnd() : nullptr;
    if(dst)
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::CmEq_I4:
        case Js::OpCode::CmNeq_I4:
        case Js::OpCode::CmLt_I4:
        case Js::OpCode::CmLe_I4:
        case Js::OpCode::CmGt_I4:
        case Js::OpCode::CmGe_I4:
        case Js::OpCode::CmUnLt_I4:
        case Js::OpCode::CmUnLe_I4:
        case Js::OpCode::CmUnGt_I4:
        case Js::OpCode::CmUnGe_I4:
            // These operations are a special case. They generate a lossy int value, and the var sym is initialized using
            // Conv_Bool. A sym cannot be live only as a lossy int sym, the var needs to be live as well since the lossy int
            // sym cannot be used to convert to var. We don't know however, whether the Conv_Bool will be hoisted. The idea
            // currently is that the sym is only used on the path in which it is initialized inside the loop. So, don't
            // hoist any liveness info for the dst.
            if (!this->GetIsAsmJSFunc())
            {
                lossy = true;
            }
            break;

        case Js::OpCode::FromVar:
        {
            StackSym* src1StackSym = IR::RegOpnd::TryGetStackSym(instr->GetSrc1());

            if (instr->HasBailOutInfo())
            {
                // An existing bailout must be one of the kinds FromVar can carry.
                IR::BailOutKind instrBailoutKind = instr->GetBailOutKind();
#ifdef ENABLE_SIMDJS
                Assert(instrBailoutKind == IR::BailOutIntOnly ||
                    instrBailoutKind == IR::BailOutExpectingInteger ||
                    instrBailoutKind == IR::BailOutOnNotPrimitive ||
                    instrBailoutKind == IR::BailOutNumberOnly ||
                    instrBailoutKind == IR::BailOutPrimitiveButString ||
                    instrBailoutKind == IR::BailOutSimd128F4Only ||
                    instrBailoutKind == IR::BailOutSimd128I4Only);
#else
                Assert(instrBailoutKind == IR::BailOutIntOnly ||
                    instrBailoutKind == IR::BailOutExpectingInteger ||
                    instrBailoutKind == IR::BailOutOnNotPrimitive ||
                    instrBailoutKind == IR::BailOutNumberOnly ||
                    instrBailoutKind == IR::BailOutPrimitiveButString);
#endif
            }
            else if (src1StackSym && bailoutKind != IR::BailOutInvalid)
            {
                // We may be hoisting FromVar from a region where it didn't need a bailout (src1 had a definite value type) to a region
                // where it would. In such cases, the FromVar needs a bailout based on the value type of src1 in its new position.
                Assert(!src1StackSym->IsTypeSpec());
                Value* landingPadSrc1val = landingPad->globOptData.FindValue(src1StackSym);
                Assert(src1Val->GetValueNumber() == landingPadSrc1val->GetValueNumber());

                ValueInfo *src1ValueInfo = src1Val->GetValueInfo();
                ValueInfo *landingPadSrc1ValueInfo = landingPadSrc1val->GetValueInfo();
                IRType dstType = dst->GetType();

                // Retags the source with its landing-pad value type and converts the
                // instruction to a bailout instruction of 'bailoutKind'.
                const auto AddBailOutToFromVar = [&]()
                {
                    instr->GetSrc1()->SetValueType(landingPadSrc1val->GetValueInfo()->Type());
                    EnsureBailTarget(loop);
                    if (block->IsLandingPad())
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind, loop->bailOutInfo->bailOutOffset);
                    }
                    else
                    {
                        instr = instr->ConvertToBailOutInstr(instr, bailoutKind);
                    }
                };

                // A definite type in the source position and not a definite type in the destination (landing pad)
                // and no bailout on the instruction; we should put a bailout on the hoisted instruction.
                if (dstType == TyInt32)
                {
                    if (lossy)
                    {
                        if ((src1ValueInfo->IsPrimitive() || block->globOptData.IsTypeSpecialized(src1StackSym)) &&          // didn't need a lossy type spec bailout in the source block
                            (!landingPadSrc1ValueInfo->IsPrimitive() && !landingPad->globOptData.IsTypeSpecialized(src1StackSym))) // needs a lossy type spec bailout in the landing pad
                        {
                            bailoutKind = IR::BailOutOnNotPrimitive;
                            AddBailOutToFromVar();
                        }
                    }
                    else if (src1ValueInfo->IsInt() && !landingPadSrc1ValueInfo->IsInt())
                    {
                        AddBailOutToFromVar();
                    }
                }
                else if ((dstType == TyFloat64 && src1ValueInfo->IsNumber() && !landingPadSrc1ValueInfo->IsNumber()) ||
                    (IRType_IsSimd128(dstType) && src1ValueInfo->IsSimd128() && !landingPadSrc1ValueInfo->IsSimd128()))
                {
                    AddBailOutToFromVar();
                }
            }

            break;
        }
        }

        if (dstVal == NULL)
        {
            dstVal = this->NewGenericValue(ValueType::Uninitialized, dst);
        }

        // ToVar/FromVar don't need a new dst because it has to be invariant if their src is invariant.
        bool dstDoesntNeedLoad = (!isNotTypeSpecConv && instr->m_opcode != Js::OpCode::LdC_A_I4);

        StackSym *varSym = dst->m_sym;
        if (varSym->IsTypeSpec())
        {
            varSym = varSym->GetVarEquivSym(this->func);
        }

        Value *const landingPadDstVal = loop->landingPad->globOptData.FindValue(varSym);
        if(landingPadDstVal
            ? dstVal->GetValueNumber() != landingPadDstVal->GetValueNumber()
            : loop->symsDefInLoop->Test(varSym->m_id))
        {
            // We need a temp for FromVar/ToVar if dst changes in the loop.
            dstDoesntNeedLoad = false;
        }

        if (!dstDoesntNeedLoad && this->OptDstIsInvariant(dst) == false)
        {
            // Keep dst in place, hoist instr using a new dst.
            instr->UnlinkDst();

            // Set type specialization info correctly for this new sym
            StackSym *copyVarSym;
            IR::RegOpnd *copyReg;
            if (dst->m_sym->IsTypeSpec())
            {
                copyVarSym = StackSym::New(TyVar, instr->m_func);
                StackSym *copySym = copyVarSym;
                if (dst->m_sym->IsInt32())
                {
                    if(lossy)
                    {
                        // The new sym would only be live as a lossy int since we're only hoisting the store to the int version
                        // of the sym, and cannot be converted to var. It is not legal to have a sym only live as a lossy int,
                        // so don't update liveness info for this sym.
                    }
                    else
                    {
                        block->globOptData.liveInt32Syms->Set(copyVarSym->m_id);
                    }
                    copySym = copySym->GetInt32EquivSym(instr->m_func);
                }
                else if (dst->m_sym->IsFloat64())
                {
                    block->globOptData.liveFloat64Syms->Set(copyVarSym->m_id);
                    copySym = copySym->GetFloat64EquivSym(instr->m_func);
                }
#ifdef ENABLE_SIMDJS
                else if (dst->IsSimd128())
                {
                    // SIMD_JS
                    if (dst->IsSimd128F4())
                    {
                        block->globOptData.liveSimd128F4Syms->Set(copyVarSym->m_id);
                        copySym = copySym->GetSimd128F4EquivSym(instr->m_func);
                    }
                    else
                    {
                        Assert(dst->IsSimd128I4());
                        block->globOptData.liveSimd128I4Syms->Set(copyVarSym->m_id);
                        copySym = copySym->GetSimd128I4EquivSym(instr->m_func);
                    }
                }
#endif
                copyReg = IR::RegOpnd::New(copySym, copySym->GetType(), instr->m_func);
            }
            else
            {
                copyReg = IR::RegOpnd::New(dst->GetType(), instr->m_func);
                copyVarSym = copyReg->m_sym;
                block->globOptData.liveVarSyms->Set(copyVarSym->m_id);
            }

            // Leave "dst = Ld_A copyReg" at the original position so in-loop users of
            // the original dst still see the value.
            copyReg->SetValueType(dst->GetValueType());
            IR::Instr *copyInstr = IR::Instr::New(Js::OpCode::Ld_A, dst, copyReg, instr->m_func);
            copyInstr->SetByteCodeOffset(instr);
            instr->SetDst(copyReg);
            instr->InsertBefore(copyInstr);

            dst->m_sym->m_mayNotBeTempLastUse = true;
            if (instr->GetSrc1() && instr->GetSrc1()->IsImmediateOpnd())
            {
                // Propagate IsIntConst if appropriate
                switch(instr->m_opcode)
                {
                case Js::OpCode::Ld_A:
                case Js::OpCode::Ld_I4:
                case Js::OpCode::LdC_A_I4:
                    copyReg->m_sym->SetIsConst();
                    break;
                }
            }

            ValueInfo *dstValueInfo = dstVal->GetValueInfo();
            if((!dstValueInfo->GetSymStore() || dstValueInfo->GetSymStore() == varSym) && !lossy)
            {
                // The destination's value may have been transferred from one of the invariant sources, in which case we should
                // keep the sym store intact, as that sym will likely have a better lifetime than this new copy sym. For
                // instance, if we're inside a conditioned block, because we don't make the copy sym live and set its value in
                // all preceding blocks, this sym would not be live after exiting this block, causing this value to not
                // participate in copy-prop after this block.
                this->SetSymStoreDirect(dstValueInfo, copyVarSym);
            }

            block->globOptData.InsertNewValue(dstVal, copyReg);
            dst = copyReg;
        }
    }

    // Move to landing pad
    block->UnlinkInstr(instr);

    if (loop->bailOutInfo->bailOutInstr)
    {
        loop->bailOutInfo->bailOutInstr->InsertBefore(instr);
    }
    else
    {
        landingPad->InsertAfter(instr);
    }

    GlobOpt::MarkNonByteCodeUsed(instr);

    if (instr->HasBailOutInfo() || instr->HasAuxBailOut())
    {
        Assert(loop->bailOutInfo);
        EnsureBailTarget(loop);

        // Copy bailout info of loop top.
        instr->ReplaceBailOutInfo(loop->bailOutInfo);
    }

    if(!dst)
    {
        return;
    }

    // The bailout info's liveness for the dst sym is not updated in loop landing pads because bailout instructions previously
    // hoisted into the loop's landing pad may bail out before the current type of the dst sym became live (perhaps due to this
    // instruction). Since the landing pad will have a shared bailout point, the bailout info cannot assume that the current
    // type of the dst sym was live during every bailout hoisted into the landing pad.
    StackSym *const dstSym = dst->m_sym;
    StackSym *const dstVarSym = dstSym->IsTypeSpec() ? dstSym->GetVarEquivSym(nullptr) : dstSym;
    Assert(dstVarSym);
    if(isNotTypeSpecConv || !loop->landingPad->globOptData.IsLive(dstVarSym))
    {
        // A new dst is being hoisted, or the same single-def dst that would not be live before this block. So, make it live and
        // update the value info with the same value info in this block.

        if(lossy)
        {
            // This is a lossy conversion to int. The instruction was given a new dst specifically for hoisting, so this new dst
            // will not be live as a var before this block. A sym cannot be live only as a lossy int sym, the var needs to be
            // live as well since the lossy int sym cannot be used to convert to var. Since the var version of the sym is not
            // going to be initialized, don't hoist any liveness info for the dst. The sym is only going to be used on the path
            // in which it is initialized inside the loop.
            Assert(dstSym->IsTypeSpec());
            Assert(dstSym->IsInt32());
            return;
        }

        // Check if the dst value was transferred from the src. If so, the value transfer needs to be replicated.
        bool isTransfer = dstVal == src1Val;

        StackSym *transferValueOfSym = nullptr;
        if(isTransfer)
        {
            Assert(instr->GetSrc1());
            if(instr->GetSrc1()->IsRegOpnd())
            {
                StackSym *src1Sym = instr->GetSrc1()->AsRegOpnd()->m_sym;
                if(src1Sym->IsTypeSpec())
                {
                    src1Sym = src1Sym->GetVarEquivSym(nullptr);
                    Assert(src1Sym);
                }
                if(dstVal == block->globOptData.FindValue(src1Sym))
                {
                    transferValueOfSym = src1Sym;
                }
            }
        }

        // SIMD_JS
        if (instr->m_opcode == Js::OpCode::ExtendArg_A)
        {
            // Check if we should have CSE'ed this EA
            Assert(instr->GetSrc1());

            // If the dstVal symstore is not the dst itself, then we copied the Value from another expression.
            if (dstVal->GetValueInfo()->GetSymStore() != instr->GetDst()->GetStackSym())
            {
                isTransfer = true;
                transferValueOfSym = dstVal->GetValueInfo()->GetSymStore()->AsStackSym();
            }
        }

        // Walk backwards from 'block' to the landing pad, making the dst live and
        // installing a value for it in every intervening block.
        const ValueNumber dstValueNumber = dstVal->GetValueNumber();
        ValueNumber dstNewValueNumber = InvalidValueNumber;
        for(InvariantBlockBackwardIterator it(this, block, loop->landingPad, nullptr); it.IsValid(); it.MoveNext())
        {
            BasicBlock *const hoistBlock = it.Block();
            GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

            Assert(!hoistBlockData.IsLive(dstVarSym));
            hoistBlockData.MakeLive(dstSym, lossy);

            Value *newDstValue;
            do
            {
                if(isTransfer)
                {
                    if(transferValueOfSym)
                    {
                        newDstValue = hoistBlockData.FindValue(transferValueOfSym);
                        if(newDstValue && newDstValue->GetValueNumber() == dstValueNumber)
                        {
                            break;
                        }
                    }

                    // It's a transfer, but we don't have a sym whose value number matches in the target block. Use a new value
                    // number since we don't know if there is already a value with the current number for the target block.
                    if(dstNewValueNumber == InvalidValueNumber)
                    {
                        dstNewValueNumber = NewValueNumber();
                    }
                    newDstValue = CopyValue(dstVal, dstNewValueNumber);
                    break;
                }

                newDstValue = CopyValue(dstVal, dstValueNumber);
            } while(false);

            hoistBlockData.SetValue(newDstValue, dstVarSym);
        }
        return;
    }

#if DBG
    if(instr->GetSrc1()->IsRegOpnd()) // Type spec conversion may load a constant into a dst sym
    {
        StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        Assert(srcSym != dstSym); // Type spec conversion must be changing the type, so the syms must be different
        StackSym *const srcVarSym = srcSym->IsTypeSpec() ? srcSym->GetVarEquivSym(nullptr) : srcSym;
        Assert(srcVarSym == dstVarSym); // Type spec conversion must be between variants of the same var sym
    }
#endif

    // Decide whether the hoisted conversion lets us strengthen the value's type in the
    // blocks it is hoisted over (lossless int spec, or float/SIMD spec with a bailout
    // that guarantees the type).
    bool changeValueType = false, changeValueTypeToInt = false;
    if(dstSym->IsTypeSpec())
    {
        if(dst->IsInt32())
        {
            if(!lossy)
            {
                Assert(
                    !instr->HasBailOutInfo() ||
                    instr->GetBailOutKind() == IR::BailOutIntOnly ||
                    instr->GetBailOutKind() == IR::BailOutExpectingInteger);
                changeValueType = changeValueTypeToInt = true;
            }
        }
        else if (dst->IsFloat64())
        {
            if(instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOutNumberOnly)
            {
                changeValueType = true;
            }
        }
#ifdef ENABLE_SIMDJS
        else
        {
            // SIMD_JS
            Assert(dst->IsSimd128());
            if (instr->HasBailOutInfo() &&
                (instr->GetBailOutKind() == IR::BailOutSimd128F4Only || instr->GetBailOutKind() == IR::BailOutSimd128I4Only))
            {
                changeValueType = true;
            }
        }
#endif
    }

    ValueInfo *previousValueInfoBeforeUpdate = nullptr, *previousValueInfoAfterUpdate = nullptr;
    for(InvariantBlockBackwardIterator it(
            this,
            block,
            loop->landingPad,
            dstVarSym,
            dstVal->GetValueNumber());
        it.IsValid();
        it.MoveNext())
    {
        BasicBlock *const hoistBlock = it.Block();
        GlobOptBlockData &hoistBlockData = hoistBlock->globOptData;

    #if DBG
        // TODO: There are some odd cases with field hoisting where the sym is invariant in only part of the loop and the info
        // does not flow through all blocks. Un-comment the verification below after PRE replaces field hoisting.

        //// Verify that the src sym is live as the required type, and that the conversion is valid
        //Assert(IsLive(dstVarSym, &hoistBlockData));
        //if(instr->GetSrc1()->IsRegOpnd())
        //{
        //    IR::RegOpnd *const src = instr->GetSrc1()->AsRegOpnd();
        //    StackSym *const srcSym = instr->GetSrc1()->AsRegOpnd()->m_sym;
        //    if(srcSym->IsTypeSpec())
        //    {
        //        if(src->IsInt32())
        //        {
        //            Assert(hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //            Assert(!hoistBlockData.liveLossyInt32Syms->Test(dstVarSym->m_id)); // shouldn't try to convert a lossy int32 to anything
        //        }
        //        else
        //        {
        //            Assert(src->IsFloat64());
        //            Assert(hoistBlockData.liveFloat64Syms->Test(dstVarSym->m_id));
        //            if(dstSym->IsTypeSpec() && dst->IsInt32())
        //            {
        //                Assert(lossy); // shouldn't try to do a lossless conversion from float64 to int32
        //            }
        //        }
        //    }
        //    else
        //    {
        //        Assert(hoistBlockData.liveVarSyms->Test(dstVarSym->m_id));
        //    }
        //}
        //if(dstSym->IsTypeSpec() && dst->IsInt32())
        //{
        //    // If the sym is already specialized as required in the block to which we are attempting to hoist the conversion,
        //    // that info should have flowed into this block
        //    if(lossy)
        //    {
        //        Assert(!hoistBlockData.liveInt32Syms->Test(dstVarSym->m_id));
        //    }
        //    else
        //    {
        //        Assert(!IsInt32TypeSpecialized(dstVarSym, hoistBlock));
        //    }
        //}
    #endif

        hoistBlockData.MakeLive(dstSym, lossy);

        if(!changeValueType)
        {
            continue;
        }

        Value *const hoistBlockValue = it.InvariantSymValue();
        ValueInfo *const hoistBlockValueInfo = hoistBlockValue->GetValueInfo();
        if(hoistBlockValueInfo == previousValueInfoBeforeUpdate)
        {
            // Same shared value info as the previous block: reuse the specialized
            // value info computed last iteration instead of specializing again.
            if(hoistBlockValueInfo != previousValueInfoAfterUpdate)
            {
                HoistInvariantValueInfo(previousValueInfoAfterUpdate, hoistBlockValue, hoistBlock);
            }
        }
        else
        {
            previousValueInfoBeforeUpdate = hoistBlockValueInfo;
            ValueInfo *const newValueInfo =
                changeValueTypeToInt
                    ? hoistBlockValueInfo->SpecializeToInt32(alloc)
                    : hoistBlockValueInfo->SpecializeToFloat64(alloc);
            previousValueInfoAfterUpdate = newValueInfo;
            ChangeValueInfo(changeValueTypeToInt ? nullptr : hoistBlock, hoistBlockValue, newValueInfo);
        }
    }
}
  15283. bool
  15284. GlobOpt::TryHoistInvariant(
  15285. IR::Instr *instr,
  15286. BasicBlock *block,
  15287. Value *dstVal,
  15288. Value *src1Val,
  15289. Value *src2Val,
  15290. bool isNotTypeSpecConv,
  15291. const bool lossy,
  15292. const bool forceInvariantHoisting,
  15293. IR::BailOutKind bailoutKind)
  15294. {
  15295. Assert(!this->IsLoopPrePass());
  15296. if (OptIsInvariant(instr, block, block->loop, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
  15297. {
  15298. #if DBG
  15299. if (Js::Configuration::Global.flags.Trace.IsEnabled(Js::InvariantsPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId()))
  15300. {
  15301. Output::Print(_u(" **** INVARIANT *** "));
  15302. instr->Dump();
  15303. }
  15304. #endif
  15305. #if ENABLE_DEBUG_CONFIG_OPTIONS
  15306. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::InvariantsPhase))
  15307. {
  15308. Output::Print(_u(" **** INVARIANT *** "));
  15309. Output::Print(_u("%s \n"), Js::OpCodeUtil::GetOpCodeName(instr->m_opcode));
  15310. }
  15311. #endif
  15312. Loop *loop = block->loop;
  15313. // Try hoisting from to outer most loop
  15314. while (loop->parent && OptIsInvariant(instr, block, loop->parent, src1Val, src2Val, isNotTypeSpecConv, forceInvariantHoisting))
  15315. {
  15316. loop = loop->parent;
  15317. }
  15318. // Record the byte code use here since we are going to move this instruction up
  15319. if (isNotTypeSpecConv)
  15320. {
  15321. InsertNoImplicitCallUses(instr);
  15322. this->CaptureByteCodeSymUses(instr);
  15323. this->InsertByteCodeUses(instr, true);
  15324. }
  15325. #if DBG
  15326. else
  15327. {
  15328. PropertySym *propertySymUse = NULL;
  15329. NoRecoverMemoryJitArenaAllocator tempAllocator(_u("BE-GlobOpt-Temp"), this->alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  15330. BVSparse<JitArenaAllocator> * tempByteCodeUse = JitAnew(&tempAllocator, BVSparse<JitArenaAllocator>, &tempAllocator);
  15331. GlobOpt::TrackByteCodeSymUsed(instr, tempByteCodeUse, &propertySymUse);
  15332. Assert(tempByteCodeUse->Count() == 0 && propertySymUse == NULL);
  15333. }
  15334. #endif
  15335. OptHoistInvariant(instr, block, loop, dstVal, src1Val, src2Val, isNotTypeSpecConv, lossy, bailoutKind);
  15336. return true;
  15337. }
  15338. return false;
  15339. }
// Iterates the blocks between exclusiveBeginBlock (not visited) and inclusiveEndBlock
// (visited) in backward order, skipping blocks that are deleted, have no glob opt data,
// or (when an invariant sym is supplied) in which the sym's value no longer carries the
// expected invariant value number.
InvariantBlockBackwardIterator::InvariantBlockBackwardIterator(
    GlobOpt *const globOpt,
    BasicBlock *const exclusiveBeginBlock,
    BasicBlock *const inclusiveEndBlock,
    StackSym *const invariantSym,
    const ValueNumber invariantSymValueNumber)
    : globOpt(globOpt),
    exclusiveEndBlock(inclusiveEndBlock->prev),
    invariantSym(invariantSym),
    invariantSymValueNumber(invariantSymValueNumber),
    block(exclusiveBeginBlock)
#if DBG
    ,
    inclusiveEndBlock(inclusiveEndBlock)
#endif
{
    Assert(exclusiveBeginBlock);
    Assert(inclusiveEndBlock);
    Assert(!inclusiveEndBlock->isDeleted);
    Assert(exclusiveBeginBlock != inclusiveEndBlock);
    // Either both the sym and a valid value number are provided, or neither is.
    Assert(!invariantSym == (invariantSymValueNumber == InvalidValueNumber));

    // Advance to the first eligible block so the iterator is usable immediately.
    MoveNext();
}
  15363. bool
  15364. InvariantBlockBackwardIterator::IsValid() const
  15365. {
  15366. return block != exclusiveEndBlock;
  15367. }
// Steps the iterator backward to the previous eligible block: one that is not deleted,
// still has glob opt data, and (when tracking an invariant sym) one in which the sym's
// value still has the expected value number. Terminates once the cursor walks past the
// inclusive end block, at which point IsValid() becomes false.
void
InvariantBlockBackwardIterator::MoveNext()
{
    Assert(IsValid());

    while(true)
    {
#if DBG
        BasicBlock *const previouslyIteratedBlock = block;
#endif
        block = block->prev;
        if(!IsValid())
        {
            // Walked off the end of the range; the last block visited must have been
            // the inclusive end block.
            Assert(previouslyIteratedBlock == inclusiveEndBlock);
            break;
        }

        if(block->isDeleted)
        {
            continue;
        }

        if(!block->globOptData.HasData())
        {
            // This block's info has already been merged with all of its successors
            continue;
        }

        if(!invariantSym)
        {
            // Not filtering on a sym; any live block is acceptable.
            break;
        }

        invariantSymValue = block->globOptData.FindValue(invariantSym);
        if(!invariantSymValue || invariantSymValue->GetValueNumber() != invariantSymValueNumber)
        {
            // BailOnNoProfile and throw blocks are not moved outside loops. A sym table cleanup on these paths may delete the
            // values. Field hoisting also has some odd cases where the hoisted stack sym is invariant in only part of the loop.
            continue;
        }
        break;
    }
}
// Returns the block the iterator is currently positioned on. Only valid while
// IsValid() is true.
BasicBlock *
InvariantBlockBackwardIterator::Block() const
{
    Assert(IsValid());
    return block;
}
// Returns the invariant sym's value in the current block, as found by MoveNext().
// Only meaningful when the iterator was constructed with an invariant sym.
Value *
InvariantBlockBackwardIterator::InvariantSymValue() const
{
    Assert(IsValid());
    Assert(invariantSym);
    return invariantSymValue;
}
  15419. void
  15420. GlobOpt::HoistInvariantValueInfo(
  15421. ValueInfo *const invariantValueInfoToHoist,
  15422. Value *const valueToUpdate,
  15423. BasicBlock *const targetBlock)
  15424. {
  15425. Assert(invariantValueInfoToHoist);
  15426. Assert(valueToUpdate);
  15427. Assert(targetBlock);
  15428. // Why are we trying to change the value type of the type sym value? Asserting here to make sure we don't deep copy the type sym's value info.
  15429. Assert(!invariantValueInfoToHoist->IsJsType());
  15430. Sym *const symStore = valueToUpdate->GetValueInfo()->GetSymStore();
  15431. ValueInfo *newValueInfo;
  15432. if(invariantValueInfoToHoist->GetSymStore() == symStore)
  15433. {
  15434. newValueInfo = invariantValueInfoToHoist;
  15435. }
  15436. else
  15437. {
  15438. newValueInfo = invariantValueInfoToHoist->Copy(alloc);
  15439. this->SetSymStoreDirect(newValueInfo, symStore);
  15440. }
  15441. ChangeValueInfo(targetBlock, valueToUpdate, newValueInfo);
  15442. }
  15443. // static
  15444. bool
  15445. GlobOpt::DoInlineArgsOpt(Func const * func)
  15446. {
  15447. Func const * topFunc = func->GetTopFunc();
  15448. Assert(topFunc != func);
  15449. bool doInlineArgsOpt =
  15450. !PHASE_OFF(Js::InlineArgsOptPhase, topFunc) &&
  15451. !func->GetHasCalls() &&
  15452. !func->GetHasUnoptimizedArgumentsAccess() &&
  15453. func->m_canDoInlineArgsOpt;
  15454. return doInlineArgsOpt;
  15455. }
  15456. bool
  15457. GlobOpt::IsSwitchOptEnabled(Func const * func)
  15458. {
  15459. Assert(func->IsTopFunc());
  15460. return !PHASE_OFF(Js::SwitchOptPhase, func) && !func->IsSwitchOptDisabled() && !IsTypeSpecPhaseOff(func)
  15461. && DoAggressiveIntTypeSpec(func) && func->DoGlobOpt();
  15462. }
// Whether constant folding is enabled for this function (ConstFold phase not disabled).
bool
GlobOpt::DoConstFold() const
{
    return !PHASE_OFF(Js::ConstFoldPhase, func);
}
  15468. bool
  15469. GlobOpt::IsTypeSpecPhaseOff(Func const *func)
  15470. {
  15471. return PHASE_OFF(Js::TypeSpecPhase, func) || func->IsJitInDebugMode() || !func->DoGlobOptsForGeneratorFunc();
  15472. }
// Returns the cached type-specialization decision for this GlobOpt instance.
bool
GlobOpt::DoTypeSpec() const
{
    return doTypeSpec;
}
  15478. bool
  15479. GlobOpt::DoAggressiveIntTypeSpec(Func const * func)
  15480. {
  15481. return
  15482. !PHASE_OFF(Js::AggressiveIntTypeSpecPhase, func) &&
  15483. !IsTypeSpecPhaseOff(func) &&
  15484. !func->IsAggressiveIntTypeSpecDisabled();
  15485. }
// Returns the cached aggressive-int-type-spec decision for this GlobOpt instance.
bool
GlobOpt::DoAggressiveIntTypeSpec() const
{
    return doAggressiveIntTypeSpec;
}
// Returns the cached aggressive-mul-int-type-spec decision for this GlobOpt instance.
bool
GlobOpt::DoAggressiveMulIntTypeSpec() const
{
    return doAggressiveMulIntTypeSpec;
}
// Returns the cached div-int-type-spec decision for this GlobOpt instance.
bool
GlobOpt::DoDivIntTypeSpec() const
{
    return doDivIntTypeSpec;
}
  15501. // static
  15502. bool
  15503. GlobOpt::DoLossyIntTypeSpec(Func const * func)
  15504. {
  15505. return
  15506. !PHASE_OFF(Js::LossyIntTypeSpecPhase, func) &&
  15507. !IsTypeSpecPhaseOff(func) &&
  15508. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsLossyIntTypeSpecDisabled());
  15509. }
// Returns the cached lossy-int-type-spec decision for this GlobOpt instance.
bool
GlobOpt::DoLossyIntTypeSpec() const
{
    return doLossyIntTypeSpec;
}
  15515. // static
  15516. bool
  15517. GlobOpt::DoFloatTypeSpec(Func const * func)
  15518. {
  15519. return
  15520. !PHASE_OFF(Js::FloatTypeSpecPhase, func) &&
  15521. !IsTypeSpecPhaseOff(func) &&
  15522. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsFloatTypeSpecDisabled()) &&
  15523. AutoSystemInfo::Data.SSE2Available();
  15524. }
// Returns the cached float-type-spec decision for this GlobOpt instance.
bool
GlobOpt::DoFloatTypeSpec() const
{
    return doFloatTypeSpec;
}
  15530. bool
  15531. GlobOpt::DoStringTypeSpec(Func const * func)
  15532. {
  15533. return !PHASE_OFF(Js::StringTypeSpecPhase, func) && !IsTypeSpecPhaseOff(func);
  15534. }
  15535. // static
  15536. bool
  15537. GlobOpt::DoTypedArrayTypeSpec(Func const * func)
  15538. {
  15539. return !PHASE_OFF(Js::TypedArrayTypeSpecPhase, func) &&
  15540. !IsTypeSpecPhaseOff(func) &&
  15541. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsTypedArrayTypeSpecDisabled(func->IsLoopBody()))
  15542. #if defined(_M_IX86)
  15543. && AutoSystemInfo::Data.SSE2Available()
  15544. #endif
  15545. ;
  15546. }
  15547. // static
  15548. bool
  15549. GlobOpt::DoNativeArrayTypeSpec(Func const * func)
  15550. {
  15551. return !PHASE_OFF(Js::NativeArrayPhase, func) &&
  15552. !IsTypeSpecPhaseOff(func)
  15553. #if defined(_M_IX86)
  15554. && AutoSystemInfo::Data.SSE2Available()
  15555. #endif
  15556. ;
  15557. }
// Whether array check hoisting is enabled for the (top-level) function: phase on,
// not dynamically disabled, not in debug mode, and glob opts allowed for the function.
bool
GlobOpt::DoArrayCheckHoist(Func const * const func)
{
    Assert(func->IsTopFunc());
    return
        !PHASE_OFF(Js::ArrayCheckHoistPhase, func) &&
        !func->IsArrayCheckHoistDisabled() &&
        !func->IsJitInDebugMode() && // StElemI fast path is not allowed when in debug mode, so it cannot have bailout
        func->DoGlobOptsForGeneratorFunc();
}
// Returns the cached array-check-hoist decision for this GlobOpt instance.
bool
GlobOpt::DoArrayCheckHoist() const
{
    return doArrayCheckHoist;
}
// Whether array check hoisting may be applied for an access with the given base value
// type, in the given loop (or function-wide when loop is null), optionally for a
// specific instruction. JS arrays (as opposed to typed arrays) additionally require
// that implicit calls can be disabled in the relevant scope.
bool
GlobOpt::DoArrayCheckHoist(const ValueType baseValueType, Loop* loop, IR::Instr const * const instr) const
{
    // Stack-args-optimized instructions are excluded outside the loop prepass.
    if(!DoArrayCheckHoist() || (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
    {
        return false;
    }

    // Non-JS-array bases are fine as-is; JS arrays need implicit calls to be disableable
    // in the loop (or the function when no loop is given).
    if(!baseValueType.IsLikelyArrayOrObjectWithArray() ||
        (loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func)))
    {
        return true;
    }

    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant JS array checks
#if DBG_DUMP
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArrayCheckHoist disabled for JS arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return false;
}
  15598. bool
  15599. GlobOpt::DoArrayMissingValueCheckHoist(Func const * const func)
  15600. {
  15601. return
  15602. DoArrayCheckHoist(func) &&
  15603. !PHASE_OFF(Js::ArrayMissingValueCheckHoistPhase, func) &&
  15604. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayMissingValueCheckHoistDisabled(func->IsLoopBody()));
  15605. }
// Returns the cached missing-value-check-hoist decision for this GlobOpt instance.
bool
GlobOpt::DoArrayMissingValueCheckHoist() const
{
    return doArrayMissingValueCheckHoist;
}
  15611. bool
  15612. GlobOpt::DoArraySegmentHoist(const ValueType baseValueType, Func const * const func)
  15613. {
  15614. Assert(baseValueType.IsLikelyAnyOptimizedArray());
  15615. if(!DoArrayCheckHoist(func) || PHASE_OFF(Js::ArraySegmentHoistPhase, func))
  15616. {
  15617. return false;
  15618. }
  15619. if(!baseValueType.IsLikelyArrayOrObjectWithArray())
  15620. {
  15621. return true;
  15622. }
  15623. return
  15624. !PHASE_OFF(Js::JsArraySegmentHoistPhase, func) &&
  15625. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsJsArraySegmentHoistDisabled(func->IsLoopBody()));
  15626. }
// Returns the cached segment-hoist decision, choosing the JS-array flavor of the
// flag when the base is likely a JS array (or object with internal array).
bool
GlobOpt::DoArraySegmentHoist(const ValueType baseValueType) const
{
    Assert(baseValueType.IsLikelyAnyOptimizedArray());
    return baseValueType.IsLikelyArrayOrObjectWithArray() ? doJsArraySegmentHoist : doArraySegmentHoist;
}
// Whether typed array segment length hoisting is enabled in the given loop (or the
// whole function when loop is null). Requires segment hoisting for typed arrays plus
// the ability to disable implicit calls in the relevant scope.
bool
GlobOpt::DoTypedArraySegmentLengthHoist(Loop *const loop) const
{
    // Int32Array is used as a representative typed array type for the segment-hoist check.
    if(!DoArraySegmentHoist(ValueType::GetObject(ObjectType::Int32Array)))
    {
        return false;
    }

    if(loop ? ImplicitCallFlagsAllowOpts(loop) : ImplicitCallFlagsAllowOpts(func))
    {
        return true;
    }

    // The function or loop does not allow disabling implicit calls, which is required to eliminate redundant typed array
    // segment length loads.
#if DBG_DUMP
    if((((loop ? loop->GetImplicitCallFlags() : func->m_fg->implicitCallFlags) & ~Js::ImplicitCall_External) == 0) &&
        Js::Configuration::Global.flags.Trace.IsEnabled(Js::HostOptPhase))
    {
        Output::Print(_u("DoArraySegmentLengthHoist disabled for typed arrays because of external: "));
        func->DumpFullFunctionName();
        Output::Print(_u("\n"));
        Output::Flush();
    }
#endif
    return false;
}
  15658. bool
  15659. GlobOpt::DoArrayLengthHoist(Func const * const func)
  15660. {
  15661. return
  15662. DoArrayCheckHoist(func) &&
  15663. !PHASE_OFF(Js::Phase::ArrayLengthHoistPhase, func) &&
  15664. (!func->HasProfileInfo() || !func->GetReadOnlyProfileInfo()->IsArrayLengthHoistDisabled(func->IsLoopBody()));
  15665. }
// Returns the cached array-length-hoist decision for this GlobOpt instance.
bool
GlobOpt::DoArrayLengthHoist() const
{
    return doArrayLengthHoist;
}
// Eliminating array access helper calls follows the same policy as array check hoisting.
bool
GlobOpt::DoEliminateArrayAccessHelperCall(Func *const func)
{
    return DoArrayCheckHoist(func);
}
// Returns the cached helper-call-elimination decision for this GlobOpt instance.
bool
GlobOpt::DoEliminateArrayAccessHelperCall() const
{
    return doEliminateArrayAccessHelperCall;
}
// Decides whether an LdLen_A may be int-specialized for a base of the given value type.
// instr may be null (pure policy query); when provided it must be an LdLen_A with a dst
// and src1, and profile data on it can veto the specialization.
bool
GlobOpt::DoLdLenIntSpec(IR::Instr * const instr, const ValueType baseValueType)
{
    Assert(!instr || instr->m_opcode == Js::OpCode::LdLen_A);
    Assert(!instr || instr->GetDst());
    Assert(!instr || instr->GetSrc1());

    // Phase switches, dynamic profile disabling, and stack-args-optimized instructions
    // (outside the loop prepass) all block the specialization.
    if(PHASE_OFF(Js::LdLenIntSpecPhase, func) ||
        IsTypeSpecPhaseOff(func) ||
        (func->HasProfileInfo() && func->GetReadOnlyProfileInfo()->IsLdLenIntSpecDisabled()) ||
        (instr && !IsLoopPrePass() && instr->DoStackArgsOpt(func)))
    {
        return false;
    }

    // Profiled instruction data must indicate a likely-int result and a dst sym not
    // already known to be non-int.
    if(instr &&
        instr->IsProfiledInstr() &&
        (
            !instr->AsProfiledInstr()->u.ldElemInfo->GetElementType().IsLikelyInt() ||
            instr->GetDst()->AsRegOpnd()->m_sym->m_isNotInt
        ))
    {
        return false;
    }

    Assert(!instr || baseValueType == instr->GetSrc1()->GetValueType());

    // Allow strings and optimized arrays, excluding ObjectWithArray bases.
    return
        baseValueType.HasBeenString() ||
        (baseValueType.IsLikelyAnyOptimizedArray() && baseValueType.GetObjectType() != ObjectType::ObjectWithArray);
}
// Whether path-dependent value tracking is enabled for this function.
bool
GlobOpt::DoPathDependentValues() const
{
    return !PHASE_OFF(Js::Phase::PathDependentValuesPhase, func);
}
// Returns the cached relative-int-bounds-tracking decision for this GlobOpt instance.
bool
GlobOpt::DoTrackRelativeIntBounds() const
{
    return doTrackRelativeIntBounds;
}
// Returns the cached bound-check-elimination decision for this GlobOpt instance.
bool
GlobOpt::DoBoundCheckElimination() const
{
    return doBoundCheckElimination;
}
// Returns the cached bound-check-hoist decision for this GlobOpt instance.
bool
GlobOpt::DoBoundCheckHoist() const
{
    return doBoundCheckHoist;
}
// Returns the cached loop-count-based bound-check-hoist decision for this GlobOpt instance.
bool
GlobOpt::DoLoopCountBasedBoundCheckHoist() const
{
    return doLoopCountBasedBoundCheckHoist;
}
// Returns the cached pow(int, int) type-spec decision for this GlobOpt instance.
bool
GlobOpt::DoPowIntIntTypeSpec() const
{
    return doPowIntIntTypeSpec;
}
// Returns the cached tag-check decision for this GlobOpt instance.
bool
GlobOpt::DoTagChecks() const
{
    return doTagChecks;
}
  15743. bool
  15744. GlobOpt::TrackArgumentsObject()
  15745. {
  15746. if (PHASE_OFF(Js::StackArgOptPhase, this->func))
  15747. {
  15748. this->CannotAllocateArgumentsObjectOnStack();
  15749. return false;
  15750. }
  15751. return func->GetHasStackArgs();
  15752. }
// Permanently disables stack allocation of the arguments object for this function,
// with an optional test trace of the decision.
void
GlobOpt::CannotAllocateArgumentsObjectOnStack()
{
    func->SetHasStackArgs(false);

#ifdef ENABLE_DEBUG_CONFIG_OPTIONS
    if (PHASE_TESTTRACE(Js::StackArgOptPhase, this->func))
    {
        char16 debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        Output::Print(_u("Stack args disabled for function %s(%s)\n"), func->GetJITFunctionBody()->GetDisplayName(), func->GetDebugNumberSet(debugStringBuffer));
        Output::Flush();
    }
#endif
}
// Pre-optimization peephole for instructions with dead fall-through: converts
// BailOnNoProfile to a bailout instruction, validates BailOnException/BailOnEarlyExit
// placement, and removes unreachable code following the instruction. Returns the
// (possibly replaced) instruction.
IR::Instr *
GlobOpt::PreOptPeep(IR::Instr *instr)
{
    if (OpCodeAttr::HasDeadFallThrough(instr->m_opcode))
    {
        switch (instr->m_opcode)
        {
        case Js::OpCode::BailOnNoProfile:
        {
            // Handle BailOnNoProfile
            if (instr->HasBailOutInfo())
            {
                if (!this->prePassLoop)
                {
                    FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
                }
                // Already processed.
                return instr;
            }

            // Convert to bailout instr
            // Find the next real instruction carrying a byte code offset; the bailout
            // is attributed to that offset.
            IR::Instr *nextBytecodeOffsetInstr = instr->GetNextRealInstrOrLabel();
            while(nextBytecodeOffsetInstr->GetByteCodeOffset() == Js::Constants::NoByteCodeOffset)
            {
                nextBytecodeOffsetInstr = nextBytecodeOffsetInstr->GetNextRealInstrOrLabel();
                Assert(!nextBytecodeOffsetInstr->IsLabelInstr());
            }
            instr = instr->ConvertToBailOutInstr(nextBytecodeOffsetInstr, IR::BailOutOnNoProfile);
            instr->ClearByteCodeOffset();
            instr->SetByteCodeOffset(nextBytecodeOffsetInstr);

            // Outside a loop the bailout info is filled now; inside a loop this path
            // is only reached during the loop prepass.
            if (!this->currentBlock->loop)
            {
                FillBailOutInfo(this->currentBlock, instr->GetBailOutInfo());
            }
            else
            {
                Assert(this->prePassLoop);
            }
            break;
        }
        case Js::OpCode::BailOnException:
        {
            // BailOnException must sit right after a Catch (in a catch region) or at
            // the head of a finally region.
            Assert(
                (
                    this->func->HasTry() && this->func->DoOptimizeTry() &&
                    instr->m_prev->m_opcode == Js::OpCode::Catch &&
                    instr->m_prev->m_prev->IsLabelInstr() &&
                    instr->m_prev->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeCatch
                )
                ||
                (
                    this->func->HasFinally() && this->func->DoOptimizeTry() &&
                    instr->m_prev->AsLabelInstr() &&
                    instr->m_prev->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeFinally
                )
            );
            break;
        }

        case Js::OpCode::BailOnEarlyExit:
        {
            Assert(this->func->HasFinally() && this->func->DoOptimizeTry());
            break;
        }

        default:
        {
            // Other dead-fall-through opcodes inside a loop are left alone outside
            // the loop prepass.
            if(this->currentBlock->loop && !this->IsLoopPrePass())
            {
                return instr;
            }
            break;
        }
        }
        // The code following the instruction can never run; delete it.
        RemoveCodeAfterNoFallthroughInstr(instr);
    }

    return instr;
}
// Deletes the unreachable instructions following a no-fallthrough instruction within
// its block (stopping before FunctionExit), then removes the block's now-dead
// successor edges.
void
GlobOpt::RemoveCodeAfterNoFallthroughInstr(IR::Instr *instr)
{
    if (instr != this->currentBlock->GetLastInstr())
    {
        // Remove dead code after bailout
        IR::Instr *instrDead = instr->m_next;
        IR::Instr *instrNext;

        for (; instrDead != this->currentBlock->GetLastInstr(); instrDead = instrNext)
        {
            instrNext = instrDead->m_next;
            if (instrNext->m_opcode == Js::OpCode::FunctionExit)
            {
                // Keep FunctionExit and the instruction just before it.
                break;
            }
            this->func->m_fg->RemoveInstr(instrDead, this);
        }
        // Remove the (former) last instruction and re-anchor the block's end.
        IR::Instr *instrNextBlock = instrDead->m_next;
        this->func->m_fg->RemoveInstr(instrDead, this);

        this->currentBlock->SetLastInstr(instrNextBlock->m_prev);
    }

    // Cleanup dead successors
    FOREACH_SUCCESSOR_BLOCK_EDITING(deadBlock, this->currentBlock, iter)
    {
        this->currentBlock->RemoveDeadSucc(deadBlock, this->func->m_fg);
        if (this->currentBlock->GetDataUseCount() > 0)
        {
            this->currentBlock->DecrementDataUseCount();
        }
    } NEXT_SUCCESSOR_BLOCK_EDITING;
}
  15872. void
  15873. GlobOpt::ProcessTryHandler(IR::Instr* instr)
  15874. {
  15875. Assert(instr->m_next->IsLabelInstr() && instr->m_next->AsLabelInstr()->GetRegion()->GetType() == RegionType::RegionTypeTry);
  15876. Region* tryRegion = instr->m_next->AsLabelInstr()->GetRegion();
  15877. BVSparse<JitArenaAllocator> * writeThroughSymbolsSet = tryRegion->writeThroughSymbolsSet;
  15878. ToVar(writeThroughSymbolsSet, this->currentBlock);
  15879. }
  15880. bool
  15881. GlobOpt::ProcessExceptionHandlingEdges(IR::Instr* instr)
  15882. {
  15883. Assert(instr->m_opcode == Js::OpCode::BrOnException || instr->m_opcode == Js::OpCode::BrOnNoException);
  15884. if (instr->m_opcode == Js::OpCode::BrOnException)
  15885. {
  15886. if (instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionType::RegionTypeCatch)
  15887. {
  15888. // BrOnException was added to model flow from try region to the catch region to assist
  15889. // the backward pass in propagating bytecode upward exposed info from the catch block
  15890. // to the try, and to handle break blocks. Removing it here as it has served its purpose
  15891. // and keeping it around might also have unintended effects while merging block data for
  15892. // the catch block's predecessors.
  15893. // Note that the Deadstore pass will still be able to propagate bytecode upward exposed info
  15894. // because it doesn't skip dead blocks for that.
  15895. this->RemoveFlowEdgeToCatchBlock(instr);
  15896. this->currentBlock->RemoveInstr(instr);
  15897. return true;
  15898. }
  15899. else
  15900. {
  15901. // We add BrOnException from a finally region to early exit, remove that since it has served its purpose
  15902. return this->RemoveFlowEdgeToFinallyOnExceptionBlock(instr);
  15903. }
  15904. }
  15905. else if (instr->m_opcode == Js::OpCode::BrOnNoException)
  15906. {
  15907. if (instr->AsBranchInstr()->GetTarget()->GetRegion()->GetType() == RegionType::RegionTypeCatch)
  15908. {
  15909. this->RemoveFlowEdgeToCatchBlock(instr);
  15910. }
  15911. else
  15912. {
  15913. this->RemoveFlowEdgeToFinallyOnExceptionBlock(instr);
  15914. }
  15915. }
  15916. return false;
  15917. }
  15918. void
  15919. GlobOpt::InsertToVarAtDefInTryRegion(IR::Instr * instr, IR::Opnd * dstOpnd)
  15920. {
  15921. if ((this->currentRegion->GetType() == RegionTypeTry || this->currentRegion->GetType() == RegionTypeFinally) &&
  15922. dstOpnd->IsRegOpnd() && dstOpnd->AsRegOpnd()->m_sym->HasByteCodeRegSlot())
  15923. {
  15924. StackSym * sym = dstOpnd->AsRegOpnd()->m_sym;
  15925. if (sym->IsVar())
  15926. {
  15927. return;
  15928. }
  15929. StackSym * varSym = sym->GetVarEquivSym(nullptr);
  15930. if ((this->currentRegion->GetType() == RegionTypeTry && this->currentRegion->writeThroughSymbolsSet->Test(varSym->m_id)) ||
  15931. ((this->currentRegion->GetType() == RegionTypeFinally && this->currentRegion->GetMatchingTryRegion()->writeThroughSymbolsSet->Test(varSym->m_id))))
  15932. {
  15933. IR::RegOpnd * regOpnd = IR::RegOpnd::New(varSym, IRType::TyVar, instr->m_func);
  15934. this->ToVar(instr->m_next, regOpnd, this->currentBlock, NULL, false);
  15935. }
  15936. }
  15937. }
// Removes the flow edge leading into a catch block for a BrOnException/BrOnNoException
// branch. For BrOnException the branch target is the catch block; for BrOnNoException
// the catch block is reached via the fall-through path.
void
GlobOpt::RemoveFlowEdgeToCatchBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());

    BasicBlock * catchBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // The branch target itself is the catch block; the edge comes from the current block.
        catchBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else
    {
        Assert(instr->m_opcode == Js::OpCode::BrOnNoException);
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();

        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeCatch)
        {
            // Fall-through goes directly into the catch region.
            catchBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            // Otherwise the fall-through block must end in an unconditional jump to the catch block.
            Assert(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional());
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchToCatchBlock = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * catchBlockLabel = branchToCatchBlock->GetTarget();
            Assert(catchBlockLabel->GetRegion()->GetType() == RegionTypeCatch);
            catchBlock = catchBlockLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }
    Assert(catchBlock);
    Assert(predBlock);

    // Remove the edge only if it still exists in the flow graph.
    if (this->func->m_fg->FindEdge(predBlock, catchBlock))
    {
        predBlock->RemoveDeadSucc(catchBlock, this->func->m_fg);
        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }
}
// Removes the exception flow edge into a finally block for a BrOnException /
// BrOnNoException branch. Returns false when the edge must be preserved (finally-to-
// early-exit branches) or when the expected flow shape is not found; true otherwise.
bool
GlobOpt::RemoveFlowEdgeToFinallyOnExceptionBlock(IR::Instr * instr)
{
    Assert(instr->IsBranchInstr());

    if (instr->m_opcode == Js::OpCode::BrOnNoException && instr->AsBranchInstr()->m_brFinallyToEarlyExit)
    {
        // We add edge from finally to early exit block
        // We should not remove this edge
        // If a loop has continue, and we add edge in finally to continue
        // Break block removal can move all continues inside the loop to branch to the continue added within finally
        // If we get rid of this edge, then loop may loose all backedges
        // Ideally, doing tail duplication before globopt would enable us to remove these edges, but since we do it after globopt, keep it this way for now
        // See test1() in core/test/tryfinallytests.js
        return false;
    }

    BasicBlock * finallyBlock = nullptr;
    BasicBlock * predBlock = nullptr;
    if (instr->m_opcode == Js::OpCode::BrOnException)
    {
        // The branch target itself is the finally block.
        finallyBlock = instr->AsBranchInstr()->GetTarget()->GetBasicBlock();
        predBlock = this->currentBlock;
    }
    else
    {
        Assert(instr->m_opcode == Js::OpCode::BrOnNoException);
        IR::Instr * nextInstr = instr->GetNextRealInstrOrLabel();
        Assert(nextInstr->IsLabelInstr());
        IR::LabelInstr * nextLabel = nextInstr->AsLabelInstr();

        if (nextLabel->GetRegion() && nextLabel->GetRegion()->GetType() == RegionTypeFinally)
        {
            // Fall-through goes directly into the finally region.
            finallyBlock = nextLabel->GetBasicBlock();
            predBlock = this->currentBlock;
        }
        else
        {
            // Expect the fall-through block to end in an unconditional jump to the
            // finally block (or an early exit); bail out otherwise.
            if (!(nextLabel->m_next->IsBranchInstr() && nextLabel->m_next->AsBranchInstr()->IsUnconditional()))
            {
                return false;
            }
            BasicBlock * nextBlock = nextLabel->GetBasicBlock();
            IR::BranchInstr * branchTofinallyBlockOrEarlyExit = nextLabel->m_next->AsBranchInstr();
            IR::LabelInstr * finallyBlockLabelOrEarlyExitLabel = branchTofinallyBlockOrEarlyExit->GetTarget();
            finallyBlock = finallyBlockLabelOrEarlyExitLabel->GetBasicBlock();
            predBlock = nextBlock;
        }
    }

    Assert(finallyBlock && predBlock);

    if (this->func->m_fg->FindEdge(predBlock, finallyBlock))
    {
        predBlock->RemoveDeadSucc(finallyBlock, this->func->m_fg);

        if (instr->m_opcode == Js::OpCode::BrOnException)
        {
            // The BrOnException itself has served its purpose; remove it.
            this->currentBlock->RemoveInstr(instr);
        }

        if (finallyBlock->GetFirstInstr()->AsLabelInstr()->IsUnreferenced())
        {
            // Traverse predBlocks of finallyBlock, if any of the preds have a different region, set m_hasNonBranchRef to true
            // If not, this label can get eliminated and an incorrect region from the predecessor can get propagated in lowered code
            // See test3() in tryfinallytests.js
            Region * finallyRegion = finallyBlock->GetFirstInstr()->AsLabelInstr()->GetRegion();
            FOREACH_PREDECESSOR_BLOCK(pred, finallyBlock)
            {
                Region * predRegion = pred->GetFirstInstr()->AsLabelInstr()->GetRegion();
                if (predRegion != finallyRegion)
                {
                    finallyBlock->GetFirstInstr()->AsLabelInstr()->m_hasNonBranchRef = true;
                }
            } NEXT_PREDECESSOR_BLOCK;
        }

        if (predBlock == this->currentBlock)
        {
            predBlock->DecrementDataUseCount();
        }
    }

    return true;
}
// Peephole pass run outside the loop prepass: turns dead branches whose sources are
// known primitive (or type specialized) into Nops, and removes self-assign Ld_A/Ld_I4
// instructions while preserving byte code uses.
IR::Instr *
GlobOpt::OptPeep(IR::Instr *instr, Value *src1Val, Value *src2Val)
{
    IR::Opnd *dst, *src1, *src2;

    if (this->IsLoopPrePass())
    {
        return instr;
    }

    switch (instr->m_opcode)
    {
        case Js::OpCode::DeadBrEqual:
        case Js::OpCode::DeadBrRelational:
        case Js::OpCode::DeadBrSrEqual:
            src1 = instr->GetSrc1();
            src2 = instr->GetSrc2();

            // These branches were turned into dead branches because they were unnecessary (branch to next, ...).
            // The DeadBr are necessary in case the evaluation of the sources have side-effects.
            // If we know for sure the srcs are primitive or have been type specialized, we don't need these instructions
            if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src1->AsRegOpnd()->m_sym))) &&
                ((src2Val && src2Val->GetValueInfo()->IsPrimitive()) || (src2->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src2->AsRegOpnd()->m_sym))))
            {
                this->CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Nop;
            }
            break;
        case Js::OpCode::DeadBrOnHasProperty:
            src1 = instr->GetSrc1();

            // Same reasoning as above, for the single-source variant.
            if (((src1Val && src1Val->GetValueInfo()->IsPrimitive()) || (src1->IsRegOpnd() && CurrentBlockData()->IsTypeSpecialized(src1->AsRegOpnd()->m_sym))))
            {
                this->CaptureByteCodeSymUses(instr);
                instr->m_opcode = Js::OpCode::Nop;
            }
            break;
        case Js::OpCode::Ld_A:
        case Js::OpCode::Ld_I4:
            src1 = instr->GetSrc1();
            dst = instr->GetDst();

            // A load onto itself is a no-op; keep a ByteCodeUses for the dst when it
            // corresponds to a byte code register.
            if (dst->IsRegOpnd() && dst->IsEqual(src1))
            {
                dst = instr->UnlinkDst();
                if (!dst->GetIsJITOptimizedReg())
                {
                    IR::ByteCodeUsesInstr *bytecodeUse = IR::ByteCodeUsesInstr::New(instr);
                    bytecodeUse->SetDst(dst);
                    instr->InsertAfter(bytecodeUse);
                }
                instr->FreeSrc1();
                instr->m_opcode = Js::OpCode::Nop;
            }
            break;
    }
    return instr;
}
  16111. void
  16112. GlobOpt::OptimizeIndirUses(IR::IndirOpnd *indirOpnd, IR::Instr * *pInstr, Value **indirIndexValRef)
  16113. {
  16114. IR::Instr * &instr = *pInstr;
  16115. Assert(!indirIndexValRef || !*indirIndexValRef);
  16116. // Update value types and copy-prop the base
  16117. OptSrc(indirOpnd->GetBaseOpnd(), &instr, nullptr, indirOpnd);
  16118. IR::RegOpnd *indexOpnd = indirOpnd->GetIndexOpnd();
  16119. if (!indexOpnd)
  16120. {
  16121. return;
  16122. }
  16123. // Update value types and copy-prop the index
  16124. Value *indexVal = OptSrc(indexOpnd, &instr, nullptr, indirOpnd);
  16125. if(indirIndexValRef)
  16126. {
  16127. *indirIndexValRef = indexVal;
  16128. }
  16129. }
  16130. bool
  16131. GlobOpt::IsPREInstrCandidateLoad(Js::OpCode opcode)
  16132. {
  16133. switch (opcode)
  16134. {
  16135. case Js::OpCode::LdFld:
  16136. case Js::OpCode::LdFldForTypeOf:
  16137. case Js::OpCode::LdRootFld:
  16138. case Js::OpCode::LdRootFldForTypeOf:
  16139. case Js::OpCode::LdMethodFld:
  16140. case Js::OpCode::LdRootMethodFld:
  16141. case Js::OpCode::LdSlot:
  16142. case Js::OpCode::LdSlotArr:
  16143. return true;
  16144. }
  16145. return false;
  16146. }
  16147. bool
  16148. GlobOpt::IsPREInstrCandidateStore(Js::OpCode opcode)
  16149. {
  16150. switch (opcode)
  16151. {
  16152. case Js::OpCode::StFld:
  16153. case Js::OpCode::StRootFld:
  16154. case Js::OpCode::StSlot:
  16155. return true;
  16156. }
  16157. return false;
  16158. }
  16159. bool
  16160. GlobOpt::ImplicitCallFlagsAllowOpts(Loop *loop)
  16161. {
  16162. return loop->GetImplicitCallFlags() != Js::ImplicitCall_HasNoInfo &&
  16163. (((loop->GetImplicitCallFlags() & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
  16164. }
  16165. bool
  16166. GlobOpt::ImplicitCallFlagsAllowOpts(Func const *func)
  16167. {
  16168. return func->m_fg->implicitCallFlags != Js::ImplicitCall_HasNoInfo &&
  16169. (((func->m_fg->implicitCallFlags & ~Js::ImplicitCall_Accessor) | Js::ImplicitCall_None) == Js::ImplicitCall_None);
  16170. }
  16171. #if DBG_DUMP
void
GlobOpt::Dump() const
{
    // Debug-only entry point: dump the sym-to-value map for the current block.
    this->DumpSymToValueMap();
}
void
GlobOpt::DumpSymToValueMap(BasicBlock const * block) const
{
    // Debug-only: print the sym-to-value map of the given block.
    Output::Print(_u("\n*** SymToValueMap ***\n"));
    block->globOptData.DumpSymToValueMap();
}
  16183. void
  16184. GlobOpt::DumpSymToValueMap() const
  16185. {
  16186. DumpSymToValueMap(this->currentBlock);
  16187. }
  16188. void
  16189. GlobOpt::DumpSymVal(int index)
  16190. {
  16191. SymID id = index;
  16192. extern Func *CurrentFunc;
  16193. Sym *sym = this->func->m_symTable->Find(id);
  16194. AssertMsg(sym, "Sym not found!!!");
  16195. Output::Print(_u("Sym: "));
  16196. sym->Dump();
  16197. Output::Print(_u("\t\tValueNumber: "));
  16198. Value * pValue = CurrentBlockData()->FindValueFromMapDirect(sym->m_id);
  16199. pValue->Dump();
  16200. Output::Print(_u("\n"));
  16201. }
  16202. void
  16203. GlobOpt::Trace(BasicBlock * block, bool before) const
  16204. {
  16205. bool globOptTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::GlobOptPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16206. bool typeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::TypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16207. bool floatTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FloatTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16208. bool fieldHoistTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldHoistPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16209. bool fieldCopyPropTrace = fieldHoistTrace || Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldCopyPropPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16210. bool objTypeSpecTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ObjTypeSpecPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16211. bool valueTableTrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::ValueTablePhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16212. bool fieldPRETrace = Js::Configuration::Global.flags.Trace.IsEnabled(Js::FieldPREPhase, this->func->GetSourceContextId(), this->func->GetLocalFunctionId());
  16213. bool anyTrace = globOptTrace || typeSpecTrace || floatTypeSpecTrace || fieldCopyPropTrace || fieldHoistTrace || objTypeSpecTrace || valueTableTrace || fieldPRETrace;
  16214. if (!anyTrace)
  16215. {
  16216. return;
  16217. }
  16218. if (fieldPRETrace && this->IsLoopPrePass())
  16219. {
  16220. if (block->isLoopHeader && before)
  16221. {
  16222. Output::Print(_u("==== Loop Prepass block header #%-3d, Visiting Loop block head #%-3d\n"),
  16223. this->prePassLoop->GetHeadBlock()->GetBlockNum(), block->GetBlockNum());
  16224. }
  16225. }
  16226. if (!typeSpecTrace && !floatTypeSpecTrace && !valueTableTrace && !Js::Configuration::Global.flags.Verbose)
  16227. {
  16228. return;
  16229. }
  16230. if (before)
  16231. {
  16232. Output::Print(_u("========================================================================\n"));
  16233. Output::Print(_u("Begin OptBlock: Block #%-3d"), block->GetBlockNum());
  16234. if (block->loop)
  16235. {
  16236. Output::Print(_u(" Loop block header:%-3d currentLoop block head:%-3d %s"),
  16237. block->loop->GetHeadBlock()->GetBlockNum(),
  16238. this->prePassLoop ? this->prePassLoop->GetHeadBlock()->GetBlockNum() : 0,
  16239. this->IsLoopPrePass() ? _u("PrePass") : _u(""));
  16240. }
  16241. Output::Print(_u("\n"));
  16242. }
  16243. else
  16244. {
  16245. Output::Print(_u("-----------------------------------------------------------------------\n"));
  16246. Output::Print(_u("After OptBlock: Block #%-3d\n"), block->GetBlockNum());
  16247. }
  16248. if ((typeSpecTrace || floatTypeSpecTrace) && !block->globOptData.liveVarSyms->IsEmpty())
  16249. {
  16250. Output::Print(_u(" Live var syms: "));
  16251. block->globOptData.liveVarSyms->Dump();
  16252. }
  16253. if (typeSpecTrace && !block->globOptData.liveInt32Syms->IsEmpty())
  16254. {
  16255. Assert(this->tempBv->IsEmpty());
  16256. this->tempBv->Minus(block->globOptData.liveInt32Syms, block->globOptData.liveLossyInt32Syms);
  16257. if(!this->tempBv->IsEmpty())
  16258. {
  16259. Output::Print(_u(" Int32 type specialized (lossless) syms: "));
  16260. this->tempBv->Dump();
  16261. }
  16262. this->tempBv->ClearAll();
  16263. if(!block->globOptData.liveLossyInt32Syms->IsEmpty())
  16264. {
  16265. Output::Print(_u(" Int32 converted (lossy) syms: "));
  16266. block->globOptData.liveLossyInt32Syms->Dump();
  16267. }
  16268. }
  16269. if (floatTypeSpecTrace && !block->globOptData.liveFloat64Syms->IsEmpty())
  16270. {
  16271. Output::Print(_u(" Float64 type specialized syms: "));
  16272. block->globOptData.liveFloat64Syms->Dump();
  16273. }
  16274. if ((fieldCopyPropTrace || objTypeSpecTrace) && this->DoFieldCopyProp(block->loop) && !block->globOptData.liveFields->IsEmpty())
  16275. {
  16276. Output::Print(_u(" Live field syms: "));
  16277. block->globOptData.liveFields->Dump();
  16278. }
  16279. if ((fieldHoistTrace || objTypeSpecTrace) && this->DoFieldHoisting(block->loop) && HasHoistableFields(block))
  16280. {
  16281. Output::Print(_u(" Hoistable field sym: "));
  16282. block->globOptData.hoistableFields->Dump();
  16283. }
  16284. if (objTypeSpecTrace || valueTableTrace)
  16285. {
  16286. Output::Print(_u(" Value table:\n"));
  16287. block->globOptData.DumpSymToValueMap();
  16288. }
  16289. if (before)
  16290. {
  16291. Output::Print(_u("-----------------------------------------------------------------------\n")); \
  16292. }
  16293. Output::Flush();
  16294. }
  16295. void
  16296. GlobOpt::TraceSettings() const
  16297. {
  16298. Output::Print(_u("GlobOpt Settings:\r\n"));
  16299. Output::Print(_u(" FloatTypeSpec: %s\r\n"), this->DoFloatTypeSpec() ? _u("enabled") : _u("disabled"));
  16300. Output::Print(_u(" AggressiveIntTypeSpec: %s\r\n"), this->DoAggressiveIntTypeSpec() ? _u("enabled") : _u("disabled"));
  16301. Output::Print(_u(" LossyIntTypeSpec: %s\r\n"), this->DoLossyIntTypeSpec() ? _u("enabled") : _u("disabled"));
  16302. Output::Print(_u(" ArrayCheckHoist: %s\r\n"), this->func->IsArrayCheckHoistDisabled() ? _u("disabled") : _u("enabled"));
  16303. Output::Print(_u(" ImplicitCallFlags: %s\r\n"), Js::DynamicProfileInfo::GetImplicitCallFlagsString(this->func->m_fg->implicitCallFlags));
  16304. for (Loop * loop = this->func->m_fg->loopList; loop != NULL; loop = loop->next)
  16305. {
  16306. Output::Print(_u(" loop: %d, ImplicitCallFlags: %s\r\n"), loop->GetLoopNumber(),
  16307. Js::DynamicProfileInfo::GetImplicitCallFlagsString(loop->GetImplicitCallFlags()));
  16308. }
  16309. Output::Flush();
  16310. }
  16311. #endif // DBG_DUMP
IR::Instr *
GlobOpt::TrackMarkTempObject(IR::Instr * instrStart, IR::Instr * instrLast)
{
    // Walks [instrStart, instrLast], maintaining the per-block sets of syms
    // that may hold mark-temp (stack-allocated) objects and inserting
    // BailOutMarkTempObject bailouts where needed. Returns the last
    // instruction processed (bailout insertion may replace instructions, so
    // this can differ from instrLast).
    if (!this->func->GetHasMarkTempObjects())
    {
        return instrLast;
    }
    IR::Instr * instr = instrStart;
    IR::Instr * instrEnd = instrLast->m_next;
    IR::Instr * lastInstr = nullptr;
    GlobOptBlockData& globOptData = *CurrentBlockData();
    do
    {
        // Pre-op bailouts are only relevant outside the loop prepass, when the
        // instruction may make implicit calls and some syms are possibly temps.
        bool mayNeedBailOnImplicitCallsPreOp = !this->IsLoopPrePass()
            && instr->HasAnyImplicitCalls()
            && globOptData.maybeTempObjectSyms != nullptr;
        if (mayNeedBailOnImplicitCallsPreOp)
        {
            IR::Opnd * src1 = instr->GetSrc1();
            if (src1)
            {
                // GenerateBailOutMarkTempObjectIfNeeded may replace instr;
                // re-read src2 from the returned instruction.
                instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src1, false);
                IR::Opnd * src2 = instr->GetSrc2();
                if (src2)
                {
                    instr = GenerateBailOutMarkTempObjectIfNeeded(instr, src2, false);
                }
            }
        }
        IR::Opnd *dst = instr->GetDst();
        if (dst)
        {
            if (dst->IsRegOpnd())
            {
                // Register dst: update the temp-object tracking sets.
                TrackTempObjectSyms(instr, dst->AsRegOpnd());
            }
            else if (mayNeedBailOnImplicitCallsPreOp)
            {
                // Memory dst: may store into a temp object, so a bailout may be needed.
                instr = GenerateBailOutMarkTempObjectIfNeeded(instr, dst, true);
            }
        }
        lastInstr = instr;
        instr = instr->m_next;
    }
    while (instr != instrEnd);
    return lastInstr;
}
void
GlobOpt::TrackTempObjectSyms(IR::Instr * instr, IR::RegOpnd * opnd)
{
    // Updates the per-block maybeTempObjectSyms / canStoreTempObjectSyms sets
    // for an assignment to the register operand 'opnd'. Temp-object-producing
    // instructions start tracking the dst sym; transfer instructions propagate
    // temp-ness from their sources; any other assignment kills the tracking.
    // If it is marked as dstIsTempObject, we should have mark temped it, or type specialized it to Ld_I4.
    Assert(!instr->dstIsTempObject || ObjectTempVerify::CanMarkTemp(instr, nullptr));
    GlobOptBlockData& globOptData = *CurrentBlockData();
    bool canStoreTemp = false;
    bool maybeTemp = false;
    if (OpCodeAttr::TempObjectProducing(instr->m_opcode))
    {
        maybeTemp = instr->dstIsTempObject;
        // We have to make sure that lower will always generate code to do stack allocation
        // before we can store any other stack instance onto it. Otherwise, we would not
        // walk object to box the stack property.
        canStoreTemp = instr->dstIsTempObject && ObjectTemp::CanStoreTemp(instr);
    }
    else if (OpCodeAttr::TempObjectTransfer(instr->m_opcode))
    {
        // Need to check both sources, GetNewScObject has two srcs for transfer.
        // No need to get var equiv sym here as transfer of type spec value does not transfer a mark temp object.
        // maybeTemp: any src may be a temp; canStoreTemp: all srcs must be store-safe.
        maybeTemp = globOptData.maybeTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            || (instr->GetSrc2() && instr->GetSrc2()->IsRegOpnd() && globOptData.maybeTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id)));
        canStoreTemp = globOptData.canStoreTempObjectSyms && (
            (instr->GetSrc1()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc1()->AsRegOpnd()->m_sym->m_id))
            && (!instr->GetSrc2() || (instr->GetSrc2()->IsRegOpnd() && globOptData.canStoreTempObjectSyms->Test(instr->GetSrc2()->AsRegOpnd()->m_sym->m_id))));
        AssertOrFailFast(!canStoreTemp || instr->dstIsTempObject);
        AssertOrFailFast(!maybeTemp || instr->dstIsTempObject);
    }
    // Need to get the var equiv sym as assignment of type specialized sym kill the var sym value anyway.
    StackSym * sym = opnd->m_sym;
    if (!sym->IsVar())
    {
        sym = sym->GetVarEquivSym(nullptr);
        if (sym == nullptr)
        {
            return;
        }
    }
    SymID symId = sym->m_id;
    if (maybeTemp)
    {
        // Only var sym should be temp objects
        Assert(opnd->m_sym == sym);
        if (globOptData.maybeTempObjectSyms == nullptr)
        {
            globOptData.maybeTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
        }
        globOptData.maybeTempObjectSyms->Set(symId);
        if (canStoreTemp)
        {
            if (instr->m_opcode == Js::OpCode::NewScObjectLiteral && !this->IsLoopPrePass())
            {
                // For object literal, we install the final type up front.
                // If there are bailout before we finish initializing all the fields, we need to
                // zero out the rest if we stack allocate the literal, so that the boxing would not
                // try to box trash pointer in the properties.
                // Although object literal initialization can be done lexically, BailOnNoProfile may cause some
                // paths to disappear. Doing it flow-based makes it easier to stop propagating those entries.
                IR::IntConstOpnd * propertyArrayIdOpnd = instr->GetSrc1()->AsIntConstOpnd();
                const Js::PropertyIdArray * propIds = instr->m_func->GetJITFunctionBody()->ReadPropertyIdArrayFromAuxData(propertyArrayIdOpnd->AsUint32());
                // Duplicates are removed by parser
                Assert(!propIds->hadDuplicates);
                if (globOptData.stackLiteralInitFldDataMap == nullptr)
                {
                    globOptData.stackLiteralInitFldDataMap = JitAnew(alloc, StackLiteralInitFldDataMap, alloc);
                }
                else
                {
                    Assert(!globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
                }
                // Start tracking InitFld progress for this stack literal from field 0.
                StackLiteralInitFldData data = { propIds, 0};
                globOptData.stackLiteralInitFldDataMap->AddNew(sym, data);
            }
            if (globOptData.canStoreTempObjectSyms == nullptr)
            {
                globOptData.canStoreTempObjectSyms = JitAnew(this->alloc, BVSparse<JitArenaAllocator>, this->alloc);
            }
            globOptData.canStoreTempObjectSyms->Set(symId);
        }
        else if (globOptData.canStoreTempObjectSyms)
        {
            // Still maybe a temp, but no longer known to be store-safe.
            globOptData.canStoreTempObjectSyms->Clear(symId);
        }
    }
    else
    {
        // Non-temp assignment kills any previous temp-object tracking for the sym.
        Assert(!canStoreTemp);
        if (globOptData.maybeTempObjectSyms)
        {
            if (globOptData.canStoreTempObjectSyms)
            {
                globOptData.canStoreTempObjectSyms->Clear(symId);
            }
            globOptData.maybeTempObjectSyms->Clear(symId);
        }
        else
        {
            Assert(!globOptData.canStoreTempObjectSyms);
        }
        // The symbol is being assigned to, the sym shouldn't still be in the stackLiteralInitFldDataMap
        Assert(this->IsLoopPrePass() ||
            globOptData.stackLiteralInitFldDataMap == nullptr
            || globOptData.stackLiteralInitFldDataMap->Count() == 0
            || !globOptData.stackLiteralInitFldDataMap->ContainsKey(sym));
    }
}
IR::Instr *
GlobOpt::GenerateBailOutMarkTempObjectIfNeeded(IR::Instr * instr, IR::Opnd * opnd, bool isDst)
{
    // If 'opnd' references a sym that may be a mark-temp (stack) object, attach
    // a BailOutMarkTempObject bailout to 'instr' (or insert a pre-op bailout)
    // and mark the operand as can-store-temp where appropriate. Returns the
    // (possibly replaced) instruction.
    Assert(opnd);
    Assert(isDst == (opnd == instr->GetDst()));
    Assert(opnd != instr->GetDst() || !opnd->IsRegOpnd());
    Assert(!this->IsLoopPrePass());
    Assert(instr->HasAnyImplicitCalls());
    // Only dst reg opnd opcode or ArgOut_A should have dstIsTempObject marked
    Assert(!isDst || !instr->dstIsTempObject || instr->m_opcode == Js::OpCode::ArgOut_A);
    // Post-op implicit call shouldn't have installed yet
    Assert(!instr->HasBailOutInfo() || (instr->GetBailOutKind() & IR::BailOutKindBits) != IR::BailOutOnImplicitCalls);
    GlobOptBlockData& globOptData = *CurrentBlockData();
    Assert(globOptData.maybeTempObjectSyms != nullptr);
    IR::PropertySymOpnd * propertySymOpnd = nullptr;
    StackSym * stackSym = ObjectTemp::GetStackSym(opnd, &propertySymOpnd);
    // It is okay to not get the var equiv sym here, as use of a type specialized sym is not use of the temp object
    // so no need to add mark temp bailout.
    // TempObjectSyms doesn't contain any type spec sym, so we will get false here for all type spec syms.
    if (stackSym && globOptData.maybeTempObjectSyms->Test(stackSym->m_id))
    {
        if (instr->HasBailOutInfo())
        {
            // Piggy-back on the existing bailout.
            instr->SetBailOutKind(instr->GetBailOutKind() | IR::BailOutMarkTempObject);
        }
        else
        {
            // Only insert the pre-op bailout if it is not a direct (type-check protected)
            // field access; don't check the dst yet.
            // SetTypeCheckBailout will clear this out if it is direct field access.
            if (isDst
                || (instr->m_opcode == Js::OpCode::FromVar && !opnd->GetValueType().IsPrimitive())
                || propertySymOpnd == nullptr
                || !propertySymOpnd->IsTypeCheckProtected())
            {
                this->GenerateBailAtOperation(&instr, IR::BailOutMarkTempObject);
            }
        }
        if (!opnd->IsRegOpnd() && (!isDst || (globOptData.canStoreTempObjectSyms && globOptData.canStoreTempObjectSyms->Test(stackSym->m_id))))
        {
            // If this opnd is a dst, that means that the object pointer is a stack object,
            // and we can store temp object/number on it.
            // If the opnd is a src, that means that the object pointer may be a stack object
            // so the load may be a temp object/number and we need to track its use.
            // Don't mark start of indir as can store temp, because we don't actually know
            // what it is assigning to.
            if (!isDst || !opnd->IsIndirOpnd())
            {
                opnd->SetCanStoreTemp();
            }
            if (propertySymOpnd)
            {
                // Track initfld of stack literals
                if (isDst && instr->m_opcode == Js::OpCode::InitFld)
                {
                    const Js::PropertyId propertyId = propertySymOpnd->m_sym->AsPropertySym()->m_propertyId;
                    // We don't need to track numeric properties init
                    if (!this->func->GetThreadContextInfo()->IsNumericProperty(propertyId))
                    {
                        DebugOnly(bool found = false);
                        // Advance the literal's InitFld progress counter; remove the map
                        // entry once every property has been initialized.
                        globOptData.stackLiteralInitFldDataMap->RemoveIf(stackSym,
                            [&](StackSym * key, StackLiteralInitFldData & data)
                            {
                                DebugOnly(found = true);
                                Assert(key == stackSym);
                                Assert(data.currentInitFldCount < data.propIds->count);
                                if (data.propIds->elements[data.currentInitFldCount] != propertyId)
                                {
#if DBG
                                    bool duplicate = false;
                                    for (uint i = 0; i < data.currentInitFldCount; i++)
                                    {
                                        if (data.propIds->elements[i] == propertyId)
                                        {
                                            duplicate = true;
                                            break;
                                        }
                                    }
                                    Assert(duplicate);
#endif
                                    // duplicate initialization
                                    return false;
                                }
                                bool finished = (++data.currentInitFldCount == data.propIds->count);
#if DBG
                                if (finished)
                                {
                                    // We can still track the finished stack literal InitFld lexically.
                                    this->finishedStackLiteralInitFld->Set(stackSym->m_id);
                                }
#endif
                                return finished;
                            });
                        // We might still see InitFld even we have finished with all the property Id because
                        // of duplicate entries at the end
                        Assert(found || finishedStackLiteralInitFld->Test(stackSym->m_id));
                    }
                }
            }
        }
    }
    return instr;
}
  16568. LoopCount *
  16569. GlobOpt::GetOrGenerateLoopCountForMemOp(Loop *loop)
  16570. {
  16571. LoopCount *loopCount = loop->loopCount;
  16572. if (loopCount && !loopCount->HasGeneratedLoopCountSym())
  16573. {
  16574. Assert(loop->bailOutInfo);
  16575. EnsureBailTarget(loop);
  16576. GenerateLoopCountPlusOne(loop, loopCount);
  16577. }
  16578. return loopCount;
  16579. }
  16580. IR::Opnd *
  16581. GlobOpt::GenerateInductionVariableChangeForMemOp(Loop *loop, byte unroll, IR::Instr *insertBeforeInstr)
  16582. {
  16583. LoopCount *loopCount = loop->loopCount;
  16584. IR::Opnd *sizeOpnd = nullptr;
  16585. Assert(loopCount);
  16586. Assert(loop->memOpInfo->inductionVariableOpndPerUnrollMap);
  16587. if (loop->memOpInfo->inductionVariableOpndPerUnrollMap->TryGetValue(unroll, &sizeOpnd))
  16588. {
  16589. return sizeOpnd;
  16590. }
  16591. Func *localFunc = loop->GetFunc();
  16592. const auto InsertInstr = [&](IR::Instr *instr)
  16593. {
  16594. if (insertBeforeInstr == nullptr)
  16595. {
  16596. loop->landingPad->InsertAfter(instr);
  16597. }
  16598. else
  16599. {
  16600. insertBeforeInstr->InsertBefore(instr);
  16601. }
  16602. };
  16603. if (loopCount->LoopCountMinusOneSym())
  16604. {
  16605. IRType type = loopCount->LoopCountSym()->GetType();
  16606. // Loop count is off by one, so add one
  16607. IR::RegOpnd *loopCountOpnd = IR::RegOpnd::New(loopCount->LoopCountSym(), type, localFunc);
  16608. sizeOpnd = loopCountOpnd;
  16609. if (unroll != 1)
  16610. {
  16611. sizeOpnd = IR::RegOpnd::New(TyUint32, this->func);
  16612. IR::Opnd *unrollOpnd = IR::IntConstOpnd::New(unroll, type, localFunc);
  16613. InsertInstr(IR::Instr::New(Js::OpCode::Mul_I4,
  16614. sizeOpnd,
  16615. loopCountOpnd,
  16616. unrollOpnd,
  16617. localFunc));
  16618. }
  16619. }
  16620. else
  16621. {
  16622. uint size = (loopCount->LoopCountMinusOneConstantValue() + 1) * unroll;
  16623. sizeOpnd = IR::IntConstOpnd::New(size, IRType::TyUint32, localFunc);
  16624. }
  16625. loop->memOpInfo->inductionVariableOpndPerUnrollMap->Add(unroll, sizeOpnd);
  16626. return sizeOpnd;
  16627. }
  16628. IR::RegOpnd*
  16629. GlobOpt::GenerateStartIndexOpndForMemop(Loop *loop, IR::Opnd *indexOpnd, IR::Opnd *sizeOpnd, bool isInductionVariableChangeIncremental, bool bIndexAlreadyChanged, IR::Instr *insertBeforeInstr)
  16630. {
  16631. IR::RegOpnd *startIndexOpnd = nullptr;
  16632. Func *localFunc = loop->GetFunc();
  16633. IRType type = indexOpnd->GetType();
  16634. const int cacheIndex = ((int)isInductionVariableChangeIncremental << 1) | (int)bIndexAlreadyChanged;
  16635. if (loop->memOpInfo->startIndexOpndCache[cacheIndex])
  16636. {
  16637. return loop->memOpInfo->startIndexOpndCache[cacheIndex];
  16638. }
  16639. const auto InsertInstr = [&](IR::Instr *instr)
  16640. {
  16641. if (insertBeforeInstr == nullptr)
  16642. {
  16643. loop->landingPad->InsertAfter(instr);
  16644. }
  16645. else
  16646. {
  16647. insertBeforeInstr->InsertBefore(instr);
  16648. }
  16649. };
  16650. startIndexOpnd = IR::RegOpnd::New(type, localFunc);
  16651. // If the 2 are different we can simply use indexOpnd
  16652. if (isInductionVariableChangeIncremental != bIndexAlreadyChanged)
  16653. {
  16654. InsertInstr(IR::Instr::New(Js::OpCode::Ld_A,
  16655. startIndexOpnd,
  16656. indexOpnd,
  16657. localFunc));
  16658. }
  16659. else
  16660. {
  16661. // Otherwise add 1 to it
  16662. InsertInstr(IR::Instr::New(Js::OpCode::Add_I4,
  16663. startIndexOpnd,
  16664. indexOpnd,
  16665. IR::IntConstOpnd::New(1, type, localFunc, true),
  16666. localFunc));
  16667. }
  16668. if (!isInductionVariableChangeIncremental)
  16669. {
  16670. InsertInstr(IR::Instr::New(Js::OpCode::Sub_I4,
  16671. startIndexOpnd,
  16672. startIndexOpnd,
  16673. sizeOpnd,
  16674. localFunc));
  16675. }
  16676. loop->memOpInfo->startIndexOpndCache[cacheIndex] = startIndexOpnd;
  16677. return startIndexOpnd;
  16678. }
  16679. IR::Instr*
  16680. GlobOpt::FindUpperBoundsCheckInstr(IR::Instr* fromInstr)
  16681. {
  16682. IR::Instr *upperBoundCheck = fromInstr;
  16683. do
  16684. {
  16685. upperBoundCheck = upperBoundCheck->m_prev;
  16686. Assert(upperBoundCheck);
  16687. Assert(!upperBoundCheck->IsLabelInstr());
  16688. } while (upperBoundCheck->m_opcode != Js::OpCode::BoundCheck);
  16689. return upperBoundCheck;
  16690. }
  16691. IR::Instr*
  16692. GlobOpt::FindArraySegmentLoadInstr(IR::Instr* fromInstr)
  16693. {
  16694. IR::Instr *headSegmentLengthLoad = fromInstr;
  16695. do
  16696. {
  16697. headSegmentLengthLoad = headSegmentLengthLoad->m_prev;
  16698. Assert(headSegmentLengthLoad);
  16699. Assert(!headSegmentLengthLoad->IsLabelInstr());
  16700. } while (headSegmentLengthLoad->m_opcode != Js::OpCode::LdIndir);
  16701. return headSegmentLengthLoad;
  16702. }
void
GlobOpt::RemoveMemOpSrcInstr(IR::Instr* memopInstr, IR::Instr* srcInstr, BasicBlock* block)
{
    // After a Memset/Memcopy has been emitted for the loop, removes the original
    // Ld/StElem instruction and the helper instructions (bound checks, segment
    // loads, NoImplicitCallUses, BailOnNotArray) that were emitted for it,
    // converting the element access itself into a ByteCodeUses.
    Assert(srcInstr && (srcInstr->m_opcode == Js::OpCode::LdElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict));
    Assert(memopInstr && (memopInstr->m_opcode == Js::OpCode::Memcopy || memopInstr->m_opcode == Js::OpCode::Memset));
    Assert(block);
    const bool isDst = srcInstr->m_opcode == Js::OpCode::StElemI_A || srcInstr->m_opcode == Js::OpCode::StElemI_A_Strict;
    IR::RegOpnd* opnd = (isDst ? memopInstr->GetDst() : memopInstr->GetSrc1())->AsIndirOpnd()->GetBaseOpnd();
    IR::ArrayRegOpnd* arrayOpnd = opnd->IsArrayRegOpnd() ? opnd->AsArrayRegOpnd() : nullptr;
    // topInstr is walked backwards to the first helper instruction belonging to srcInstr.
    IR::Instr* topInstr = srcInstr;
    if (srcInstr->extractedUpperBoundCheckWithoutHoisting)
    {
        IR::Instr *upperBoundCheck = FindUpperBoundsCheckInstr(srcInstr);
        Assert(upperBoundCheck && upperBoundCheck != srcInstr);
        topInstr = upperBoundCheck;
    }
    if (srcInstr->loadedArrayHeadSegmentLength && arrayOpnd && arrayOpnd->HeadSegmentLengthSym())
    {
        IR::Instr *arrayLoadSegmentHeadLength = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHeadLength);
        topInstr = arrayLoadSegmentHeadLength;
        arrayOpnd->RemoveHeadSegmentLengthSym();
    }
    if (srcInstr->loadedArrayHeadSegment && arrayOpnd && arrayOpnd->HeadSegmentSym())
    {
        IR::Instr *arrayLoadSegmentHead = FindArraySegmentLoadInstr(topInstr);
        Assert(arrayLoadSegmentHead);
        topInstr = arrayLoadSegmentHead;
        arrayOpnd->RemoveHeadSegmentSym();
    }
    // If no bounds check are present, simply look up for instruction added for instrumentation
    if(topInstr == srcInstr)
    {
        bool checkPrev = true;
        while (checkPrev)
        {
            switch (topInstr->m_prev->m_opcode)
            {
            case Js::OpCode::BailOnNotArray:
            case Js::OpCode::NoImplicitCallUses:
            case Js::OpCode::ByteCodeUses:
                topInstr = topInstr->m_prev;
                checkPrev = !!topInstr->m_prev;
                break;
            default:
                checkPrev = false;
                break;
            }
        }
    }
    // Remove everything from topInstr up to (not including) srcInstr,
    // keeping ByteCodeUses instructions for bailout bookkeeping.
    while (topInstr != srcInstr)
    {
        IR::Instr* removeInstr = topInstr;
        topInstr = topInstr->m_next;
        Assert(
            removeInstr->m_opcode == Js::OpCode::BailOnNotArray ||
            removeInstr->m_opcode == Js::OpCode::NoImplicitCallUses ||
            removeInstr->m_opcode == Js::OpCode::ByteCodeUses ||
            removeInstr->m_opcode == Js::OpCode::LdIndir ||
            removeInstr->m_opcode == Js::OpCode::BoundCheck
        );
        if (removeInstr->m_opcode != Js::OpCode::ByteCodeUses)
        {
            block->RemoveInstr(removeInstr);
        }
    }
    this->ConvertToByteCodeUses(srcInstr);
}
  16771. void
  16772. GlobOpt::GetMemOpSrcInfo(Loop* loop, IR::Instr* instr, IR::RegOpnd*& base, IR::RegOpnd*& index, IRType& arrayType)
  16773. {
  16774. Assert(instr && (instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict));
  16775. IR::Opnd* arrayOpnd = instr->m_opcode == Js::OpCode::LdElemI_A ? instr->GetSrc1() : instr->GetDst();
  16776. Assert(arrayOpnd->IsIndirOpnd());
  16777. IR::IndirOpnd* indirArrayOpnd = arrayOpnd->AsIndirOpnd();
  16778. IR::RegOpnd* baseOpnd = (IR::RegOpnd*)indirArrayOpnd->GetBaseOpnd();
  16779. IR::RegOpnd* indexOpnd = (IR::RegOpnd*)indirArrayOpnd->GetIndexOpnd();
  16780. Assert(baseOpnd);
  16781. Assert(indexOpnd);
  16782. // Process Out Params
  16783. base = baseOpnd;
  16784. index = indexOpnd;
  16785. arrayType = indirArrayOpnd->GetType();
  16786. }
void
GlobOpt::EmitMemop(Loop * loop, LoopCount *loopCount, const MemOpEmitData* emitData)
{
    // Replaces a validated memset/memcopy loop pattern with a single Memset or
    // Memcopy bailout instruction, inserted just before the loop's bailout target.
    // - loop:      the loop being transformed; must have (or get) bailout info
    // - loopCount: trip-count information (used here only for trace output)
    // - emitData:  data gathered by ValidateMemOpCandidates: the StElem (and, for
    //              memcopy, LdElem) instructions, bailout kind, and induction info
    Assert(emitData);
    Assert(emitData->candidate);
    Assert(emitData->stElemInstr);
    Assert(emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A || emitData->stElemInstr->m_opcode == Js::OpCode::StElemI_A_Strict);
    IR::BailOutKind bailOutKind = emitData->bailOutKind;

    const byte unroll = emitData->inductionVar.unroll;
    Assert(unroll == 1);
    const bool isInductionVariableChangeIncremental = emitData->inductionVar.isIncremental;
    const bool bIndexAlreadyChanged = emitData->candidate->bIndexAlreadyChanged;

    // Destination array (base/index/element type) comes from the store instruction.
    IR::RegOpnd *baseOpnd = nullptr;
    IR::RegOpnd *indexOpnd = nullptr;
    IRType dstType;
    GetMemOpSrcInfo(loop, emitData->stElemInstr, baseOpnd, indexOpnd, dstType);

    Func *localFunc = loop->GetFunc();

    // Handle bailout info
    EnsureBailTarget(loop);
    Assert(bailOutKind != IR::BailOutInvalid);

    // Keep only Array bits bailOuts. Consider handling these bailouts instead of simply ignoring them
    bailOutKind &= IR::BailOutForArrayBits;

    // Add our custom bailout to handle Op_MemCopy return value.
    bailOutKind |= IR::BailOutOnMemOpError;
    BailOutInfo *const bailOutInfo = loop->bailOutInfo;
    Assert(bailOutInfo);

    IR::Instr *insertBeforeInstr = bailOutInfo->bailOutInstr;
    Assert(insertBeforeInstr);

    // Materialize the element count and the first index the memop touches,
    // then form the destination indir operand [base + startIndex].
    IR::Opnd *sizeOpnd = GenerateInductionVariableChangeForMemOp(loop, unroll, insertBeforeInstr);
    IR::RegOpnd *startIndexOpnd = GenerateStartIndexOpndForMemop(loop, indexOpnd, sizeOpnd, isInductionVariableChangeIncremental, bIndexAlreadyChanged, insertBeforeInstr);
    IR::IndirOpnd* dstOpnd = IR::IndirOpnd::New(baseOpnd, startIndexOpnd, dstType, localFunc);

    IR::Opnd *src1;
    const bool isMemset = emitData->candidate->IsMemSet();

    // Get the source according to the memop type
    if (isMemset)
    {
        // Memset source: either the symbol holding the stored value, or the
        // stored constant boxed to a Var address operand.
        MemSetEmitData* data = (MemSetEmitData*)emitData;
        const Loop::MemSetCandidate* candidate = data->candidate->AsMemSet();
        if (candidate->srcSym)
        {
            IR::RegOpnd* regSrc = IR::RegOpnd::New(candidate->srcSym, candidate->srcSym->GetType(), func);
            regSrc->SetIsJITOptimizedReg(true);
            src1 = regSrc;
        }
        else
        {
            src1 = IR::AddrOpnd::New(candidate->constant.ToVar(localFunc), IR::AddrOpndKindConstantAddress, localFunc);
        }
    }
    else
    {
        // Memcopy source: an indir over the load's base array, sharing the same
        // start index as the destination (both sides use the same induction var).
        Assert(emitData->candidate->IsMemCopy());

        MemCopyEmitData* data = (MemCopyEmitData*)emitData;
        Assert(data->ldElemInstr);
        Assert(data->ldElemInstr->m_opcode == Js::OpCode::LdElemI_A);

        IR::RegOpnd *srcBaseOpnd = nullptr;
        IR::RegOpnd *srcIndexOpnd = nullptr;
        IRType srcType;
        GetMemOpSrcInfo(loop, data->ldElemInstr, srcBaseOpnd, srcIndexOpnd, srcType);

        Assert(GetVarSymID(srcIndexOpnd->GetStackSym()) == GetVarSymID(indexOpnd->GetStackSym()));
        src1 = IR::IndirOpnd::New(srcBaseOpnd, startIndexOpnd, srcType, localFunc);
    }

    // Generate memcopy
    IR::Instr* memopInstr = IR::BailOutInstr::New(isMemset ? Js::OpCode::Memset : Js::OpCode::Memcopy, bailOutKind, bailOutInfo, localFunc);
    memopInstr->SetDst(dstOpnd);
    memopInstr->SetSrc1(src1);
    memopInstr->SetSrc2(sizeOpnd);
    insertBeforeInstr->InsertBefore(memopInstr);

#if DBG_DUMP
    if (DO_MEMOP_TRACE())
    {
        // Trace-only: render the value type, the loop count (sym or constant),
        // and for memset the stored value (sym or constant by type).
        char valueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
        baseOpnd->GetValueType().ToString(valueTypeStr);
        const int loopCountBufSize = 16;
        char16 loopCountBuf[loopCountBufSize];
        if (loopCount->LoopCountMinusOneSym())
        {
            swprintf_s(loopCountBuf, _u("s%u"), loopCount->LoopCountMinusOneSym()->m_id);
        }
        else
        {
            swprintf_s(loopCountBuf, _u("%u"), loopCount->LoopCountMinusOneConstantValue() + 1);
        }
        if (isMemset)
        {
            const Loop::MemSetCandidate* candidate = emitData->candidate->AsMemSet();
            const int constBufSize = 32;
            char16 constBuf[constBufSize];
            if (candidate->srcSym)
            {
                swprintf_s(constBuf, _u("s%u"), candidate->srcSym->m_id);
            }
            else
            {
                // Format the constant according to which union member is active.
                switch (candidate->constant.type)
                {
                case TyInt8:
                case TyInt16:
                case TyInt32:
                case TyInt64:
                    swprintf_s(constBuf, sizeof(IntConstType) == 8 ? _u("%lld") : _u("%d"), candidate->constant.u.intConst.value);
                    break;
                case TyFloat32:
                case TyFloat64:
                    swprintf_s(constBuf, _u("%.4f"), candidate->constant.u.floatConst.value);
                    break;
                case TyVar:
                    swprintf_s(constBuf, sizeof(Js::Var) == 8 ? _u("0x%.16llX") : _u("0x%.8X"), candidate->constant.u.varConst.value);
                    break;
                default:
                    AssertMsg(false, "Unsupported constant type");
                    swprintf_s(constBuf, _u("Unknown"));
                    break;
                }
            }
            TRACE_MEMOP_PHASE(MemSet, loop, emitData->stElemInstr,
                              _u("ValueType: %S, Base: s%u, Index: s%u, Constant: %s, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              constBuf,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
        else
        {
            const Loop::MemCopyCandidate* candidate = emitData->candidate->AsMemCopy();
            TRACE_MEMOP_PHASE(MemCopy, loop, emitData->stElemInstr,
                              _u("ValueType: %S, StBase: s%u, Index: s%u, LdBase: s%u, LoopCount: %s, IsIndexChangedBeforeUse: %d"),
                              valueTypeStr,
                              candidate->base,
                              candidate->index,
                              candidate->ldBase,
                              loopCountBuf,
                              bIndexAlreadyChanged);
        }
    }
#endif

    // The original element accesses are now redundant; remove them (and for
    // memcopy, the load as well).
    RemoveMemOpSrcInstr(memopInstr, emitData->stElemInstr, emitData->block);
    if (!isMemset)
    {
        RemoveMemOpSrcInstr(memopInstr, ((MemCopyEmitData*)emitData)->ldElemInstr, emitData->block);
    }
}
  16931. bool
  16932. GlobOpt::InspectInstrForMemSetCandidate(Loop* loop, IR::Instr* instr, MemSetEmitData* emitData, bool& errorInInstr)
  16933. {
  16934. Assert(emitData && emitData->candidate && emitData->candidate->IsMemSet());
  16935. Loop::MemSetCandidate* candidate = (Loop::MemSetCandidate*)emitData->candidate;
  16936. if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
  16937. {
  16938. if (instr->GetDst()->IsIndirOpnd()
  16939. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base)
  16940. && (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
  16941. )
  16942. {
  16943. Assert(instr->IsProfiledInstr());
  16944. emitData->stElemInstr = instr;
  16945. emitData->bailOutKind = instr->GetBailOutKind();
  16946. return true;
  16947. }
  16948. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan StElemI_A detected"));
  16949. errorInInstr = true;
  16950. }
  16951. else if (instr->m_opcode == Js::OpCode::LdElemI_A)
  16952. {
  16953. TRACE_MEMOP_PHASE_VERBOSE(MemSet, loop, instr, _u("Orphan LdElemI_A detected"));
  16954. errorInInstr = true;
  16955. }
  16956. return false;
  16957. }
bool
GlobOpt::InspectInstrForMemCopyCandidate(Loop* loop, IR::Instr* instr, MemCopyEmitData* emitData, bool& errorInInstr)
{
    // Matches one loop-body instruction against the memcopy candidate in emitData.
    // The caller iterates the block backward, so the StElem is expected before the
    // LdElem; the StElem is recorded first (returning false), and true is returned
    // only once the matching LdElem is also found. errorInInstr is set when an
    // element access that does not belong to the candidate is seen, or when the
    // load/store array value types disagree.
    Assert(emitData && emitData->candidate && emitData->candidate->IsMemCopy());
    Loop::MemCopyCandidate* candidate = (Loop::MemCopyCandidate*)emitData->candidate;
    if (instr->m_opcode == Js::OpCode::StElemI_A || instr->m_opcode == Js::OpCode::StElemI_A_Strict)
    {
        // The store must target exactly the candidate's base[index].
        if (
            instr->GetDst()->IsIndirOpnd() &&
            (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->base) &&
            (GetVarSymID(instr->GetDst()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
        )
        {
            Assert(instr->IsProfiledInstr());
            emitData->stElemInstr = instr;
            emitData->bailOutKind = instr->GetBailOutKind();
            // Still need to find the LdElem
            return false;
        }
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan StElemI_A detected"));
        errorInInstr = true;
    }
    else if (instr->m_opcode == Js::OpCode::LdElemI_A)
    {
        // The load is only valid once its store has been seen, and must read from
        // ldBase with the same index symbol the store uses.
        if (
            emitData->stElemInstr &&
            instr->GetSrc1()->IsIndirOpnd() &&
            (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetStackSym()) == candidate->ldBase) &&
            (GetVarSymID(instr->GetSrc1()->AsIndirOpnd()->GetIndexOpnd()->GetStackSym()) == candidate->index)
        )
        {
            Assert(instr->IsProfiledInstr());
            emitData->ldElemInstr = instr;
            // Both arrays must have the same value type for a raw element copy.
            ValueType stValueType = emitData->stElemInstr->GetDst()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
            ValueType ldValueType = emitData->ldElemInstr->GetSrc1()->AsIndirOpnd()->GetBaseOpnd()->GetValueType();
            if (stValueType != ldValueType)
            {
#if DBG_DUMP
                char16 stValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                stValueType.ToString(stValueTypeStr);
                char16 ldValueTypeStr[VALUE_TYPE_MAX_STRING_SIZE];
                ldValueType.ToString(ldValueTypeStr);
                TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("for mismatch in Load(%s) and Store(%s) value type"), ldValueTypeStr, stValueTypeStr);
#endif
                errorInInstr = true;
                return false;
            }
            // We found both instruction for this candidate
            return true;
        }
        TRACE_MEMOP_PHASE_VERBOSE(MemCopy, loop, instr, _u("Orphan LdElemI_A detected"));
        errorInInstr = true;
    }
    return false;
}
  17013. // The caller is responsible to free the memory allocated between inOrderEmitData[iEmitData -> end]
bool
GlobOpt::ValidateMemOpCandidates(Loop * loop, _Out_writes_(iEmitData) MemOpEmitData** inOrderEmitData, int& iEmitData)
{
    // Walks the loop body once (backward) and pairs every memop candidate with its
    // concrete instruction(s), filling inOrderEmitData from the back so the emit
    // data ends up in program order. On success iEmitData is decremented to 0.
    // Returns false (leaving iEmitData partially decremented) when any candidate
    // fails validation; the caller then frees inOrderEmitData[iEmitData..end].
    AnalysisAssert(iEmitData == (int)loop->memOpInfo->candidates->Count());
    // We iterate over the second block of the loop only. MemOp Works only if the loop has exactly 2 blocks
    Assert(loop->blockList.HasTwo());

    Loop::MemOpList::Iterator iter(loop->memOpInfo->candidates);
    BasicBlock* bblock = loop->blockList.Head()->next;
    Loop::MemOpCandidate* candidate = nullptr;
    MemOpEmitData* emitData = nullptr;

    // Iterate backward because the list of candidate is reversed
    FOREACH_INSTR_BACKWARD_IN_BLOCK(instr, bblock)
    {
        if (!candidate)
        {
            // Time to check next candidate
            if (!iter.Next())
            {
                // We have been through the whole list of candidates, finish
                break;
            }
            candidate = iter.Data();
            if (!candidate)
            {
                continue;
            }

            // Common check for memset and memcopy
            Loop::InductionVariableChangeInfo inductionVariableChangeInfo = { 0, 0 };

            // Get the inductionVariable changeInfo
            if (!loop->memOpInfo->inductionVariableChangeInfoMap->TryGetValue(candidate->index, &inductionVariableChangeInfo))
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): no induction variable"), candidate->base);
                return false;
            }

            if (inductionVariableChangeInfo.unroll != candidate->count)
            {
                TRACE_MEMOP_VERBOSE(loop, nullptr, _u("MemOp skipped (s%d): not matching unroll count"), candidate->base);
                return false;
            }

            // Allocate the emit data matching the candidate's kind.
            if (candidate->IsMemSet())
            {
                Assert(!PHASE_OFF(Js::MemSetPhase, this->func));
                emitData = JitAnew(this->alloc, MemSetEmitData);
            }
            else
            {
                Assert(!PHASE_OFF(Js::MemCopyPhase, this->func));
                // Specific check for memcopy
                Assert(candidate->IsMemCopy());
                Loop::MemCopyCandidate* memcopyCandidate = candidate->AsMemCopy();

                if (memcopyCandidate->base == Js::Constants::InvalidSymID
                    || memcopyCandidate->ldBase == Js::Constants::InvalidSymID
                    || (memcopyCandidate->ldCount != memcopyCandidate->count))
                {
                    TRACE_MEMOP_PHASE(MemCopy, loop, nullptr, _u("(s%d): not matching ldElem and stElem"), candidate->base);
                    return false;
                }
                emitData = JitAnew(this->alloc, MemCopyEmitData);
            }
            Assert(emitData);
            emitData->block = bblock;
            emitData->inductionVar = inductionVariableChangeInfo;
            emitData->candidate = candidate;
        }

        // Try to match the current instruction to the pending candidate.
        bool errorInInstr = false;
        bool candidateFound = candidate->IsMemSet() ?
            InspectInstrForMemSetCandidate(loop, instr, (MemSetEmitData*)emitData, errorInInstr)
            : InspectInstrForMemCopyCandidate(loop, instr, (MemCopyEmitData*)emitData, errorInInstr);

        if (errorInInstr)
        {
            JitAdelete(this->alloc, emitData);
            return false;
        }

        if (candidateFound)
        {
            AnalysisAssert(iEmitData > 0);
            if (iEmitData == 0)
            {
                // Explicit for OACR
                break;
            }
            // Fill from the back: candidates are visited in reverse program order.
            inOrderEmitData[--iEmitData] = emitData;
            candidate = nullptr;
            emitData = nullptr;
        }
    } NEXT_INSTR_BACKWARD_IN_BLOCK;

    // If the iterator still has candidates, some were never matched in the block.
    if (iter.IsValid())
    {
        TRACE_MEMOP(loop, nullptr, _u("Candidates not found in loop while validating"));
        return false;
    }
    return true;
}
  17107. void
  17108. GlobOpt::ProcessMemOp()
  17109. {
  17110. FOREACH_LOOP_IN_FUNC_EDITING(loop, this->func)
  17111. {
  17112. if (HasMemOp(loop))
  17113. {
  17114. const int candidateCount = loop->memOpInfo->candidates->Count();
  17115. Assert(candidateCount > 0);
  17116. LoopCount * loopCount = GetOrGenerateLoopCountForMemOp(loop);
  17117. // If loopCount is not available we can not continue with memop
  17118. if (!loopCount || !(loopCount->LoopCountMinusOneSym() || loopCount->LoopCountMinusOneConstantValue()))
  17119. {
  17120. TRACE_MEMOP(loop, nullptr, _u("MemOp skipped for no loop count"));
  17121. loop->doMemOp = false;
  17122. loop->memOpInfo->candidates->Clear();
  17123. continue;
  17124. }
  17125. // The list is reversed, check them and place them in order in the following array
  17126. MemOpEmitData** inOrderCandidates = JitAnewArray(this->alloc, MemOpEmitData*, candidateCount);
  17127. int i = candidateCount;
  17128. if (ValidateMemOpCandidates(loop, inOrderCandidates, i))
  17129. {
  17130. Assert(i == 0);
  17131. // Process the valid MemOp candidate in order.
  17132. for (; i < candidateCount; ++i)
  17133. {
  17134. // Emit
  17135. EmitMemop(loop, loopCount, inOrderCandidates[i]);
  17136. JitAdelete(this->alloc, inOrderCandidates[i]);
  17137. }
  17138. }
  17139. else
  17140. {
  17141. Assert(i != 0);
  17142. for (; i < candidateCount; ++i)
  17143. {
  17144. JitAdelete(this->alloc, inOrderCandidates[i]);
  17145. }
  17146. // One of the memop candidates did not validate. Do not emit for this loop.
  17147. loop->doMemOp = false;
  17148. loop->memOpInfo->candidates->Clear();
  17149. }
  17150. // Free memory
  17151. JitAdeleteArray(this->alloc, candidateCount, inOrderCandidates);
  17152. }
  17153. } NEXT_LOOP_EDITING;
  17154. }